1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
22 #.apidoc title: Object Relational Mapping
23 #.apidoc module-mods: member-order: bysource
26 Object relational mapping to database (postgresql) module
27 * Hierarchical structure
28 * Constraints consistency, validations
29 * Object meta Data depends on its status
30 * Optimised processing by complex query (multiple actions at once)
31 * Default fields value
32 * Permissions optimisation
* Persistent object: DB postgresql
35 * Multi-level caching system
36 * 2 different inheritancies
38 - classicals (varchar, integer, boolean, ...)
39 - relations (one2many, many2one, many2many)
60 from lxml import etree
65 import openerp.netsvc as netsvc
66 import openerp.tools as tools
67 from openerp.tools.config import config
68 from openerp.tools.misc import CountingStream
69 from openerp.tools.safe_eval import safe_eval as eval
70 from openerp.tools.translate import _
71 from openerp import SUPERUSER_ID
72 from query import Query
_logger = logging.getLogger(__name__)
# Dedicated logger for database schema changes (column creation/migration).
_schema = logging.getLogger(__name__ + '.schema')

# List of etree._Element subclasses that we choose to ignore when parsing XML.
from openerp.tools import SKIPPED_ELEMENT_TYPES

# Accepts a SQL ORDER BY clause: comma-separated (optionally double-quoted)
# lowercase identifiers, each with an optional ASC/DESC, case-insensitive.
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model names (_name): lowercase letters, digits, '_' and '.'.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """Fill *modifiers* from a field descriptor (result of fields_get()).

    For each of the 'invisible', 'readonly' and 'required' attributes the
    default boolean value is taken from the field; states that override the
    default are turned into a ``('state', 'in'/'not in', [...])`` domain.

    :param dict field: field descriptor (may contain a 'states' mapping)
    :param dict modifiers: dictionary updated in-place
    """
    # Elided initializations restored: both dicts must exist before the loop.
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    for state, modifs in (field.get("states", {})).items():
        # restored: iterate the (attr, value) pairs declared for this state
        for modif in modifs:
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)

    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            # restored: without states overriding it, keep the static default
            modifiers[attr] = default_value
# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Merge a view node's dynamic attributes (attrs, states, invisible,
    readonly, required) into the *modifiers* dict, in-place.

    :param node: an etree element of the view architecture
    :param dict modifiers: dictionary updated in-place
    :param dict context: used to evaluate attribute expressions
    :param bool in_tree_view: route 'invisible' to 'tree_invisible' (column
                              hiding) instead of cell-level invisibility
    """
    if node.get('attrs'):
        modifiers.update(eval(node.get('attrs')))

    if node.get('states'):
        if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
        else:
            # restored: first (or only) state-based invisibility domain
            modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]

    for a in ('invisible', 'readonly', 'required'):
        # restored guard: only evaluate attributes actually present on the node
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v
def simplify_modifiers(modifiers):
    """Drop falsy 'invisible'/'readonly'/'required' entries from *modifiers*
    in-place, so only meaningful modifiers get serialized into the view.
    """
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            # restored: a false modifier is equivalent to an absent one
            del modifiers[a]
def transfer_modifiers_to_node(modifiers, node):
    """Serialize non-empty *modifiers* as JSON into the node's ``modifiers``
    attribute (after dropping falsy entries)."""
    # restored guard: do not attach an empty modifiers attribute
    if modifiers:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    :returns: nothing
    """
    # restored: accumulator shared by the two transfer steps below
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """Self-check helper: compute modifiers for *what* (an XML arch string or
    a field descriptor dict) and assert the JSON serialization equals
    *expected*."""
    # restored: fresh accumulator for each check
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
# openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """Run the modifiers serialization self-checks (callable from a shell)."""
    arch_cases = (
        ('<field name="a"/>', '{}'),
        ('<field name="a" invisible="1"/>', '{"invisible": true}'),
        ('<field name="a" readonly="1"/>', '{"readonly": true}'),
        ('<field name="a" required="1"/>', '{"required": true}'),
        ('<field name="a" invisible="0"/>', '{}'),
        ('<field name="a" readonly="0"/>', '{}'),
        ('<field name="a" required="0"/>', '{}'),
        ('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}'),  # TODO order is not guaranteed
        ('<field name="a" invisible="1" required="0"/>', '{"invisible": true}'),
        ('<field name="a" invisible="0" required="1"/>', '{"required": true}'),
        ("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}'),
    )
    for arch, expected in arch_cases:
        test_modifiers(arch, expected)

    # The dictionary is supposed to be the result of fields_get().
    descriptor_cases = (
        ({}, '{}'),
        ({"invisible": True}, '{"invisible": true}'),
        ({"invisible": False}, '{}'),
    )
    for descriptor, expected in descriptor_cases:
        test_modifiers(descriptor, expected)
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

        The _name attribute in osv and osv_memory object is subject to
        some restrictions. This function returns True or False whether
        the given name is allowed or not.

        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in other not, which leads to this kind
        of errors:

            psycopg2.ProgrammingError: relation "xxx" does not exist).

        The same restriction should apply to both osv and osv_memory
        objects for consistency.

    """
    # Single boolean expression instead of the elided if/return pair.
    return regex_object_name.match(name) is not None
def raise_on_invalid_object_name(name):
    """Raise except_orm (and log an error) if *name* is not a valid model
    name per check_object_name()."""
    if not check_object_name(name):
        msg = "The _name attribute %s is not valid." % name
        # restored: log before raising so the problem shows up in server logs
        _logger.error(msg)
        raise except_orm('ValueError', msg)
# Mapping from the ondelete rule names used in field definitions to the
# single-letter codes PostgreSQL stores in pg_constraint.confdeltype.
POSTGRES_CONFDELTYPES = {
    'RESTRICT': 'r',
    'NO ACTION': 'a',
    'CASCADE': 'c',
    'SET NULL': 'n',
    'SET DEFAULT': 'd',
}
def intersect(la, lb):
    """Return the elements of *la* that also occur in *lb*, preserving
    the order of *la* (duplicates in *la* are kept)."""
    # List comprehension instead of filter(lambda ...): same result as the
    # Python 2 filter-on-list, and idiomatic.
    return [x for x in la if x in lb]
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    normalized = fieldname
    # '.id' (database id) and ':id' (external id) suffixes become their own
    # path component, unless already preceded by a separator.
    for pattern, replacement in ((r'([^/])\.id', r'\1/.id'),
                                 (r'([^/]):id', r'\1/id')):
        normalized = re.sub(pattern, replacement, normalized)
    return normalized.split('/')
class except_orm(Exception):
    """Generic ORM exception carrying a title (*name*) and a message
    (*value*); client code often displays both to the user."""
    def __init__(self, name, value):
        # restored: expose the pieces as attributes, as callers rely on them
        self.name = name
        self.value = value
        self.args = (name, value)
class BrowseRecordError(Exception):
    """Raised when a browse_record is built or used incorrectly."""
    pass
class browse_null(object):
    """ Readonly python database object browser

    Null object returned in place of a browse_record when a relational
    field is empty: every attribute/item access yields None and the
    object is falsy.
    """

    def __init__(self):
        self.id = False

    def __getitem__(self, name):
        return None

    def __getattr__(self, name):
        return None  # XXX: return self ?

    def __str__(self):
        return ''

    def __nonzero__(self):
        return False

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return u''
291 # TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """ Collection of browse objects

        Such an instance will be returned when doing a ``browse([ids..])``
        and will be iterable, yielding browse() objects
    """

    def __init__(self, lst, context=None):
        # restored: normalize a missing context to an empty dict so callers
        # can always treat self.context as a mapping
        if not context:
            context = {}
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """ An object that behaves like a row of an object's table.
        It has attributes after the columns of the corresponding object.

        Examples::

            uobj = pool.get('res.users')
            user_rec = uobj.browse(cr, uid, 104)
            name = user_rec.name
    """

    def __init__(self, cr, uid, id, table, cache, context=None,
                 list_class=browse_record_list, fields_process=None):
        """
        :param table: the browsed object (inherited from orm)
        :param dict cache: a dictionary of model->field->data to be shared
                           across browse objects, thus reducing the SQL
                           read()s. It can speed up things a lot, but also be
                           disastrous if not discarded after write()/unlink()
        :param dict context: dictionary with an optional context
        """
        # restored: mutable-default avoidance for both optional dicts
        if fields_process is None:
            fields_process = {}
        if context is None:
            context = {}
        self._list_class = list_class
        # restored: cursor/user/record-id attributes used throughout the class
        self._cr = cr
        self._uid = uid
        self._id = id
        self._table = table # deprecated, use _model!
        self._model = table
        self._table_name = self._table._name
        self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        cache.setdefault(table._name, {})
        self._data = cache[table._name]
        # restored: keep the whole shared cache for prefetching/refresh()
        self._cache = cache

#        if not (id and isinstance(id, (int, long,))):
#            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}
357 def __getitem__(self, name):
361 if name not in self._data[self._id]:
362 # build the list of fields we will fetch
364 # fetch the definition of the field which was asked for
365 if name in self._table._columns:
366 col = self._table._columns[name]
367 elif name in self._table._inherit_fields:
368 col = self._table._inherit_fields[name][2]
369 elif hasattr(self._table, str(name)):
370 attr = getattr(self._table, name)
371 if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
372 def function_proxy(*args, **kwargs):
373 if 'context' not in kwargs and self._context:
374 kwargs.update(context=self._context)
375 return attr(self._cr, self._uid, [self._id], *args, **kwargs)
376 return function_proxy
380 error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
381 self.__logger.warning(error_msg)
382 if self.__logger.isEnabledFor(logging.DEBUG):
383 self.__logger.debug(''.join(traceback.format_stack()))
384 raise KeyError(error_msg)
386 # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
388 # gen the list of "local" (ie not inherited) fields which are classic or many2one
389 fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
390 # gen the list of inherited fields
391 inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
392 # complete the field list with the inherited fields which are classic or many2one
393 fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
394 # otherwise we fetch only that field
396 fields_to_fetch = [(name, col)]
397 ids = filter(lambda id: name not in self._data[id], self._data.keys())
399 field_names = map(lambda x: x[0], fields_to_fetch)
400 field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
402 # TODO: improve this, very slow for reports
403 if self._fields_process:
404 lang = self._context.get('lang', 'en_US') or 'en_US'
405 lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
407 raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
408 lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])
410 for field_name, field_column in fields_to_fetch:
411 if field_column._type in self._fields_process:
412 for result_line in field_values:
413 result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
414 if result_line[field_name]:
415 result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)
418 # Where did those ids come from? Perhaps old entries in ir_model_dat?
419 _logger.warning("No field_values found for ids %s in %s", ids, self)
420 raise KeyError('Field %s not found in %s'%(name, self))
421 # create browse records for 'remote' objects
422 for result_line in field_values:
424 for field_name, field_column in fields_to_fetch:
425 if field_column._type == 'many2one':
426 if result_line[field_name]:
427 obj = self._table.pool.get(field_column._obj)
428 if isinstance(result_line[field_name], (list, tuple)):
429 value = result_line[field_name][0]
431 value = result_line[field_name]
433 # FIXME: this happen when a _inherits object
434 # overwrite a field of it parent. Need
435 # testing to be sure we got the right
436 # object and not the parent one.
437 if not isinstance(value, browse_record):
439 # In some cases the target model is not available yet, so we must ignore it,
440 # which is safe in most cases, this value will just be loaded later when needed.
441 # This situation can be caused by custom fields that connect objects with m2o without
442 # respecting module dependencies, causing relationships to be connected to soon when
443 # the target is not loaded yet.
445 new_data[field_name] = browse_record(self._cr,
446 self._uid, value, obj, self._cache,
447 context=self._context,
448 list_class=self._list_class,
449 fields_process=self._fields_process)
451 new_data[field_name] = value
453 new_data[field_name] = browse_null()
455 new_data[field_name] = browse_null()
456 elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
457 new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
458 elif field_column._type in ('reference'):
459 if result_line[field_name]:
460 if isinstance(result_line[field_name], browse_record):
461 new_data[field_name] = result_line[field_name]
463 ref_obj, ref_id = result_line[field_name].split(',')
464 ref_id = long(ref_id)
466 obj = self._table.pool.get(ref_obj)
467 new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
469 new_data[field_name] = browse_null()
471 new_data[field_name] = browse_null()
473 new_data[field_name] = result_line[field_name]
474 self._data[result_line['id']].update(new_data)
476 if not name in self._data[self._id]:
477 # How did this happen? Could be a missing model due to custom fields used too soon, see above.
478 self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values)
479 self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table)
480 raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
481 return self._data[self._id][name]
483 def __getattr__(self, name):
487 raise AttributeError(e)
489 def __contains__(self, name):
490 return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)
493 raise NotImplementedError("Iteration is not allowed on %s" % self)
495 def __hasattr__(self, name):
502 return "browse_record(%s, %d)" % (self._table_name, self._id)
504 def __eq__(self, other):
505 if not isinstance(other, browse_record):
507 return (self._table_name, self._id) == (other._table_name, other._id)
509 def __ne__(self, other):
510 if not isinstance(other, browse_record):
512 return (self._table_name, self._id) != (other._table_name, other._id)
514 # we need to define __unicode__ even though we've already defined __str__
515 # because we have overridden __getattr__
516 def __unicode__(self):
517 return unicode(str(self))
520 return hash((self._table_name, self._id))
525 """Force refreshing this browse_record's data and all the data of the
526 records that belong to the same cache, by emptying the cache completely,
527 preserving only the record identifiers (for prefetching optimizations).
529 for model, model_cache in self._cache.iteritems():
530 # only preserve the ids of the records that were in the cache
531 cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
532 self._cache[model].clear()
533 self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * If no size (or an empty or negative size is provided) return an
      'infinite' VARCHAR
    * Otherwise return a VARCHAR(n)

    :type int size: varchar size, optional
    :rtype: str
    """
    if size:
        if not isinstance(size, int):
            raise TypeError("VARCHAR parameter should be an int, got %s"
                            % type(size))
        if size > 0:
            return 'VARCHAR(%d)' % size
    # restored: no (or non-positive) size means an unbounded VARCHAR
    return 'VARCHAR'
# Mapping of simple _column classes to the PostgreSQL column type they are
# stored as; types needing computation (char, float, selection, function,
# reference) are handled in get_pg_type() below.
# NOTE(review): the text/html/date entries were lost in extraction and are
# restored from upstream — confirm against the original file.
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.text: 'text',
    fields.html: 'text',
    fields.date: 'date',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    fields.serialized: 'text',
}
def get_pg_type(f, type_override=None):
    """
    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification)
    :rtype: (str, str) or None if the field type is not supported
    """
    field_type = type_override or type(f)

    if field_type in FIELDS_TO_PGTYPES:
        pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
    elif issubclass(field_type, fields.float):
        # restored branch: fixed-precision floats map to NUMERIC
        if f.digits:
            pg_type = ('numeric', 'NUMERIC')
        else:
            pg_type = ('float8', 'DOUBLE PRECISION')
    elif issubclass(field_type, (fields.char, fields.reference)):
        pg_type = ('varchar', pg_varchar(f.size))
    elif issubclass(field_type, fields.selection):
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
                or getattr(f, 'size', None) == -1:
            pg_type = ('int4', 'INTEGER')
        else:
            pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
    elif issubclass(field_type, fields.function):
        if f._type == 'selection':
            pg_type = ('varchar', pg_varchar())
        else:
            # recurse with the underlying type the function field emulates
            pg_type = get_pg_type(f, getattr(fields, f._type))
    else:
        _logger.warning('%s type not supported!', field_type)
        pg_type = None

    return pg_type
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.

    """

    # {module_name: [model_class, ...]} filled in as model classes are defined
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            # restored: base/abstract classes are not recorded for any module
            return

        # The (OpenERP) module name can be in the `openerp.addons` namespace
        # or not. For instance module `sale` can be imported as
        # `openerp.addons.sale` (the good way) or `sale` (for backward
        # compatibility).
        module_parts = self.__module__.split('.')
        if len(module_parts) > 2 and module_parts[0] == 'openerp' and \
                module_parts[1] == 'addons':
            module_name = self.__module__.split('.')[2]
        else:
            # restored: plain (non-namespaced) import, first component wins
            module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (list() keeps this working whether .keys() yields a list or a view)
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys())
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vaccuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register class attribute
    may be set to False.
    """
    __metaclass__ = MetaModel
    _auto = True # create database backend
    _register = False # Set to false if the model shouldn't be automatically discovered.

    # NOTE(review): several class attributes (_name, _columns, _inherits,
    # _constraints, _defaults, ...) were lost in extraction around here —
    # restore from upstream before relying on this class.

    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # dict of {field:method}, with method returning the (name_get of records, {id: fold})
    # to include in the _read_group, if grouped on this field
    # NOTE(review): the attribute this comment documents (_group_by_full)
    # appears to have been elided.

    _transient = False # True in a TransientModel
    _transient_max_count = None
    _transient_max_hours = None
    _transient_check_time = 20

    # { 'parent_model': 'm2o_field', ... }
    # NOTE(review): the _inherits attribute documented above was elided.

    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model.
    # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #                  field_column_obj, origina_parent_model), ... }
    # NOTE(review): the _inherit_fields attribute documented above was elided.

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.
    # NOTE(review): the _all_columns attribute documented above was elided.

    _sql_constraints = []
    # RPC-exposed methods protected from being overridden by ir.model.fields
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']

    # pseudo-field used for optimistic concurrency checks in read()/write()
    CONCURRENCY_CHECK_FIELD = '__last_update'
    def log(self, cr, uid, id, message, secondary=False, context=None):
        # Deprecated no-op kept for backward compatibility: the old res.log
        # mechanism was replaced by OpenChatter; this only emits a warning.
        return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
    def view_init(self, cr, uid, fields_list, context=None):
        """Override this method to do specific things when a view on the object is opened."""
        # Intentionally a no-op hook for subclasses.
725 def _field_create(self, cr, context=None):
726 """ Create entries in ir_model_fields for all the model's fields.
728 If necessary, also create an entry in ir_model, and if called from the
729 modules loading scheme (by receiving 'module' in the context), also
730 create entries in ir_model_data (for the model and the fields).
732 - create an entry in ir_model (if there is not already one),
733 - create an entry in ir_model_data (if there is not already one, and if
734 'module' is in the context),
735 - update ir_model_fields with the fields found in _columns
736 (TODO there is some redundancy as _columns is updated from
737 ir_model_fields in __init__).
742 cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
744 cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
745 model_id = cr.fetchone()[0]
746 cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
748 model_id = cr.fetchone()[0]
749 if 'module' in context:
750 name_id = 'model_'+self._name.replace('.', '_')
751 cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
753 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
754 (name_id, context['module'], 'ir.model', model_id)
759 cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
761 for rec in cr.dictfetchall():
762 cols[rec['name']] = rec
764 ir_model_fields_obj = self.pool.get('ir.model.fields')
766 # sparse field should be created at the end, as it depends on its serialized field already existing
767 model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
768 for (k, f) in model_fields:
770 'model_id': model_id,
773 'field_description': f.string,
775 'relation': f._obj or '',
776 'view_load': (f.view_load and 1) or 0,
777 'select_level': tools.ustr(f.select or 0),
778 'readonly': (f.readonly and 1) or 0,
779 'required': (f.required and 1) or 0,
780 'selectable': (f.selectable and 1) or 0,
781 'translate': (f.translate and 1) or 0,
782 'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
783 'serialization_field_id': None,
785 if getattr(f, 'serialization_field', None):
786 # resolve link to serialization_field if specified by name
787 serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
788 if not serialization_field_id:
789 raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
790 vals['serialization_field_id'] = serialization_field_id[0]
792 # When its a custom field,it does not contain f.select
793 if context.get('field_state', 'base') == 'manual':
794 if context.get('field_name', '') == k:
795 vals['select_level'] = context.get('select', '0')
796 #setting value to let the problem NOT occur next time
798 vals['select_level'] = cols[k]['select_level']
801 cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
802 id = cr.fetchone()[0]
804 cr.execute("""INSERT INTO ir_model_fields (
805 id, model_id, model, name, field_description, ttype,
806 relation,view_load,state,select_level,relation_field, translate, serialization_field_id
808 %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
810 id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
811 vals['relation'], bool(vals['view_load']), 'base',
812 vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
814 if 'module' in context:
815 name1 = 'field_' + self._table + '_' + k
816 cr.execute("select name from ir_model_data where name=%s", (name1,))
818 name1 = name1 + "_" + str(id)
819 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
820 (name1, context['module'], 'ir.model.fields', id)
823 for key, val in vals.items():
824 if cols[k][key] != vals[key]:
825 cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
827 cr.execute("""UPDATE ir_model_fields SET
828 model_id=%s, field_description=%s, ttype=%s, relation=%s,
829 view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
831 model=%s AND name=%s""", (
832 vals['model_id'], vals['field_description'], vals['ttype'],
833 vals['relation'], bool(vals['view_load']),
834 vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
840 # Goal: try to apply inheritance at the instanciation level and
841 # put objects in the pool var
844 def create_instance(cls, pool, cr):
845 """ Instanciate a given model.
847 This class method instanciates the class of some model (i.e. a class
848 deriving from osv or osv_memory). The class might be the class passed
849 in argument or, if it inherits from another class, a class constructed
850 by combining the two classes.
852 The ``attributes`` argument specifies which parent class attributes
855 TODO: the creation of the combined class is repeated at each call of
856 this method. This is probably unnecessary.
859 attributes = ['_columns', '_defaults', '_inherits', '_constraints',
862 parent_names = getattr(cls, '_inherit', None)
864 if isinstance(parent_names, (str, unicode)):
865 name = cls._name or parent_names
866 parent_names = [parent_names]
869 # for res.parnter.address compatiblity, should be remove in v7
870 if 'res.partner.address' in parent_names:
871 parent_names.pop(parent_names.index('res.partner.address'))
872 parent_names.append('res.partner')
874 raise TypeError('_name is mandatory in case of multiple inheritance')
876 for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
877 parent_model = pool.get(parent_name)
879 raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
880 'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
881 if not getattr(cls, '_original_module', None) and name == parent_model._name:
882 cls._original_module = parent_model._original_module
883 parent_class = parent_model.__class__
886 new = copy.copy(getattr(parent_model, s, {}))
888 # Don't _inherit custom fields.
892 # Duplicate float fields because they have a .digits
893 # cache (which must be per-registry, not server-wide).
895 if new[c]._type == 'float':
896 new[c] = copy.copy(new[c])
897 if hasattr(new, 'update'):
898 new.update(cls.__dict__.get(s, {}))
899 elif s=='_constraints':
900 for c in cls.__dict__.get(s, []):
902 for c2 in range(len(new)):
903 #For _constraints, we should check field and methods as well
904 if new[c2][2]==c[2] and (new[c2][0] == c[0] \
905 or getattr(new[c2][0],'__name__', True) == \
906 getattr(c[0],'__name__', False)):
907 # If new class defines a constraint with
908 # same function name, we let it override
917 new.extend(cls.__dict__.get(s, []))
919 cls = type(name, (cls, parent_class), dict(nattr, _register=False))
920 if not getattr(cls, '_original_module', None):
921 cls._original_module = cls._module
922 obj = object.__new__(cls)
923 obj.__init__(pool, cr)
927 """Register this model.
929 This doesn't create an instance but simply register the model
930 as being part of the module where it is defined.
935 # Set the module name (e.g. base, sale, accounting, ...) on the class.
936 module = cls.__module__.split('.')[0]
937 if not hasattr(cls, '_module'):
940 # Record this class in the list of models to instantiate for this module,
941 # managed by the metaclass.
942 module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
943 if cls not in module_model_list:
944 module_model_list.append(cls)
946 # Since we don't return an instance here, the __init__
947 # method won't be called.
    def __init__(self, pool, cr):
        """ Initialize a model and make it part of the given registry.

        - copy the stored fields' functions in the osv_pool,
        - update the _columns with the fields found in ir_model_fields,
        - ensure there is a many2one for each _inherits'd parent,
        - update the children's _columns,
        - give a chance to each field to initialize itself.

        :param pool: the model registry this instance belongs to
        :param cr: database cursor used for setup queries

        NOTE(review): several lines of this method are elided in this
        excerpt; comments describe only what is visible.
        """
        # Register this instance in the registry under its model name.
        pool.add(self._name, self)

        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name
            raise except_orm('ValueError', msg)

        if not self._description:
            self._description = self._name

        # SQL table name: model name with dots replaced by underscores.
        self._table = self._name.replace('.', '_')

        if not hasattr(self, '_log_access'):
            # If _log_access is not specified, it is the same value as _auto.
            self._log_access = getattr(self, "_auto", True)

        # Work on a per-instance copy so the class-level _columns is not mutated.
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if hasattr(f, 'digits_change'):
            def not_this_field(stored_func):
                # Keep store-triggers that do NOT target this exact
                # (model, field) pair — they will be re-registered below.
                x, y, z, e, f, l = stored_func
                return x != self._name or y != store_field
            self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
            if not isinstance(f, fields.function):
                # default store spec: recompute on any write of this model
                sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
            for object, aa in sm.items():
                    (fnct, fields2, order, length) = aa
                    (fnct, fields2, order) = aa
                    raise except_orm('Error',
                        ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                self.pool._store_function.setdefault(object, [])
                self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
                # keep triggers ordered by priority (element 4 of the tuple)
                self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))

        # Map SQL constraint names to their user-visible error messages.
        for (key, _, msg) in self._sql_constraints:
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields
        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
        cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
        for field in cr.dictfetchall():
            if field['name'] in self._columns:
                'string': field['field_description'],
                'required': bool(field['required']),
                'readonly': bool(field['readonly']),
                'domain': eval(field['domain']) if field['domain'] else None,
                'size': field['size'],
                'ondelete': field['on_delete'],
                'translate': (field['translate']),
                #'select': int(field['select_level'])

            if field['serialization_field_id']:
                # sparse field: stored inside another (serialization) field
                cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
                attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
                if field['ttype'] in ['many2one', 'one2many', 'many2many']:
                    attrs.update({'relation': field['relation']})
                self._columns[field['name']] = fields.sparse(**attrs)
            elif field['ttype'] == 'selection':
                self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
            elif field['ttype'] == 'reference':
                self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
            elif field['ttype'] == 'many2one':
                self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
            elif field['ttype'] == 'one2many':
                self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
            elif field['ttype'] == 'many2many':
                # generated relation table name for manual m2m fields
                _rel1 = field['relation'].replace('.', '_')
                _rel2 = field['model'].replace('.', '_')
                _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
                self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
        self._inherits_check()
        self._inherits_reload()
        if not self._sequence:
            self._sequence = self._table + '_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        for f in self._columns:
            self._columns[f].restart()

        # Transient (wizard) models get vacuum bookkeeping counters.
        if self.is_transient():
            self._transient_check_count = 0
            self._transient_max_count = config.get('osv_memory_count_limit')
            self._transient_max_hours = config.get('osv_memory_age_limit')
            assert self._log_access, "TransientModels must have log_access turned on, "\
                "in order to implement their access rights policy"

        # Validate rec_name; fallback assignment below belongs to an elided
        # else-branch — TODO confirm against full source.
        if self._rec_name is not None:
            assert self._rec_name in self._columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
            self._rec_name = 'name'
    def __export_row(self, cr, uid, row, fields, context=None):
        """Export one browse_record as a row of string cells, possibly
        followed by extra rows for its x2many sub-records.

        :param row: browse_record to export
        :param fields: list of field paths (each a list of path components)
        :return: list of rows, the first being the record itself

        NOTE(review): several lines are elided in this excerpt; comments
        describe only the visible code.
        """

        def check_type(field_type):
            # Type-specific placeholder for empty cells.
            if field_type == 'float':
            elif field_type == 'integer':
            elif field_type == 'boolean':

        def selection_field(in_field):
            # Resolve the column object for path component f[i], walking
            # _inherits parents recursively.
            col_obj = self.pool.get(in_field.keys()[0])
            if f[i] in col_obj._columns.keys():
                return col_obj._columns[f[i]]
            elif f[i] in col_obj._inherits.keys():
                selection_field(col_obj._inherits)

        def _get_xml_id(self, cr, uid, r):
            # Return an External ID for record r, creating one in the
            # '__export__' pseudo-module if none exists yet.
            model_data = self.pool.get('ir.model.data')
            data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
            d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
            r = '%s.%s' % (d['module'], d['name'])
            # candidate name: <table>_<id>[_<postfix>] until unique
            n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
            if not model_data.search(cr, uid, [('name', '=', n)]):
            model_data.create(cr, uid, {
                'model': self._name,
                'module': '__export__',

        # one output cell per requested field path
        data = map(lambda x: '', range(len(fields)))
        for fpos in range(len(fields)):
                    r = _get_xml_id(self, cr, uid, r)
                        # To display external name of selection field when its exported
                        if f[i] in self._columns.keys():
                            cols = self._columns[f[i]]
                        elif f[i] in self._inherit_fields.keys():
                            cols = selection_field(self._inherits)
                        if cols and cols._type == 'selection':
                            sel_list = cols.selection
                            if r and type(sel_list) == type([]):
                                # replace the stored key by its display label
                                r = [x[1] for x in sel_list if r==x[0]]
                                r = r and r[0] or False
                    if f[i] in self._columns:
                        r = check_type(self._columns[f[i]]._type)
                    elif f[i] in self._inherit_fields:
                        r = check_type(self._inherit_fields[f[i]][2]._type)
                    data[fpos] = r or False
                    if isinstance(r, (browse_record_list, list)):
                        # sub-field paths sharing the current prefix
                        fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                        if [x for x in fields2 if x]:
                        done.append(fields2)
                        if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'):
                            # m2m exported as comma-separated external IDs
                            data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r])
                            lines2 = row2._model.__export_row(cr, uid, row2, fields2,
                                    context)
                                for fpos2 in range(len(fields)):
                                    if lines2 and lines2[0][fpos2]:
                                        data[fpos2] = lines2[0][fpos2]
                                        # fallback: comma-join the name_get of each related record
                                        name_relation = self.pool.get(rr._table_name)._rec_name
                                        if isinstance(rr[name_relation], browse_record):
                                            rr = rr[name_relation]
                                        rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
                                        rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                                        dt += tools.ustr(rr_name or '') + ','
                                    data[fpos] = dt[:-1]
                    if isinstance(r, browse_record):
                        # single related record: export its display name
                        r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
                        r = r and r[0] and r[0][1] or ''
                    data[fpos] = tools.ustr(r or '')
        return [data] + lines
1199 def export_data(self, cr, uid, ids, fields_to_export, context=None):
1201 Export fields for selected objects
1203 :param cr: database cursor
1204 :param uid: current user id
1205 :param ids: list of ids
1206 :param fields_to_export: list of fields
1207 :param context: context arguments, like lang, time zone
1208 :rtype: dictionary with a *datas* matrix
1210 This method is used when exporting data via client menu
1215 cols = self._columns.copy()
1216 for f in self._inherit_fields:
1217 cols.update({f: self._inherit_fields[f][2]})
1218 fields_to_export = map(fix_import_export_id_paths, fields_to_export)
1220 for row in self.browse(cr, uid, ids, context):
1221 datas += self.__export_row(cr, uid, row, fields_to_export, context)
1222 return {'datas': datas}
    def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
        """
        .. deprecated:: 7.0
            Use :meth:`~load` instead

        Import given data in given module

        This method is used when importing data via client menu.

        Example of fields to import for a sale.order::

            partner_id, (=name_search)
            order_line/.id, (=database_id)
            order_line/product_id/id, (=xml id)
            order_line/price_unit,
            order_line/product_uom_qty,
            order_line/product_uom/id (=xml_id)

        This method returns a 4-tuple with the following structure::

            (return_code, errored_resource, error_message, unused)

        * The first item is a return code, it is ``-1`` in case of
          import error, or the last imported row number in case of success
        * The second item contains the record data dict that failed to import
          in case of error, otherwise it's 0
        * The third item contains an error message string in case of error,
          otherwise it's 0
        * The last item is currently unused, with no specific semantics

        :param fields: list of fields to import
        :param datas: data to import
        :param mode: 'init' or 'update' for record creation
        :param current_module: module name
        :param noupdate: flag for record creation
        :param filename: optional file to store partial import state for recovery
        :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
        :rtype: (int, dict or 0, str or 0, str or 0)

        NOTE(review): several lines are elided in this excerpt (e.g. the
        nested ``def log(m):`` header and the ``try:`` around the loop).
        """
        # work on a private copy of the context
        context = dict(context) if context is not None else {}
        context['_import_current_module'] = current_module

        fields = map(fix_import_export_id_paths, fields)
        ir_model_data_obj = self.pool.get('ir.model.data')

            # log callback body: hard errors abort the whole import
            if m['type'] == 'error':
                raise Exception(m['message'])

        # resume a previously interrupted partial import, if any
        if config.get('import_partial') and filename:
            with open(config.get('import_partial'), 'rb') as partial_import_file:
                data = pickle.load(partial_import_file)
                position = data.get(filename, 0)

        for res_id, xml_id, res, info in self._convert_records(cr, uid,
                self._extract_records(cr, uid, fields, datas,
                    context=context, log=log),
                context=context, log=log):
            ir_model_data_obj._update(cr, uid, self._name,
                 current_module, res, mode=mode, xml_id=xml_id,
                 noupdate=noupdate, res_id=res_id, context=context)
            # remember the last fully-imported source row
            position = info.get('rows', {}).get('to', 0) + 1
            if config.get('import_partial') and filename and (not (position%100)):
                # checkpoint progress every 100 rows for crash recovery
                with open(config.get('import_partial'), 'rb') as partial_import:
                    data = pickle.load(partial_import)
                data[filename] = position
                with open(config.get('import_partial'), 'wb') as partial_import:
                    pickle.dump(data, partial_import)
                if context.get('defer_parent_store_computation'):
                    self._parent_store_compute(cr)
        except Exception, e:
            # report the failing source line to the caller
            return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''

        if context.get('defer_parent_store_computation'):
            self._parent_store_compute(cr)
        return position, 0, 0, 0
    def load(self, cr, uid, fields, data, context=None):
        """
        Attempts to load the data matrix, and returns a list of ids (or
        ``False`` if there was an error and no id could be generated) and a
        list of messages.

        The ids are those of the records created and saved (in database), in
        the same order they were extracted from the file. They can be passed
        directly to :meth:`~read`

        :param fields: list of fields to import, at the same index as the corresponding data
        :type fields: list(str)
        :param data: row-major matrix of data to import
        :type data: list(list(str))
        :param dict context:
        :returns: {ids: list(int)|False, messages: [Message]}

        NOTE(review): some initialization lines (``messages``, ``ids``,
        ``mode``, ``current_module``, ``noupdate``) are elided in this
        excerpt.
        """
        # outer savepoint: the whole load is rolled back on any error
        cr.execute('SAVEPOINT model_load')

        fields = map(fix_import_export_id_paths, fields)
        ModelData = self.pool['ir.model.data']
        fg = self.fields_get(cr, uid, context=context)

        for id, xid, record, info in self._convert_records(cr, uid,
                self._extract_records(cr, uid, fields, data,
                    context=context, log=messages.append),
                context=context, log=messages.append):
            try:
                # per-record savepoint so one bad record doesn't kill the rest
                cr.execute('SAVEPOINT model_load_save')
            except psycopg2.InternalError, e:
                # broken transaction, exit and hope the source error was
                # logged already; otherwise record a generic failure
                if not any(message['type'] == 'error' for message in messages):
                    messages.append(dict(info, type='error',message=
                        u"Unknown database error: '%s'" % e))
                ids.append(ModelData._update(cr, uid, self._name,
                     current_module, record, mode=mode, xml_id=xid,
                     noupdate=noupdate, res_id=id, context=context))
                cr.execute('RELEASE SAVEPOINT model_load_save')
            except psycopg2.Warning, e:
                _logger.exception('Failed to import record %s', record)
                cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
                messages.append(dict(info, type='warning', message=str(e)))
            except psycopg2.Error, e:
                _logger.exception('Failed to import record %s', record)
                # Failed to write, log to messages, rollback savepoint (to
                # avoid broken transaction) and keep going
                cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
                # translate the pg error into a user-readable message
                messages.append(dict(
                    **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
        if any(message['type'] == 'error' for message in messages):
            # any hard error voids the whole import
            cr.execute('ROLLBACK TO SAVEPOINT model_load')
        return {'ids': ids, 'messages': messages}
    def _extract_records(self, cr, uid, fields_, data,
                         context=None, log=lambda a: None):
        """ Generates record dicts from the data sequence.

        The result is a generator of dicts mapping field names to raw
        (unconverted, unvalidated) values.

        For relational fields, if sub-fields were provided the value will be
        a list of sub-records

        The following sub-fields may be set on the record (by key):
        * None is the name_get for the record (to use with name_create/name_search)
        * "id" is the External ID for the record
        * ".id" is the Database ID for the record

        NOTE(review): the ``index`` initialization and loop header are
        elided in this excerpt.
        """
        columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
        # Fake columns to avoid special cases in extractor
        columns[None] = fields.char('rec_name')
        columns['id'] = fields.char('External ID')
        columns['.id'] = fields.integer('Database ID')

        # m2o fields can't be on multiple lines so exclude them from the
        # is_relational field rows filter, but special-case it later on to
        # be handled with relational fields (as it can have subfields)
        is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
        get_o2m_values = itemgetter_tuple(
            [index for index, field in enumerate(fields_)
             if columns[field[0]]._type == 'one2many'])
        get_nono2m_values = itemgetter_tuple(
            [index for index, field in enumerate(fields_)
             if columns[field[0]]._type != 'one2many'])
        # Checks if the provided row has any non-empty non-relational field
        def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
            return any(g(row)) and not any(f(row))

            if index >= len(data): return
            # copy non-relational fields to record dict
            record = dict((field[0], value)
                          for field, value in itertools.izip(fields_, row)
                          if not is_relational(field[0]))

            # Get all following rows which have relational values attached to
            # the current record (no non-relational values)
            record_span = itertools.takewhile(
                only_o2m_values, itertools.islice(data, index + 1, None))
            # stitch record row back on for relational fields
            record_span = list(itertools.chain([row], record_span))
            for relfield in set(
                    field[0] for field in fields_
                    if is_relational(field[0])):
                column = columns[relfield]
                # FIXME: how to not use _obj without relying on fields_get?
                Model = self.pool[column._obj]

                # get only cells for this sub-field, should be strictly
                # non-empty, field path [None] is for name_get column
                indices, subfields = zip(*((index, field[1:] or [None])
                                           for index, field in enumerate(fields_)
                                           if field[0] == relfield))

                # return all rows which have at least one value for the
                # subfields of relfield
                relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
                # recurse into the comodel to extract the sub-records
                record[relfield] = [subrecord
                                    for subrecord, _subinfo in Model._extract_records(
                                        cr, uid, subfields, relfield_data,
                                        context=context, log=log)]

            yield record, {'rows': {
                'to': index + len(record_span) - 1
            index += len(record_span)
    def _convert_records(self, cr, uid, records,
                         context=None, log=lambda a: None):
        """ Converts records from the source iterable (recursive dicts of
        strings) into forms which can be written to the database (via
        self.create or (ir.model.data)._update)

        :returns: a list of triplets of (id, xid, record)
        :rtype: list((int|None, str|None, dict))

        NOTE(review): a few lines (e.g. ``dbid``/``xid`` initialization)
        are elided in this excerpt.
        """
        if context is None: context = {}
        Converter = self.pool['ir.fields.converter']
        columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
        Translation = self.pool['ir.translation']
        # translated labels used in warning/error messages below
            (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
                context.get('lang'))
                 or column.string or f))
            for f, column in columns.iteritems())
        # one converter callable per column
            (k, Converter.to_field(cr, uid, self, column, context=context))
            for k, column in columns.iteritems())

        def _log(base, field, exception):
            # Forward a conversion problem to the caller's log callback.
            type = 'warning' if isinstance(exception, Warning) else 'error'
            record = dict(base, field=field, type=type,
                          message=unicode(exception.args[0]) % base)
            if len(exception.args) > 1 and exception.args[1]:
                record.update(exception.args[1])

        # CountingStream tracks the current source row index for messages
        stream = CountingStream(records)
        for record, extras in stream:
            # name_get/name_create
            if None in record: pass
                # '.id' cell: database identifier, normally an int
                dbid = int(record['.id'])
                    # in case of overridden id column
                    dbid = record['.id']
                if not self.search(cr, uid, [('id', '=', dbid)], context=context):
                    # referenced database id does not exist: report it
                        record=stream.index,
                        message=_(u"Unknown database identifier '%s'") % dbid))

            for field, strvalue in record.iteritems():
                if field in (None, 'id', '.id'): continue
                # empty cell converts to False
                converted[field] = False

                # In warnings and error messages, use translated string as
                # the field name
                message_base = dict(
                    extras, record=stream.index, field=field_names[field])
                converted[field], ws = converters[field](strvalue)
                    if isinstance(w, basestring):
                        # wrap warning string in an ImportWarning for
                        # uniform handling
                        w = ImportWarning(w)
                    _log(message_base, field, w)
                except ValueError, e:
                    _log(message_base, field, e)

            yield dbid, xid, converted, dict(extras, record=stream.index)
1526 def get_invalid_fields(self, cr, uid):
1527 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run all ``_constraints`` of this model against the given record
        ids; raise a ValidateError listing every failed constraint.

        On failure the offending field names are accumulated in
        ``self._invalids``; on full success that set is cleared.

        NOTE(review): a few lines (e.g. the ``error_msgs`` accumulator
        initialization and some ``else:`` headers) are elided in this
        excerpt.
        """
        context = context or {}
        lng = context.get('lang')
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            # each constraint is (checker_function, message, field_names)
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # Check presence of __call__ directly instead of using
                # callable() because it will be deprecated as of Python 3.0
                if hasattr(msg, '__call__'):
                    # message may be computed dynamically, and may return a
                    # (template, params) tuple
                    tmp_msg = msg(self, cr, uid, ids, context=context)
                    if isinstance(tmp_msg, tuple):
                        tmp_msg, params = tmp_msg
                        translated_msg = tmp_msg % params
                        translated_msg = tmp_msg
                    # static message: translate it as a 'constraint' source
                    translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg)
                        _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                # remember the offending fields for get_invalid_fields()
                self._invalids.update(fields)
            raise except_orm('ValidateError', '\n'.join(error_msgs))
            self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """
        Returns default values for the fields in fields_list.

        :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
        :type fields_list: list
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                        It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                        or override a default value for a field.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)

        NOTE(review): a few lines (e.g. the ``defaults`` dict
        initialization and some ``else:``/``continue`` lines) are elided
        in this excerpt.
        """
        # trigger view init hook
        self.view_init(cr, uid, fields_list, context)

        # get the default values for the inherited fields
        for t in self._inherits.keys():
            defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
                context))

        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                if callable(self._defaults[f]):
                    defaults[f] = self._defaults[f](self, cr, uid, context)
                    defaults[f] = self._defaults[f]
            # resolve the column object, across _inherits if needed
            fld_def = ((f in self._columns) and self._columns[f]) \
                or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            if isinstance(fld_def, fields.property):
                # property fields take their default from ir.property
                property_obj = self.pool.get('ir.property')
                prop_value = property_obj.get(cr, uid, f, self._name, context=context)
                if isinstance(prop_value, (browse_record, browse_null)):
                    defaults[f] = prop_value.id
                    defaults[f] = prop_value
            if f not in defaults:

        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type == 'many2one':
                    # drop dangling references to deleted records
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                if fld_def._type == 'many2many':
                    # keep only the ids that still exist in the comodel
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type == 'one2many':
                    # filter each sub-record's m2o values against existing rows
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one':
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one':
                                obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                defaults[field] = field_value

        # get the default values from the context
        for key in context or {}:
            if key.startswith('default_') and (key[8:] in fields_list):
                defaults[key[8:]] = context[key]
1657 def fields_get_keys(self, cr, user, context=None):
1658 res = self._columns.keys()
1659 # TODO I believe this loop can be replace by
1660 # res.extend(self._inherit_fields.key())
1661 for parent in self._inherits:
1662 res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
1665 def _rec_name_fallback(self, cr, uid, context=None):
1666 rec_name = self._rec_name
1667 if rec_name not in self._columns:
1668 rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Hook returning a window title for the requested view.

        Meant to be overridden by models whose header depends on the
        context; a false value keeps the client's default title.
        NOTE(review): the base implementation's return statement is not
        visible in this excerpt — presumably ``return False``; confirm.
        """
1677 def user_has_groups(self, cr, uid, groups, context=None):
1678 """Return true if the user is at least member of one of the groups
1679 in groups_str. Typically used to resolve ``groups`` attribute
1680 in view and model definitions.
1682 :param str groups: comma-separated list of fully-qualified group
1683 external IDs, e.g.: ``base.group_user,base.group_system``
1684 :return: True if the current user is a member of one of the
1687 return any([self.pool.get('res.users').has_group(cr, uid, group_ext_id)
1688 for group_ext_id in groups.split(',')])
    def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
        """Return the description of the fields in the node.

        In a normal call to this method, node is a complete view architecture
        but it is actually possible to give some sub-node (this is used so
        that the method can call itself recursively).

        Originally, the field descriptions are drawn from the node itself.
        But there is now some code calling fields_get() in order to merge some
        of those information in the architecture.

        NOTE(review): several lines are elided in this excerpt; comments
        describe only the visible code.
        """
        def encode(s):
            # lxml wants byte strings for parsing
            if isinstance(s, unicode):
                return s.encode('utf8')

        def check_group(node):
            """Apply group restrictions, may be set at view level or model level::
               * at view level this means the element should be made invisible to
                 people who are not members
               * at model level (exclusively for fields, obviously), this means
                 the field should be completely removed from the view, as it is
                 completely unavailable for non-members

               :return: True if field should be included in the result of fields_view_get
            """
            if node.tag == 'field' and node.get('name') in self._all_columns:
                column = self._all_columns[node.get('name')].column
                if column.groups and not self.user_has_groups(cr, user,
                                                              groups=column.groups,
                    # model-level restriction: strip the field entirely
                    node.getparent().remove(node)
                    fields.pop(node.get('name'), None)
                    # no point processing view-level ``groups`` anymore, return
            if node.get('groups'):
                can_see = self.user_has_groups(cr, user,
                                               groups=node.get('groups'),
                    # view-level restriction: hide, don't remove
                    node.set('invisible', '1')
                    modifiers['invisible'] = True
                    if 'attrs' in node.attrib:
                        del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # diagram node/arrow: build a pseudo-form from its field children
                    if f.tag in ('field'):
                        xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}

            if node.get('name'):
                # resolve the column, locally or through _inherits
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                    column = self._inherit_fields[node.get('name')][2]
                    relation = self.pool.get(column._obj)
                    # inline sub-views declared under the field node
                    if f.tag in ('form', 'tree', 'graph', 'kanban'):
                        ctx = context.copy()
                        ctx['base_model_name'] = self._name
                        xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                        views[str(f.tag)] = {
                    attrs = {'views': views}
                    if node.get('widget') and node.get('widget') == 'selection':
                        # Prepare the cached selection list for the client. This needs to be
                        # done even when the field is invisible to the current user, because
                        # other events could need to change its value to any of the selectable ones
                        # (such as on_change events, refreshes, etc.)

                        # If domain and context are strings, we keep them for client-side, otherwise
                        # we evaluate them server-side to consider them when generating the list of
                        # values
                        # TODO: find a way to remove this hack, by allow dynamic domains
                        if column._domain and not isinstance(column._domain, basestring):
                            dom = column._domain
                        dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                        search_context = dict(context)
                        if column._context and not isinstance(column._context, basestring):
                            search_context.update(column._context)
                        attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                        if (node.get('required') and not int(node.get('required'))) or not column.required:
                            # optional selection fields get an empty choice
                            attrs['selection'].append((False, ''))
                fields[node.get('name')] = attrs

                field = model_fields.get(node.get('name'))
                    transfer_field_to_modifiers(field, modifiers)

        elif node.tag in ('form', 'tree'):
            # allow models to override the window title
            result = self.view_header_get(cr, user, False, node.tag, context)
                node.set('string', result)
            in_tree_view = node.tag == 'tree'

        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if not check_group(node):
            # node must be removed, no need to proceed further with its children

        # The view architeture overrides the python model.
        # Get the attrs before they are (possibly) deleted by check_group below
        transfer_node_to_modifiers(node, modifiers, context, in_tree_view)

        # TODO remove attrs couterpart in modifiers when invisible is true ?

        # translate the view's visible strings
        if 'lang' in context:
            if node.text and node.text.strip():
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.text.strip())
                    node.text = node.text.replace(node.text.strip(), trans)
            if node.tail and node.tail.strip():
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.tail.strip())
                    node.tail = node.tail.replace(node.tail.strip(), trans)

            if node.get('string') and not result:
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                    node.set('string', trans)

            for attr_name in ('confirm', 'sum', 'help', 'placeholder'):
                attr_value = node.get(attr_name)
                    trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
                    node.set(attr_name, trans)

        for f in node:
            # recurse into children (filters/separators of search fields too)
            if children or (node.tag == 'field' and f.tag in ('filter','separator')):
                fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))

        transfer_modifiers_to_node(modifiers, node)
    def _disable_workflow_buttons(self, cr, user, node):
        """ Set the buttons in node to readonly if the user can't activate them. """
        # admin user can always activate workflow buttons

        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        # only workflow buttons (type != 'object') are access-checked here
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            # groups allowed to fire this button's workflow signal
            cr.execute("""SELECT DISTINCT t.group_id
                  INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                  INNER JOIN wkf_transition t ON (t.act_to = a.id)
                         AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # clickable when unrestricted, or when the user shares a group
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            button.set('readonly', str(int(not can_click)))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        """ Return an architecture and a description of all the fields.

        The field description combines the result of fields_get() and
        __view_look_dom().

        :param node: the architecture as as an etree
        :return: a tuple (arch, fields) where arch is the given node as a
                 string and fields is the description of all the fields.
        """
        if node.tag == 'diagram':
            # Diagram views aggregate the fields of both node and arrow models.
            if node.getchildren()[0].tag == 'node':
                node_model = self.pool.get(node.getchildren()[0].get('object'))
                node_fields = node_model.fields_get(cr, user, None, context)
                fields.update(node_fields)
                if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
                    node.set("create", 'false')
            if node.getchildren()[1].tag == 'arrow':
                arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
                fields.update(arrow_fields)
        # (non-diagram branch header elided in this excerpt)
            fields = self.fields_get(cr, user, None, context)
        fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
        node = self._disable_workflow_buttons(cr, user, node)
        # Reflect missing create/unlink/write access rights as view flags so
        # the client hides the corresponding UI actions.
        if node.tag in ('kanban', 'tree', 'form', 'gantt'):
            for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
                if not node.get(action) and not self.check_access_rights(cr, user, operation, raise_exception=False):
                    node.set(action, 'false')
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        # Drop field descriptions that the architecture does not reference.
        for k in fields.keys():
            if k not in fields_def:
                # (deletion of the unused description elided in this excerpt)
        for field in fields_def:
            # (branch on field == 'id' elided in this excerpt)
                # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
                fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
            # (else: unknown field -> build a diagnostic listing the view parts;
            # some lines elided in this excerpt)
                cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
                res = cr.fetchall()[:]
                res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
                msg = "\n * ".join([r[0] for r in res])
                msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
                raise except_orm('View error', msg)
        # (return of (arch, fields) elided in this excerpt)
1949 def _get_default_form_view(self, cr, user, context=None):
1950 """ Generates a default single-line form view using all fields
1951 of the current model except the m2m and o2m ones.
1953 :param cr: database cursor
1954 :param int user: user id
1955 :param dict context: connection context
1956 :returns: a form view as an lxml document
1957 :rtype: etree._Element
1959 view = etree.Element('form', string=self._description)
1960 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
1961 for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
1962 if descriptor['type'] in ('one2many', 'many2many'):
1964 etree.SubElement(view, 'field', name=field)
1965 if descriptor['type'] == 'text':
1966 etree.SubElement(view, 'newline')
1969 def _get_default_search_view(self, cr, user, context=None):
1970 """ Generates a single-field search view, based on _rec_name.
1972 :param cr: database cursor
1973 :param int user: user id
1974 :param dict context: connection context
1975 :returns: a tree view as an lxml document
1976 :rtype: etree._Element
1978 view = etree.Element('search', string=self._description)
1979 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
1982 def _get_default_tree_view(self, cr, user, context=None):
1983 """ Generates a single-field tree view, based on _rec_name.
1985 :param cr: database cursor
1986 :param int user: user id
1987 :param dict context: connection context
1988 :returns: a tree view as an lxml document
1989 :rtype: etree._Element
1991 view = etree.Element('tree', string=self._description)
1992 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
    def _get_default_calendar_view(self, cr, user, context=None):
        """ Generates a default calendar view by trying to infer
        calendar fields from a number of pre-set attribute names

        :param cr: database cursor
        :param int user: user id
        :param dict context: connection context
        :returns: a calendar view
        :rtype: etree._Element
        """
        def set_first_of(seq, in_, to):
            """Sets the first value of ``seq`` also found in ``in_`` to
            the ``to`` attribute of the view being closed over.

            Returns whether it's found a suitable value (and set it on
            the attribute) or not
            """
            # (search-and-set body elided in this excerpt)

        view = etree.Element('calendar', string=self._description)
        # NOTE(review): the field name is passed as SubElement's positional
        # ``attrib`` argument (which expects a dict), not as ``name=...`` --
        # looks like a bug; confirm against lxml's SubElement signature.
        etree.SubElement(view, 'field', self._rec_name_fallback(cr, user, context))

        if (self._date_name not in self._columns):
            # Fall back on the first conventional date column available.
            # NOTE(review): this rebinds ``self._date_name`` -- presumably a
            # class-level attribute of the model; confirm intended side effect.
            for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                if dt in self._columns:
                    self._date_name = dt
            # (loop exit / "no date column found" detection elided in this
            # excerpt; the raise below belongs to that failure branch)
            raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
        view.set('date_start', self._date_name)

        # A user/partner-like column, when present, colorizes entries.
        set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
                     self._columns, 'color')

        # An end date, or failing that a duration, is mandatory.
        if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
                            self._columns, 'date_stop'):
            if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
                                self._columns, 'date_delay'):
                # (``raise except_orm(`` opening line elided in this excerpt)
                    _('Invalid Object Architecture!'),
                    # NOTE(review): %-interpolation happens inside _(), which
                    # defeats the translation lookup; should be _(...) % name.
                    _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % (self._name)))
        # (return of ``view`` elided in this excerpt)
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Get the detailed composition of the requested view like fields, model, view architecture

        :param cr: database cursor
        :param user: current user id
        :param view_id: id of the view or None
        :param view_type: type of the view to return if view_id is None ('form', tree', ...)
        :param context: context arguments, like lang, time zone
        :param toolbar: true to include contextual actions
        :param submenu: deprecated
        :return: dictionary describing the composition of the requested view (including inherited views and extensions)
        :raise AttributeError:
                * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
                * if some tag other than 'position' is found in parent view
        :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
        """

        # Local helper turning unicode into UTF-8 bytes for lxml.
        # NOTE(review): its ``def encode(s):`` header is elided in this excerpt.
            if isinstance(s, unicode):
                return s.encode('utf8')

        def raise_view_error(error_msg, child_view_id):
            # Abort with a message naming both the parent and the child view.
            view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
            error_msg = error_msg % {'parent_xml_id': view.xml_id}
            raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
                                 % (child_view.xml_id, self._name, error_msg))

        def locate(source, spec):
            """ Locate a node in a source (parent) architecture.

            Given a complete source (parent) architecture (i.e. the field
            `arch` in a view), and a 'spec' node (a node in an inheriting
            view that specifies the location in the source view of what
            should be changed), return (if it exists) the node in the
            source view matching the specification.

            :param source: a parent architecture to modify
            :param spec: a modifying node in an inheriting view
            :return: a node in the source matching the spec
            """
            if spec.tag == 'xpath':
                nodes = source.xpath(spec.get('expr'))
                return nodes[0] if nodes else None
            elif spec.tag == 'field':
                # Only compare the field name: a field can be only once in a given view
                # at a given level (and for multilevel expressions, we should use xpath
                # inheritance spec anyway).
                for node in source.getiterator('field'):
                    if node.get('name') == spec.get('name'):
                        # (return of the matching node elided in this excerpt)
            # Generic tag match: every attribute except position/version must
            # agree between spec and candidate node.
            for node in source.getiterator(spec.tag):
                if isinstance(node, SKIPPED_ELEMENT_TYPES):
                    # (``continue`` elided in this excerpt)
                if all(node.get(attr) == spec.get(attr) \
                        for attr in spec.attrib
                        if attr not in ('position','version')):
                    # Version spec should match parent's root element's version
                    if spec.get('version') and spec.get('version') != source.get('version'):
                        # (version-mismatch / match returns elided in this excerpt)

        def apply_inheritance_specs(source, specs_arch, inherit_id=None):
            """ Apply an inheriting view.

            Apply to a source architecture all the spec nodes (i.e. nodes
            describing where and what changes to apply to some parent
            architecture) given by an inheriting view.

            :param source: a parent architecture to modify
            :param specs_arch: a modifying architecture in an inheriting view
            :param inherit_id: the database id of the inheriting view
            :return: a modified source where the specs are applied
            """
            specs_tree = etree.fromstring(encode(specs_arch))
            # Queue of specification nodes (i.e. nodes describing where and
            # changes to apply to some parent architecture).
            specs = [specs_tree]
            # (loop header consuming ``specs`` elided in this excerpt)
                if isinstance(spec, SKIPPED_ELEMENT_TYPES):
                    # (``continue`` elided)
                if spec.tag == 'data':
                    # A <data> wrapper only contributes its children as specs.
                    specs += [ c for c in specs_tree ]
                    # (``continue`` elided)
                node = locate(source, spec)
                if node is not None:
                    pos = spec.get('position', 'inside')
                    if pos == 'replace':
                        if node.getparent() is None:
                            # Replacing the root: the spec's first child
                            # becomes the whole new architecture.
                            source = copy.deepcopy(spec[0])
                        # (else-branch header and child loop elided)
                            node.addprevious(child)
                            node.getparent().remove(node)
                    elif pos == 'attributes':
                        for child in spec.getiterator('attribute'):
                            attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                            # (set-vs-delete test on the value elided)
                                node.set(attribute[0], attribute[1])
                            # (else elided)
                                del(node.attrib[attribute[0]])
                    # (else-branch handling inside/after/before elided)
                        sib = node.getnext()
                        # (child loop and 'inside' case elided)
                        elif pos == 'after':
                            # (no-sibling handling elided)
                                sib.addprevious(child)
                        elif pos == 'before':
                            node.addprevious(child)
                        # (else elided)
                            raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
                # (else: spec matched nothing -> error reporting; the attrs
                # join header is elided in this excerpt)
                        ' %s="%s"' % (attr, spec.get(attr))
                        for attr in spec.attrib
                        if attr != 'position'
                    tag = "<%s%s>" % (spec.tag, attrs)
                    if spec.get('version') and spec.get('version') != source.get('version'):
                        raise_view_error("Mismatching view API version for element '%s': %r vs %r in parent view '%%(parent_xml_id)s'" % \
                                         (tag, spec.get('version'), source.get('version')), inherit_id)
                    raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)

        def apply_view_inheritance(cr, user, source, inherit_id):
            """ Apply all the (directly and indirectly) inheriting views.

            :param source: a parent architecture to modify (with parent
                           modifications already applied)
            :param inherit_id: the database view_id of the parent view
            :return: a modified source where all the modifying architecture
                     are applied
            """
            sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
            for (view_arch, view_id) in sql_inherit:
                source = apply_inheritance_specs(source, view_arch, view_id)
                # Recurse: children of the child view may inherit further.
                source = apply_view_inheritance(cr, user, source, view_id)
            # (return of ``source`` elided in this excerpt)

        result = {'type': view_type, 'model': self._name}

        parent_view_model = None
        view_ref = context.get(view_type + '_view_ref')
        # Search for a root (i.e. without any parent) view.
        # (surrounding loop and guards elided in this excerpt)
            if view_ref and not view_id:
                # A "<module>.<xml_id>" context reference selects the view.
                module, view_ref = view_ref.split('.', 1)
                cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
                view_ref_res = cr.fetchone()
                # (non-empty-result guard elided)
                    view_id = view_ref_res[0]

            # (branch on whether view_id is known elided; the SQL FROM clauses
            # are also elided in this excerpt)
                cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                              WHERE id=%s""", (view_id,))
                cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                              WHERE model=%s AND type=%s AND inherit_id IS NULL
                              ORDER BY priority""", (self._name, view_type))
            sql_res = cr.dictfetchone()

            # Walk up to the root view: inherited views delegate to parents.
            view_id = sql_res['inherit_id'] or sql_res['id']
            parent_view_model = sql_res['model']
            if not sql_res['inherit_id']:
                # (loop ``break`` elided in this excerpt)

        # if a view was found
        # (guard on ``sql_res`` elided)
            source = etree.fromstring(encode(sql_res['arch']))
            result.update(
                arch=apply_view_inheritance(cr, user, source, sql_res['id']),
                type=sql_res['type'],
                view_id=sql_res['id'],
                name=sql_res['name'],
                field_parent=sql_res['field_parent'] or False)
        # otherwise, build some kind of default view
        # (else/try headers elided)
            view = getattr(self, '_get_default_%s_view' % view_type)(
            except AttributeError:
                # what happens here, graph case?
                raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)

        # When the root view belongs to another model (mixed inheritance),
        # expose that model so label translation can also try it.
        if parent_view_model != self._name:
            ctx = context.copy()
            ctx['base_model_name'] = parent_view_model

        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
        result['arch'] = xarch
        result['fields'] = xfields

        # Toolbar: contextual print/action/relate entries for this view.
        # (toolbar guard and ``clean`` helper header elided in this excerpt)
            for key in ('report_sxw_content', 'report_rml_content',
                        'report_sxw', 'report_rml',
                        'report_sxw_content_data', 'report_rml_content_data'):
            # (per-key deletion elided)
            ir_values_obj = self.pool.get('ir.values')
            resprint = ir_values_obj.get(cr, user, 'action',
                'client_print_multi', [(self._name, False)], False,
            resaction = ir_values_obj.get(cr, user, 'action',
                'client_action_multi', [(self._name, False)], False,
            resrelate = ir_values_obj.get(cr, user, 'action',
                'client_action_relate', [(self._name, False)], False,
            # (trailing context arguments elided in this excerpt)
            resaction = [clean(action) for action in resaction
                         if view_type == 'tree' or not action[2].get('multi')]
            resprint = [clean(print_) for print_ in resprint
                        if view_type == 'tree' or not print_[2].get('multi')]
            #When multi="True" set it will display only in More of the list view
            resrelate = [clean(action) for action in resrelate
                         if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]

            for x in itertools.chain(resprint, resaction, resrelate):
                x['string'] = x['name']

            result['toolbar'] = {
                'action': resaction,
            # (remaining toolbar keys and return of ``result`` elided)
    # Expose the name-mangled helper under a single-underscore alias so
    # subclasses and other modules can call it without mangling issues.
    _view_look_dom_arch = __view_look_dom_arch
2313 def search_count(self, cr, user, args, context=None):
2316 res = self.search(cr, user, args, context=context, count=True)
2317 if isinstance(res, list):
2321 def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
2323 Search for records based on a search domain.
2325 :param cr: database cursor
2326 :param user: current user id
2327 :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
2328 :param offset: optional number of results to skip in the returned values (default: 0)
2329 :param limit: optional max number of records to return (default: **None**)
2330 :param order: optional columns to sort by (default: self._order=id )
2331 :param context: optional context arguments, like lang, time zone
2332 :type context: dictionary
2333 :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
2334 :return: id or list of ids of records matching the criteria
2335 :rtype: integer or list of integers
2336 :raise AccessError: * if user tries to bypass access rules for read on the requested object.
2338 **Expressing a search domain (args)**
2340 Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
2342 * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
2343 * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
2344 The semantics of most of these operators are obvious.
2345 The ``child_of`` operator will look for records who are children or grand-children of a given record,
2346 according to the semantics of this model (i.e following the relationship field named by
2347 ``self._parent_name``, by default ``parent_id``.
2348 * **value** must be a valid value to compare with the values of **field_name**, depending on its type.
2350 Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
2351 These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
2352 Be very careful about this when you combine them the first time.
2354 Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
2356 [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))
2358 The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
2360 (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
2363 return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
    def name_get(self, cr, user, ids, context=None):
        """Returns the preferred display value (text representation) for the records with the
        given ``ids``. By default this will be the value of the ``name`` column, unless
        the model implements a custom behavior.

        Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
        guaranteed to be.

        :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
        """
        # Accept a single id as well as a list of ids.
        if isinstance(ids, (int, long)):
            # (wrapping of the single id into a list elided in this excerpt)
        if self._rec_name in self._all_columns:
            # Let the column type render its own display value (e.g. a m2o
            # column knows how to format its target record).
            rec_name_column = self._all_columns[self._rec_name].column
            return [(r['id'], rec_name_column.as_display_name(cr, user, self, r[self._rec_name], context=context))
                    for r in self.read(cr, user, ids, [self._rec_name],
                                       load='_classic_write', context=context)]
        # Fallback: synthesize "model,id" when no usable rec_name exists.
        return [(id, "%s,%s" % (self._name, id)) for id in ids]
2387 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
2388 """Search for records that have a display name matching the given ``name`` pattern if compared
2389 with the given ``operator``, while also matching the optional search domain (``args``).
2390 This is used for example to provide suggestions based on a partial value for a relational
2392 Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
2395 This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
2396 and then :meth:`~.name_get` on the result of the search.
2398 :param list args: optional search domain (see :meth:`~.search` for syntax),
2399 specifying further restrictions
2400 :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
2402 :param int limit: optional max number of records to return
2404 :return: list of pairs ``(id,text_repr)`` for all matching records.
2406 return self._name_search(cr, user, name, args, operator, context, limit)
2408 def name_create(self, cr, uid, name, context=None):
2409 """Creates a new record by calling :meth:`~.create` with only one
2410 value provided: the name of the new record (``_rec_name`` field).
2411 The new record will also be initialized with any default values applicable
2412 to this model, or provided through the context. The usual behavior of
2413 :meth:`~.create` applies.
2414 Similarly, this method may raise an exception if the model has multiple
2415 required fields and some do not have default values.
2417 :param name: name of the record to create
2420 :return: the :meth:`~.name_get` pair value for the newly-created record.
2422 rec_id = self.create(cr, uid, {self._rec_name: name}, context);
2423 return self.name_get(cr, uid, [rec_id], context)[0]
    # private implementation of name_search, allows passing a dedicated user for the name_get part to
    # solve some access rights issues
    def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
        # (defaulting/copying of ``args`` elided in this excerpt)
        # optimize out the default criterion of ``ilike ''`` that matches everything
        if not (name == '' and operator == 'ilike'):
            args += [(self._rec_name, operator, name)]
        # name_get() may run with a more privileged uid than the search, so
        # that restricted users still see proper labels for allowed records.
        access_rights_uid = name_get_uid or user
        ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
        res = self.name_get(cr, access_rights_uid, ids, context)
        # (return of ``res`` elided in this excerpt)
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        # Return the translated field labels of this model for each language
        # in ``langs``; result maps lang -> {'code': lang, field: label}.
        self.pool.get('ir.translation').check_access_rights(cr, uid, 'read')
        # (``if not fields:`` guard elided -- default to every column)
        fields = self._columns.keys() + self._inherit_fields.keys()
        #FIXME: collect all calls to _get_source into one SQL call.
        # (loop over ``langs`` elided in this excerpt)
            res[lang] = {'code': lang}
            # (loop over ``fields`` elided in this excerpt)
            if f in self._columns:
                res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
                # (``if res_trans:`` elided -- use the translation when found)
                res[lang][f] = res_trans
                # (``else:`` elided -- fall back on the untranslated label)
                res[lang][f] = self._columns[f].string
        # Delegate the labels of inherited fields to the parent models.
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
        # (merge loop headers elided in this excerpt)
            res[lang]['code'] = lang
            for f in res2[lang]:
                res[lang][f] = res2[lang][f]
        # (return of ``res`` elided in this excerpt)
    def write_string(self, cr, uid, id, langs, vals, context=None):
        # Store translated field labels: for each language in ``langs``,
        # record ``vals[field]`` as the 'field' translation of the column.
        self.pool.get('ir.translation').check_access_rights(cr, uid, 'write')
        #FIXME: try to only call the translation in one SQL
        # (nested loops over ``langs`` and ``vals`` elided in this excerpt)
            if field in self._columns:
                # The untranslated column label serves as translation source.
                src = self._columns[field].string
                self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
        # Delegate labels of inherited fields to the parent models.
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), vals)
            # (guard on non-empty ``cols`` elided in this excerpt)
            self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
        # (return elided in this excerpt)
    def _add_missing_default_values(self, cr, uid, values, context=None):
        # Complete ``values`` with default values for every field of the
        # model (and its _inherits parents) the caller did not provide.
        missing_defaults = []
        avoid_tables = [] # avoid overriding inherited values when parent is set
        for tables, parent_field in self._inherits.items():
            if parent_field in values:
                avoid_tables.append(tables)
        for field in self._columns.keys():
            if not field in values:
                missing_defaults.append(field)
        for field in self._inherit_fields.keys():
            # Skip fields that belong to a parent record passed in directly.
            if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
                missing_defaults.append(field)

        if len(missing_defaults):
            # override defaults with the provided values, never allow the other way around
            defaults = self.default_get(cr, uid, missing_defaults, context)
            # (loop header over the ``defaults`` keys elided in this excerpt)
            if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
                    and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
                # Normalize a plain id list into the (6, 0, ids) command form.
                defaults[dv] = [(6, 0, defaults[dv])]
            if (dv in self._columns and self._columns[dv]._type == 'one2many' \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
                    and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
                # Normalize dict values into (0, 0, vals) creation commands.
                defaults[dv] = [(0, 0, x) for x in defaults[dv]]
            defaults.update(values)
        # (rebinding of ``values`` and return elided in this excerpt)
    def clear_caches(self):
        """ Clear the caches

        This clears the caches associated to methods decorated with
        ``tools.ormcache`` or ``tools.ormcache_multi``.
        """
        # (``try:`` header elided in this excerpt)
            # Probe for the cache attribute; models without any ormcache'd
            # method have no ``_ormcache`` and the AttributeError is ignored.
            getattr(self, '_ormcache')
            # (the actual cache-clearing statement is elided in this excerpt)
        except AttributeError:
            # (``pass`` elided -- nothing to clear on this model)
    def _read_group_fill_results(self, cr, uid, domain, groupby, groupby_list, aggregated_fields,
                                 read_group_result, read_group_order=None, context=None):
        """Helper method for filling in empty groups for all possible values of
           the field being grouped by"""

        # self._group_by_full should map groupable fields to a method that returns
        # a list of all aggregated values that we want to display for this field,
        # in the form of a m2o-like pair (key,label).
        # This is useful to implement kanban views for instance, where all columns
        # should be displayed even if they don't contain any record.

        # Grab the list of all groups that should be displayed, including all present groups
        present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
        # The full-groups provider runs as superuser so that empty columns are
        # listed even when the current user cannot read their records.
        all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
                                                         read_group_order=read_group_order,
                                                         access_rights_uid=openerp.SUPERUSER_ID,
        # (trailing context argument / closing paren elided in this excerpt)

        # Template for a synthesized empty group: all aggregates False, zero
        # count, and the remaining group_by levels in __context.
        result_template = dict.fromkeys(aggregated_fields, False)
        result_template[groupby + '_count'] = 0
        if groupby_list and len(groupby_list) > 1:
            result_template['__context'] = {'group_by': groupby_list[1:]}

        # Merge the left_side (current results as dicts) with the right_side (all
        # possible values as m2o pairs). Both lists are supposed to be using the
        # same ordering, and can be merged in one pass.
        # (initialisation of ``result``/``known_values`` elided in this excerpt)
        def append_left(left_side):
            grouped_value = left_side[groupby] and left_side[groupby][0]
            if not grouped_value in known_values:
                result.append(left_side)
                known_values[grouped_value] = left_side
            # (``else:`` elided -- refresh the count of the known group)
                count_attr = groupby + '_count'
                known_values[grouped_value].update({count_attr: left_side[count_attr]})
        def append_right(right_side):
            grouped_value = right_side[0]
            if not grouped_value in known_values:
                # Synthesize an empty group from the template.
                line = dict(result_template)
                line[groupby] = right_side
                line['__domain'] = [(groupby,'=',grouped_value)] + domain
                # (append of ``line`` to ``result`` elided in this excerpt)
                known_values[grouped_value] = line
        while read_group_result or all_groups:
            left_side = read_group_result[0] if read_group_result else None
            right_side = all_groups[0] if all_groups else None
            assert left_side is None or left_side[groupby] is False \
                   or isinstance(left_side[groupby], (tuple,list)), \
                   'M2O-like pair expected, got %r' % left_side[groupby]
            assert right_side is None or isinstance(right_side, (tuple,list)), \
                   'M2O-like pair expected, got %r' % right_side
            if left_side is None:
                append_right(all_groups.pop(0))
            elif right_side is None:
                append_left(read_group_result.pop(0))
            elif left_side[groupby] == right_side:
                append_left(read_group_result.pop(0))
                all_groups.pop(0) # discard right_side
            elif not left_side[groupby] or not left_side[groupby][0]:
                # left side == "Undefined" entry, not present on right_side
                append_left(read_group_result.pop(0))
            # (``else:`` elided -- emit the pending right side first)
                append_right(all_groups.pop(0))

        # Mark each group folded/unfolded per the provider's ``folded`` map.
        # (loop header over the merged results elided in this excerpt)
            r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
        # (return of the merged result list elided in this excerpt)
    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
        """
        Get the list of records in list view grouped by the given ``groupby`` fields

        :param cr: database cursor
        :param uid: current user id
        :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
        :param list fields: list of fields present in the list view specified on the object
        :param list groupby: fields by which the records will be grouped
        :param int offset: optional number of records to skip
        :param int limit: optional max number of records to return
        :param dict context: context arguments, like lang, time zone
        :param list orderby: optional ``order by`` specification, for
                             overriding the natural sort ordering of the
                             groups, see also :py:meth:`~osv.osv.osv.search`
                             (supported only for many2one fields currently)
        :return: list of dictionaries(one dictionary for each record) containing:

                    * the values of fields grouped by the fields in ``groupby`` argument
                    * __domain: list of tuples specifying the search criteria
                    * __context: dictionary with argument like ``groupby``
        :rtype: [{'field_name_1': value, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        context = context or {}
        self.check_access_rights(cr, uid, 'read')
        # (``if not fields:`` guard elided -- default to every column)
            fields = self._columns.keys()

        query = self._where_calc(cr, uid, domain, context=context)
        self._apply_ir_rules(cr, uid, query, 'read', context=context)

        # Take care of adding join(s) if groupby is an '_inherits'ed field
        groupby_list = groupby
        qualified_groupby_field = groupby
        # (guard elided in this excerpt)
        if isinstance(groupby, list):
            # Only the first level is grouped in SQL; deeper levels are passed
            # on through __context for the client to expand lazily.
            groupby = groupby[0]
            qualified_groupby_field = self._inherits_join_calc(groupby, query)

        # (guard elided in this excerpt)
        assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
        groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
        assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"

        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fget = self.fields_get(cr, uid, fields)
        group_count = group_by = groupby
        # (guard elided in this excerpt)
        if fget.get(groupby):
            groupby_type = fget[groupby]['type']
            if groupby_type in ('date', 'datetime'):
                # Group dates by month: to_char keeps only 'yyyy-mm'.
                qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
                flist = "%s as %s " % (qualified_groupby_field, groupby)
            elif groupby_type == 'boolean':
                # NULL booleans group together with False.
                qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
                flist = "%s as %s " % (qualified_groupby_field, groupby)
            # (``else:`` elided in this excerpt)
                flist = qualified_groupby_field
        # (``else:`` elided -- unknown groupby name is rejected)
            # Don't allow arbitrary values, as this would be a SQL injection vector!
            raise except_orm(_('Invalid group_by'),
                             _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))

        # Numeric stored columns (except id/sequence) get aggregated per group.
        aggregated_fields = [
            # (generator source ``f for f in fields`` elided in this excerpt)
            if f not in ('id', 'sequence')
            if fget[f]['type'] in ('integer', 'float')
            if (f in self._columns and getattr(self._columns[f], '_classic_write'))]

        for f in aggregated_fields:
            # group_operator lets a field override the default SUM aggregate.
            group_operator = fget[f].get('group_operator', 'sum')
            # (separator handling for ``flist`` elided in this excerpt)
            qualified_field = '"%s"."%s"' % (self._table, f)
            flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)

        gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''

        from_clause, where_clause, where_clause_params = query.get_sql()
        where_clause = where_clause and ' WHERE ' + where_clause
        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
            # (reassignment of ``group_count`` elided in this excerpt)
        cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
        # (initialisation of ``alldata`` elided in this excerpt)
        for r in cr.dictfetchall():
            for fld, val in r.items():
                # SQL NULL becomes False, the ORM's conventional empty value.
                if val == None: r[fld] = False
            alldata[r['id']] = r
            # (removal of the helper 'id' key elided in this excerpt)

        order = orderby or groupby
        data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)
        # the IDS of records that have groupby field value = False or '' should be sorted too
        data_ids += filter(lambda x:x not in data_ids, alldata.keys())
        data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
        # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
        data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))

        # Decorate each group with its drill-down __domain/__context.
        # (loop header over ``data`` elided in this excerpt)
            d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
            if not isinstance(groupby_list, (str, unicode)):
                if groupby or not context.get('group_by_no_leaf', False):
                    d['__context'] = {'group_by': groupby_list[1:]}
            if groupby and groupby in fget:
                if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
                    # The SQL groups per month ('yyyy-mm'); rebuild the month's
                    # day span for the drill-down domain and a localized label.
                    dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
                    days = calendar.monthrange(dt.year, dt.month)[1]
                    # (label branch header elided in this excerpt)
                    date_value = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d')
                    d[groupby] = babel.dates.format_date(
                        date_value, format='MMMM yyyy', locale=context.get('lang', 'en_US'))
                    d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
                                     (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
                del alldata[d['id']][groupby]
            d.update(alldata[d['id']])

        # Fill in empty groups when the model declares a full-groups provider.
        if groupby and groupby in self._group_by_full:
            data = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
                                                 aggregated_fields, data, read_group_order=order,
            # (trailing arguments and return of ``data`` elided in this excerpt)
def _inherits_join_add(self, current_table, parent_model_name, query):
    """
    Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)

    :param current_table: current model object
    :param parent_model_name: name of the parent model for which the clauses should be added
    :param query: query object on which the JOIN should be added
    """
    inherits_field = current_table._inherits[parent_model_name]
    parent_model = self.pool.get(parent_model_name)
    parent_table_name = parent_model._table
    quoted_parent_table_name = '"%s"' % parent_table_name
    if quoted_parent_table_name not in query.tables:
        # join only once: link each child row to its parent row through
        # the _inherits many2one column
        query.tables.append(quoted_parent_table_name)
        query.where_clause.append('(%s.%s = %s.id)' % (current_table._table, inherits_field, parent_table_name))
def _inherits_join_calc(self, field, query):
    """
    Adds missing table select and join clause(s) to ``query`` for reaching
    the field coming from an '_inherits' parent table (no duplicates).

    :param field: name of inherited field to reach
    :param query: query object on which the JOIN should be added
    :return: qualified name of field, to be used in SELECT clause
    """
    current_table = self
    # walk up the _inherits chain until we reach the model that actually
    # owns the column, adding one JOIN per hop
    while field in current_table._inherit_fields and not field in current_table._columns:
        parent_model_name = current_table._inherit_fields[field][0]
        parent_table = self.pool.get(parent_model_name)
        self._inherits_join_add(current_table, parent_model_name, query)
        current_table = parent_table
    return '"%s".%s' % (current_table._table, field)
def _parent_store_compute(self, cr):
    """Recompute the ``parent_left``/``parent_right`` nested-set interval
    columns for the whole table, by depth-first traversal from the roots.

    NOTE(review): this excerpt had several control-flow lines elided; the
    guard/accumulator lines restored below follow the standard upstream
    implementation — confirm against the full file.

    :param cr: database cursor
    :return: True when the computation ran
    """
    if not self._parent_store:
        return
    _logger.info('Computing parent left and right for table %s...', self._table)
    def browse_rec(root, pos=0):
        # assign the [pos, pos2] interval to ``root`` and recurse into its
        # children, ordered by _parent_order when defined
        where = self._parent_name+'='+str(root)
        if not root:
            where = self._parent_name+' IS NULL'
        if self._parent_order:
            where += ' order by '+self._parent_order
        cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
        pos2 = pos + 1
        for id in cr.fetchall():
            pos2 = browse_rec(id[0], pos2)
        cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
        return pos2 + 1
    query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
    if self._parent_order:
        query += ' order by ' + self._parent_order
    pos = 0
    cr.execute(query)
    for (root,) in cr.fetchall():
        pos = browse_rec(root, pos)
    return True
def _update_store(self, cr, f, k):
    """Recompute and store the values of the stored function field ``k``
    for every record of the table, in batches of 40 ids.

    NOTE(review): the batching loop and multi-field handling were elided
    from this excerpt and are restored per the standard upstream
    implementation — confirm against the full file.

    :param cr: database cursor
    :param f: the fields.function column object
    :param k: the field name
    """
    _logger.info("storing computed values of fields.function '%s'", k)
    ss = self._columns[k]._symbol_set
    update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
    cr.execute('select id from '+self._table)
    ids_lst = map(lambda x: x[0], cr.fetchall())
    while ids_lst:
        # process in chunks of 40 ids to bound memory usage
        iids = ids_lst[:40]
        ids_lst = ids_lst[40:]
        res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
        for key, val in res.items():
            if f._multi:
                val = val[k]
            # if val is a many2one, just write the ID
            if type(val) == tuple:
                val = val[0]
            if val is not False:
                cr.execute(update_query, (ss[1](val), key))
def _check_selection_field_value(self, cr, uid, field, value, context=None):
    """Raise except_orm if value is not among the valid values for the selection field

    NOTE(review): the try/except around the id parsing and the
    ``val``/``return`` lines were elided from this excerpt and are restored
    per the standard upstream implementation — confirm against the full file.
    """
    if self._columns[field]._type == 'reference':
        # reference values look like 'model.name,id': validate the id part
        val_model, val_id_str = value.split(',', 1)
        val_id = False
        try:
            val_id = long(val_id_str)
        except ValueError:
            pass
        if not val_id:
            raise except_orm(_('ValidateError'),
                             _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
        # for references, the selection is checked against the model part
        val = val_model
    else:
        val = value
    if isinstance(self._columns[field].selection, (tuple, list)):
        # static selection list
        if val in dict(self._columns[field].selection):
            return
    elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
        # dynamic selection: evaluate the selection function
        return
    raise except_orm(_('ValidateError'),
                     _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
def _check_removed_columns(self, cr, log=False):
    """Drop the NOT NULL constraints of database columns that no longer
    correspond to a declared column (removed fields, or fields that will
    be re-added by another module).

    :param cr: database cursor
    :param log: when True, log every orphaned database column found
    """
    # iterate on the database columns to drop the NOT NULL constraints
    # of fields which were required but have been removed (or will be added by another module)
    columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
    columns += MAGIC_COLUMNS
    cr.execute("SELECT a.attname, a.attnotnull"
               " FROM pg_class c, pg_attribute a"
               " WHERE c.relname=%s"
               " AND c.oid=a.attrelid"
               " AND a.attisdropped=%s"
               " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
               " AND a.attname NOT IN %s", (self._table, False, tuple(columns)))

    for column in cr.dictfetchall():
        # NOTE(review): the ``if log:`` guard below was elided from this
        # excerpt and is restored to match the ``log`` parameter's intent.
        if log:
            _logger.debug("column %s is in the table %s but not in the corresponding object %s",
                          column['attname'], self._table, self._name)
        if column['attnotnull']:
            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
            _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                          self._table, column['attname'])
2848 def _save_constraint(self, cr, constraint_name, type):
2850 Record the creation of a constraint for this model, to make it possible
2851 to delete it later when the module is uninstalled. Type can be either
2852 'f' or 'u' depending on the constraing being a foreign key or not.
2854 assert type in ('f', 'u')
2856 SELECT 1 FROM ir_model_constraint, ir_module_module
2857 WHERE ir_model_constraint.module=ir_module_module.id
2858 AND ir_model_constraint.name=%s
2859 AND ir_module_module.name=%s
2860 """, (constraint_name, self._module))
2863 INSERT INTO ir_model_constraint
2864 (name, date_init, date_update, module, model, type)
2865 VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
2866 (SELECT id FROM ir_module_module WHERE name=%s),
2867 (SELECT id FROM ir_model WHERE model=%s), %s)""",
2868 (constraint_name, self._module, self._name, type))
2870 def _save_relation_table(self, cr, relation_table):
2872 Record the creation of a many2many for this model, to make it possible
2873 to delete it later when the module is uninstalled.
2876 SELECT 1 FROM ir_model_relation, ir_module_module
2877 WHERE ir_model_relation.module=ir_module_module.id
2878 AND ir_model_relation.name=%s
2879 AND ir_module_module.name=%s
2880 """, (relation_table, self._module))
2882 cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
2883 VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
2884 (SELECT id FROM ir_module_module WHERE name=%s),
2885 (SELECT id FROM ir_model WHERE model=%s))""",
2886 (relation_table, self._module, self._name))
2888 # checked version: for direct m2o starting from `self`
def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
    """Record a foreign key from this model's many2one column
    ``source_field`` towards ``dest_model``, enforcing the rule that a
    regular model may not point to a transient one.

    The key is only queued in ``self._foreign_keys``; the actual ALTER
    TABLE happens later, in ``_auto_end``.
    """
    src_transient = self.is_transient()
    dst_transient = dest_model.is_transient()
    assert src_transient or not dst_transient, \
        'Many2One relationships from non-transient Model to TransientModel are forbidden'
    if src_transient and not dst_transient:
        # TransientModel relationships to regular Models are annoying
        # usually because they could block deletion due to the FKs.
        # So unless stated otherwise we default them to ondelete=cascade.
        ondelete = ondelete or 'cascade'
    fk_spec = (self._table, source_field, dest_model._table, ondelete or 'set null')
    self._foreign_keys.append(fk_spec)
    _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
                  self._table, source_field, dest_model._table, ondelete)
2901 # unchecked version: for custom cases, such as m2m relationships
def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
    """Record a foreign key from an arbitrary ``source_table`` towards
    ``dest_model`` without the transient-model check (used e.g. for
    many2many relation tables). Actual creation happens in ``_auto_end``.
    """
    deletion_rule = ondelete or 'set null'
    self._foreign_keys.append((source_table, source_field, dest_model._table, deletion_rule))
    _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
                  source_table, source_field, dest_model._table, ondelete)
2907 def _drop_constraint(self, cr, source_table, constraint_name):
2908 cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
    """Inspect the FK constraint(s) currently backing the many2one column
    ``source_field`` of ``source_table``, and drop/recreate them when they
    do not match the expected target table or ON DELETE rule.

    NOTE(review): a few lines of this excerpt appear elided (e.g. the
    selection of the single constraint and an ``else:`` branch header);
    the indentation below is a best-effort reconstruction — confirm
    against the full file before changing control flow.
    """
    # Find FK constraint(s) currently established for the m2o field,
    # and see whether they are stale or not
    cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
                         cl2.relname as foreign_table
                    FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                         pg_attribute as att1, pg_attribute as att2
                   WHERE con.conrelid = cl1.oid
                     AND cl1.relname = %s
                     AND con.confrelid = cl2.oid
                     AND array_lower(con.conkey, 1) = 1
                     AND con.conkey[1] = att1.attnum
                     AND att1.attrelid = cl1.oid
                     AND att1.attname = %s
                     AND array_lower(con.confkey, 1) = 1
                     AND con.confkey[1] = att2.attnum
                     AND att2.attrelid = cl2.oid
                     AND att2.attname = %s
                     AND con.contype = 'f'""", (source_table, source_field, 'id'))
    constraints = cr.dictfetchall()

    if len(constraints) == 1:
        # Is it the right constraint?
        # (``cons = constraints[0]`` appears elided in this excerpt)
        if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
                or cons['foreign_table'] != dest_model._table:
            # stale ON DELETE rule or wrong target table: replace the FK
            _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
                          source_table, cons['constraint_name'])
            self._drop_constraint(cr, source_table, cons['constraint_name'])
            self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
        # else it's all good, nothing to do!
        # (``else:`` branch header appears elided in this excerpt)
        # Multiple FKs found for the same field, drop them all, and re-create
        for cons in constraints:
            _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
                          source_table, cons['constraint_name'])
            self._drop_constraint(cr, source_table, cons['constraint_name'])
        self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
def _auto_init(self, cr, context=None):
    """
    Call _field_create and, unless _auto is False:

    - create the corresponding table in database for the model,
    - possibly add the parent columns in database,
    - possibly add the columns 'create_uid', 'create_date', 'write_uid',
      'write_date' in database if _log_access is True (the default),
    - report on database columns no more existing in _columns,
    - remove no more existing not null constraints,
    - alter existing database columns to match _columns,
    - create database tables to match _columns,
    - add database indices to match _columns,
    - save in self._foreign_keys a list a foreign keys to create (see
      _auto_end).

    NOTE(review): this excerpt has many lines elided (``todo_end`` setup,
    ``continue``/``else``/``try``/``except`` lines, the ``casts`` list
    header, the column-move ``while`` loop and the final ``return``); the
    indentation below is a best-effort reconstruction — confirm against
    the full file before changing any control flow.
    """
    self._foreign_keys = []
    raise_on_invalid_object_name(self._name)
    # (context guard and ``todo_end`` initialisation elided in excerpt)
    store_compute = False
    update_custom_fields = context.get('update_custom_fields', False)
    self._field_create(cr, context=context)
    create = not self._table_exist(cr)
    if getattr(self, '_auto', True):
        # (``if create:`` guard elided in excerpt)
        self._create_table(cr)

        # parent_left/parent_right back the nested-set hierarchy support
        if self._parent_store:
            if not self._parent_columns_exist(cr):
                self._create_parent_columns(cr)
                store_compute = True

        # Create the create_uid, create_date, write_uid, write_date, columns if desired.
        if self._log_access:
            self._add_log_columns(cr)

        self._check_removed_columns(cr, log=False)

        # iterate on the "object columns"
        column_data = self._select_column_data(cr)

        for k, f in self._columns.iteritems():
            if k in MAGIC_COLUMNS:
                # (``continue`` elided in excerpt)
            # Don't update custom (also called manual) fields
            if f.manual and not update_custom_fields:
                # (``continue`` elided in excerpt)

            if isinstance(f, fields.one2many):
                self._o2m_raise_on_missing_reference(cr, f)

            elif isinstance(f, fields.many2many):
                self._m2m_raise_or_create_relation(cr, f)

            res = column_data.get(k)

            # The field is not found as-is in database, try if it
            # exists with an old name.
            if not res and hasattr(f, 'oldname'):
                res = column_data.get(f.oldname)
                # (``if res:`` guard elided in excerpt)
                cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
                column_data[k] = res
                _schema.debug("Table '%s': renamed column '%s' to '%s'",
                              self._table, f.oldname, k)

            # The field already exists in database. Possibly
            # change its type, rename it, drop it or change its
            # constraints.
            # (``if res:`` guard elided in excerpt)
            f_pg_type = res['typname']
            f_pg_size = res['size']
            f_pg_notnull = res['attnotnull']
            # non-stored function fields keep no database column: drop it
            if isinstance(f, fields.function) and not f.store and\
                    not getattr(f, 'nodrop', False):
                _logger.info('column %s (%s) in table %s removed: converted to a function !\n',
                             k, f.string, self._table)
                cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
                _schema.debug("Table '%s': dropped column '%s' with cascade",
                # (continuation arguments elided in excerpt)

            f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
            # (``if f_obj_type:`` guard and the ``casts = [`` list header
            # elided in excerpt; each entry is
            # (pg type, orm type, new db type, SQL cast suffix))
                ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
                ('varchar', 'text', 'TEXT', ''),
                ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
                ('timestamp', 'date', 'date', '::date'),
                ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),

            # widen a varchar column in place, going through a temp column
            if f_pg_type == 'varchar' and f._type == 'char' and ((f.size is None and f_pg_size) or f_pg_size < f.size):
                cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
                cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
                cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
                              self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')

            # (``for c in casts:`` loop header elided in excerpt)
            if (f_pg_type==c[0]) and (f._type==c[1]):
                if f_pg_type != f_obj_type:
                    # compatible type change: cast via a temp column
                    cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
                    cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
                    cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                    _schema.debug("Table '%s': column '%s' changed type from %s to %s",
                                  self._table, k, c[0], c[1])

            if f_pg_type != f_obj_type:
                # incompatible type change: move old data aside into a
                # renamed column (``while`` loop computing a free suffix
                # ``i`` elided in excerpt)
                newname = k + '_moved' + str(i)
                cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
                    "WHERE c.relname=%s " \
                    "AND a.attname=%s " \
                    "AND c.oid=a.attrelid ", (self._table, newname))
                if not cr.fetchone()[0]:
                    # (``break``/``i += 1`` bookkeeping elided in excerpt)
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
                              self._table, k, f_pg_type, f._type, newname)

            # if the field is required and hasn't got a NOT NULL constraint
            if f.required and f_pg_notnull == 0:
                # set the field to the default value if any
                if k in self._defaults:
                    if callable(self._defaults[k]):
                        default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                    # (``else:`` elided in excerpt)
                        default = self._defaults[k]

                    if (default is not None):
                        ss = self._columns[k]._symbol_set
                        query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
                        cr.execute(query, (ss[1](default),))
                # add the NOT NULL constraint
                # (``try:`` elided in excerpt)
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                _schema.debug("Table '%s': column '%s': added NOT NULL constraint",
                # (continuation arguments and ``except Exception:`` elided in excerpt)
                msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
                    "If you want to have it, you should update the records and execute manually:\n"\
                    "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                _schema.warning(msg, self._table, k, self._table, k)

            elif not f.required and f_pg_notnull == 1:
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                # (continuation arguments elided in excerpt)

            # verify whether the column's index matches the field's select flag
            indexname = '%s_%s_index' % (self._table, k)
            cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
            res2 = cr.dictfetchall()
            if not res2 and f.select:
                cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))

                if f._type == 'text':
                    # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
                    msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\
                        "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
                        " because there is a length limit for indexable btree values!\n"\
                        "Use a search view instead if you simply want to make the field searchable."
                    _schema.warning(msg, self._table, f._type, k)
            if res2 and not f.select:
                cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))

                msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
                _schema.debug(msg, self._table, k, f._type)

            if isinstance(f, fields.many2one):
                dest_model = self.pool.get(f._obj)
                if dest_model._table != 'ir_actions':
                    self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete)

            # The field doesn't exist in database. Create it if necessary.
            # (``else:`` branch header elided in excerpt)
            if not isinstance(f, fields.function) or f.store:
                # add the missing field
                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                _schema.debug("Table '%s': added column '%s' with definition=%s",
                              self._table, k, get_pg_type(f)[1])

                # initialize it with the default value if any
                if not create and k in self._defaults:
                    if callable(self._defaults[k]):
                        default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                    # (``else:`` elided in excerpt)
                        default = self._defaults[k]
                    # (``if default is not None:`` guard elided in excerpt)
                    ss = self._columns[k]._symbol_set
                    query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
                    cr.execute(query, (ss[1](default),))

                    _logger.debug("Table '%s': setting default value of new column %s", self._table, k)

                # remember the functions to call for the stored fields
                if isinstance(f, fields.function):
                    # (default ``order`` assignment elided in excerpt)
                    if f.store is not True: # i.e. if f.store is a dict
                        order = f.store[f.store.keys()[0]][2]
                    todo_end.append((order, self._update_store, (f, k)))

                # and add constraints if needed
                if isinstance(f, fields.many2one):
                    if not self.pool.get(f._obj):
                        raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
                    dest_model = self.pool.get(f._obj)
                    ref = dest_model._table
                    # ir_actions is inherited so foreign key doesn't work on it
                    if ref != 'ir_actions':
                        self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
                # (``if f.select:`` guard elided in excerpt)
                cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                # (``if f.required:`` and ``try:`` elided in excerpt)
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                _schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
                # (continuation arguments and ``except Exception:`` elided in excerpt)
                msg = "WARNING: unable to set column %s of table %s not null !\n"\
                    "Try to re-run: openerp-server --update=module\n"\
                    "If it doesn't work, update records and execute manually:\n"\
                    "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                _logger.warning(msg, k, self._table, self._table, k)

    # (``else:`` branch for _auto == False elided in excerpt)
    cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
    create = not bool(cr.fetchone())

    cr.commit() # start a new transaction

    # (enclosing guard elided in excerpt)
    self._add_sql_constraints(cr)

    # (``if create:`` guard elided in excerpt)
    self._execute_sql(cr)

    # (``if store_compute:`` guard elided in excerpt)
    self._parent_store_compute(cr)
    # (final ``cr.commit()`` / ``return todo_end`` elided in excerpt)
3223 def _auto_end(self, cr, context=None):
3224 """ Create the foreign keys recorded by _auto_init. """
3225 for t, k, r, d in self._foreign_keys:
3226 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
3227 self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
3229 del self._foreign_keys
3232 def _table_exist(self, cr):
3233 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
def _create_table(self, cr):
    """Create the model's base table, containing only the serial primary
    key, and attach the model description as a table comment."""
    table = self._table
    cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (table,))
    comment_stmt = "COMMENT ON TABLE \"%s\" IS %%s" % table
    cr.execute(comment_stmt, (self._description,))
    _schema.debug("Table '%s': created", table)
3243 def _parent_columns_exist(self, cr):
3244 cr.execute("""SELECT c.relname
3245 FROM pg_class c, pg_attribute a
3246 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
3247 """, (self._table, 'parent_left'))
def _create_parent_columns(self, cr):
    """Add the ``parent_left``/``parent_right`` nested-set columns to the
    table, and warn when the model definition does not declare matching
    indexed fields or a cascading parent column.

    NOTE(review): the truncated logger-call continuation arguments were
    restored in this excerpt — confirm against the full file.
    """
    cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
    cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
    if 'parent_left' not in self._columns:
        _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
                      self._name)
        _schema.debug("Table '%s': added column '%s' with definition=%s",
                      self._table, 'parent_left', 'INTEGER')
    elif not self._columns['parent_left'].select:
        _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
                      self._name)
    if 'parent_right' not in self._columns:
        _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
                      self._name)
        _schema.debug("Table '%s': added column '%s' with definition=%s",
                      self._table, 'parent_right', 'INTEGER')
    elif not self._columns['parent_right'].select:
        _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
                      self._name)
    if self._columns[self._parent_name].ondelete != 'cascade':
        _logger.error("The column %s on object %s must be set as ondelete='cascade'",
                      self._parent_name, self._name)
def _add_log_columns(self, cr):
    """Ensure the audit-trail columns defined in LOG_ACCESS_COLUMNS
    (create_uid/create_date/write_uid/write_date) exist on the table,
    adding any that are missing.

    NOTE(review): the catalog-lookup ``cr.execute`` opener, the
    ``if not cr.rowcount:`` guard and the ``cr.commit()`` were elided from
    this excerpt and are restored per the standard upstream implementation
    — confirm against the full file.
    """
    for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
        cr.execute("""
            SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, field))
        if not cr.rowcount:
            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
            cr.commit()
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, field, field_def)
3291 def _select_column_data(self, cr):
3292 # attlen is the number of bytes necessary to represent the type when
3293 # the type has a fixed size. If the type has a varying size attlen is
3294 # -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
3295 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \
3296 "FROM pg_class c,pg_attribute a,pg_type t " \
3297 "WHERE c.relname=%s " \
3298 "AND c.oid=a.attrelid " \
3299 "AND a.atttypid=t.oid", (self._table,))
3300 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
def _o2m_raise_on_missing_reference(self, cr, f):
    """Sanity-check a one2many field: its comodel must carry the inverse
    column ``f._fields_id``, either directly or via _inherits.

    NOTE(review): two lines appear elided from this excerpt (presumably
    blanks or a guard around the pool lookup) — confirm against the full
    file.
    """
    # TODO this check should be a method on fields.one2many.
    other = self.pool.get(f._obj)
    # TODO the condition could use fields_get_keys().
    if f._fields_id not in other._columns.keys():
        if f._fields_id not in other._inherit_fields.keys():
            raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
def _m2m_raise_or_create_relation(self, cr, f):
    """Create the many2many relation table for field ``f`` when it does not
    exist yet (columns, foreign keys, indexes and table comment); raise
    when the comodel is unknown.

    NOTE(review): two lines appear elided from this excerpt (between the
    FK creation and the index creation, and before the final debug) —
    confirm against the full file.
    """
    m2m_tbl, col1, col2 = f._sql_names(self)
    # remember the relation table so module uninstall can drop it later
    self._save_relation_table(cr, m2m_tbl)
    cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
    if not cr.dictfetchall():
        if not self.pool.get(f._obj):
            raise except_orm('Programming Error', ('Many2Many destination model does not exist: `%s`') % (f._obj,))
        dest_model = self.pool.get(f._obj)
        ref = dest_model._table
        cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s")) WITH OIDS' % (m2m_tbl, col1, col2, col1, col2))
        # create foreign key references with ondelete=cascade, unless the targets are SQL views
        cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
        if not cr.fetchall():
            self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
        cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
        if not cr.fetchall():
            self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')

        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
        cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
        # (one line elided in excerpt here — possibly a cr.commit(); confirm upstream)
        _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
def _add_sql_constraints(self, cr):
    """
    Modify this model's database table constraints so they match the one in
    _sql_constraints.

    NOTE(review): this excerpt has lines elided (the ``sql_actions`` dict
    literal header with its ``order``/``execute`` keys, and the ``try`` /
    ``except`` around the final execute) — the indentation below is a
    best-effort reconstruction; confirm against the full file.
    """
    def unify_cons_text(txt):
        # normalize pg_get_constraintdef output before comparing definitions
        return txt.lower().replace(', ',',').replace(' (','(')

    for (key, con, _) in self._sql_constraints:
        conname = '%s_%s' % (self._table, key)

        # record the constraint so uninstalling the module can drop it later
        self._save_constraint(cr, conname, 'u')
        cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
        existing_constraints = cr.dictfetchall()
        # (``sql_actions = {`` dict header and 'drop' entry header elided in excerpt)
            'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
            'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                self._table, conname, con),
            'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
        # ('add' entry header elided in excerpt)
            'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
            'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
            'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
            # (continuation and closing braces elided in excerpt)

        if not existing_constraints:
            # constraint does not exists:
            sql_actions['add']['execute'] = True
            sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
        elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
            # constraint exists but its definition has changed:
            sql_actions['drop']['execute'] = True
            sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
            sql_actions['add']['execute'] = True
            sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )

        # we need to add the constraint:
        sql_actions = [item for item in sql_actions.values()]
        sql_actions.sort(key=lambda x: x['order'])
        for sql_action in [action for action in sql_actions if action['execute']]:
            # (``try:`` elided in excerpt)
            cr.execute(sql_action['query'])
            _schema.debug(sql_action['msg_ok'])
            # (``except:`` elided in excerpt)
            _schema.warning(sql_action['msg_err'])
3397 def _execute_sql(self, cr):
3398 """ Execute the SQL code from the _sql attribute (if any)."""
3399 if hasattr(self, "_sql"):
3400 for line in self._sql.split(';'):
3401 line2 = line.replace('\n', '').strip()
3407 # Update objects that uses this one to update their _inherits fields
3410 def _inherits_reload_src(self):
3411 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3412 for obj in self.pool.models.values():
3413 if self._name in obj._inherits:
3414 obj._inherits_reload()
3417 def _inherits_reload(self):
3418 """ Recompute the _inherit_fields mapping.
3420 This will also call itself on each inherits'd child model.
3424 for table in self._inherits:
3425 other = self.pool.get(table)
3426 for col in other._columns.keys():
3427 res[col] = (table, self._inherits[table], other._columns[col], table)
3428 for col in other._inherit_fields.keys():
3429 res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
3430 self._inherit_fields = res
3431 self._all_columns = self._get_column_infos()
3432 self._inherits_reload_src()
def _get_column_infos(self):
    """Returns a dict mapping all fields names (direct fields and
    inherited field via _inherits) to a ``column_info`` struct
    giving detailed columns

    NOTE(review): the ``result = {}`` initialisation and ``return result``
    were elided from this excerpt and are restored — the loops below
    accumulate into the dict the method returns.
    """
    result = {}
    # inherited fields first, so direct columns override them on a name clash
    for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
        result[k] = fields.column_info(k, col, parent, m2o, original_parent)
    for k, col in self._columns.iteritems():
        result[k] = fields.column_info(k, col)
    return result
def _inherits_check(self):
    """Validate the _inherits links: each one must be backed by a required
    many2one column with ondelete="cascade". A missing column is created
    automatically; a misconfigured one is forced into compliance."""
    for parent_model, link_field in self._inherits.items():
        if link_field not in self._columns:
            _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', link_field, self._name)
            self._columns[link_field] = fields.many2one(parent_model, string="Automatically created field to link to parent %s" % parent_model,
                                                        required=True, ondelete="cascade")
        else:
            column = self._columns[link_field]
            if not column.required or column.ondelete.lower() != "cascade":
                _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.', link_field, self._name)
                column.required = True
                column.ondelete = "cascade"
3458 #def __getattr__(self, name):
3460 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3461 # (though inherits doesn't use Python inheritance).
3462 # Handles translating between local ids and remote ids.
3463 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3464 # when you have inherits.
3466 # for model, field in self._inherits.iteritems():
3467 # proxy = self.pool.get(model)
3468 # if hasattr(proxy, name):
3469 # attribute = getattr(proxy, name)
3470 # if not hasattr(attribute, '__call__'):
3474 # return super(orm, self).__getattr__(name)
3476 # def _proxy(cr, uid, ids, *args, **kwargs):
3477 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3478 # lst = [obj[field].id for obj in objects if obj[field]]
3479 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
    """ Return the definition of each field.

    The returned value is a dictionary (indiced by field name) of
    dictionaries. The _inherits'd fields are included. The string, help,
    and selection (if present) attributes are translated.

    :param cr: database cursor
    :param user: current user id
    :param fields: list of fields
    :param context: context arguments, like lang, time zone
    :return: dictionary of field dictionaries, each one describing a field of the business object
    :raise AccessError: * if user has no create/write rights on the requested object

    NOTE(review): this excerpt has lines elided (the ``context``/``res``
    initialisation, several ``continue``/``if`` guards and the final
    ``return res``); the indentation below is a best-effort reconstruction
    — confirm against the full file.
    """
    # fields are only reported as writable if the user can write the model
    write_access = self.check_access_rights(cr, user, 'write', raise_exception=False) \
        or self.check_access_rights(cr, user, 'create', raise_exception=False)
    # (``res = {}`` initialisation elided in excerpt)
    translation_obj = self.pool.get('ir.translation')
    # include the definitions of all _inherits'd parents
    for parent in self._inherits:
        res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

    for f, field in self._columns.iteritems():
        # skip fields not requested or not visible to the user's groups
        if (allfields and f not in allfields) or \
                (field.groups and not self.user_has_groups(cr, user, groups=field.groups, context=context)):
            # (``continue`` elided in excerpt)

        res[f] = fields.field_to_dict(self, cr, user, field, context=context)

        if not write_access:
            res[f]['readonly'] = True
            res[f]['states'] = {}

        # translate string/help/selection labels when a language is active
        if 'lang' in context:
            if 'string' in res[f]:
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context['lang'])
                # (``if res_trans:`` guard elided in excerpt)
                res[f]['string'] = res_trans
            if 'help' in res[f]:
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context['lang'])
                # (``if help_trans:`` guard elided in excerpt)
                res[f]['help'] = help_trans
            if 'selection' in res[f]:
                if isinstance(field.selection, (tuple, list)):
                    sel = field.selection
                    # (``sel2 = []`` initialisation elided in excerpt)
                    for key, val in sel:
                        # (``val2 = None`` / ``if val:`` guard elided in excerpt)
                        val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context['lang'], val)
                        sel2.append((key, val2 or val))
                    res[f]['selection'] = sel2
    # (``return res`` elided in excerpt)
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
    """ Read records with given ids with the given fields

    :param cr: database cursor
    :param user: current user id
    :param ids: id or list of the ids of the records to read
    :param fields: optional list of field names to return (default: all fields would be returned)
    :type fields: list (example ['field_name_1', ...])
    :param context: optional context dictionary - it may contain keys for specifying certain options
                    like ``context_lang``, ``context_tz`` to alter the results of the call.
                    A special ``bin_size`` boolean flag may also be passed in the context to request the
                    value of all fields.binary columns to be returned as the size of the binary instead of its
                    contents. This can also be selectively overridden by passing a field-specific flag
                    in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                    Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
    :return: list of dictionaries (one dictionary per record asked) with requested field values
    :rtype: [{'name_of_the_field': value, ...}, ...]
    :raise AccessError: * if user has no read rights on the requested object
                        * if user tries to bypass access rules for read on the requested object
    """
    self.check_access_rights(cr, user, 'read')
    # Default to every known field, local columns and inherited ones alike.
    fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
    if isinstance(ids, (int, long)):
    # Browse-style dicts may be passed instead of plain ids; reduce to ids.
    select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
    result = self._read_flat(cr, user, select, fields, context, load)
    for key, v in r.items():
    # A single id in means a single record dict out (or False when missing).
    if isinstance(ids, (int, long, dict)):
        return result and result[0] or False
def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
    # Low-level companion of read(): fetches the requested columns for
    # ``ids`` in one SQL pass (respecting ir.rule record rules), then
    # post-processes inherited, translated and function fields.
    if fields_to_read == None:
        fields_to_read = self._columns.keys()

    # Construct a clause for the security rules.
    # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
    # or will at least contain self._table.
    rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

    # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
    fields_pre = [f for f in fields_to_read if
                  f == self.CONCURRENCY_CHECK_FIELD
                  or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                 ] + self._inherits.values()

    def convert_field(f):
        # Map a field name to the SQL expression that selects it.
        f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
        if f in ('create_date', 'write_date'):
            # Truncate audit timestamps to whole seconds.
            return "date_trunc('second', %s) as %s" % (f_qual, f)
        if f == self.CONCURRENCY_CHECK_FIELD:
            # The concurrency marker is the latest of write/create date,
            # or 'now' when audit logging is disabled.
            if self._log_access:
                return "COALESCE(%s.write_date, %s.create_date, (now() at time zone 'UTC'))::timestamp AS %s" % (self._table, self._table, f,)
            return "(now() at time zone 'UTC')::timestamp AS %s" % (f,)
        if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
            # bin_size mode: return the size of the binary value, not the content.
            return 'length(%s) as "%s"' % (f_qual, f)

    fields_pre2 = map(convert_field, fields_pre)
    order_by = self._parent_order or self._order
    select_fields = ','.join(fields_pre2 + [self._table + '.id'])
    query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
    query += " AND " + (' OR '.join(rule_clause))
    query += " ORDER BY " + order_by
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute(query, [tuple(sub_ids)] + rule_params)
        # Record rules may have filtered out some requested ids; raise then.
        self._check_record_rules_result_count(cr, user, sub_ids, 'read', context=context)
        cr.execute(query, (tuple(sub_ids),))
        res.extend(cr.dictfetchall())
    res = map(lambda x: {'id': x}, ids)

    # Overlay translated values on translatable classic columns.
    for f in fields_pre:
        if f == self.CONCURRENCY_CHECK_FIELD:
        if self._columns[f].translate:
            ids = [x['id'] for x in res]
            #TODO: optimize out of this loop
            res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
            r[f] = res_trans.get(r['id'], False) or r[f]

    # Fetch fields delegated to the _inherits parents through a recursive read().
    for table in self._inherits:
        col = self._inherits[table]
        cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
        res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
        if not record[col]: # if the record is deleted from _inherits table?
        record.update(res3[record[col]])
        if col not in fields_to_read:

    # all fields which need to be post-processed by a simple function (symbol_get)
    fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
    for f in fields_post:
        r[f] = self._columns[f]._symbol_get(r[f])
    ids = [x['id'] for x in res]

    # all non inherited fields for which the attribute whose name is in load is False
    fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

    # Compute POST fields
    for f in fields_post:
        # Group function fields sharing the same 'multi' so they are
        # computed together in a single get() call.
        todo.setdefault(self._columns[f]._multi, [])
        todo[self._columns[f]._multi].append(f)
    for key, val in todo.items():
        res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
        assert res2 is not None, \
            'The function field "%s" on the "%s" model returned None\n' \
            '(a dictionary was expected).' % (val[0], self._name)
        if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instead of dict in python2.6
        multi_fields = res2.get(record['id'],{})
        record[pos] = multi_fields.get(pos,[])
        res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
        record[f] = res2[record['id']]

    # Warn about deprecated fields now that fields_pre and fields_post are computed
    # Explicitly use list() because we may receive tuples
    for f in list(fields_pre) + list(fields_post):
        field_column = self._all_columns.get(f) and self._all_columns.get(f).column
        if field_column and field_column.deprecated:
            _logger.warning('Field %s.%s is deprecated: %s', self._name, f, field_column.deprecated)
# NOTE(review): the enclosing ``def`` is not visible at this point in the
# file -- this loop appears to censor values in ``vals`` for fields whose
# read groups the user does not belong to; confirm against the full method.
for field in vals.copy():
    if field in self._columns:
        fobj = self._columns[field]
    for group in groups:
        # Groups are given as "module.xml_id" external identifiers.
        module = group.split(".")[0]
        grp = group.split(".")[1]
        # Count the user's memberships of this group (parameterized query).
        cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                   (grp, module, 'res.groups', user))
        readonly = cr.fetchall()
        if readonly[0][0] >= 1:
        elif readonly[0][0] == 0:
    # Replace the forbidden value with a type-appropriate placeholder.
    if type(vals[field]) == type([]):
    elif type(vals[field]) == type(0.0):
    elif type(vals[field]) == type(''):
        vals[field] = '=No Permission='
3747 # TODO check READ access
def perm_read(self, cr, user, ids, context=None, details=True):
    """
    Returns some metadata about the given records.

    :param details: if True, \*_uid fields are replaced with the name of the user
    :return: list of ownership dictionaries for each requested record
    :rtype: list of dictionaries with the following keys:

        * create_uid: user who created the record
        * create_date: date when the record was created
        * write_uid: last user who changed the record
        * write_date: date of the last change to the record
        * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
    """
    # A scalar id in means a scalar result out.
    uniq = isinstance(ids, (int, long))
    if self._log_access:
        fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
    quoted_table = '"%s"' % self._table
    fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
    # The LEFT JOIN on ir_model_data retrieves the record's XML id (if any)
    # in the same query as the audit columns.
    query = '''SELECT %s, __imd.module, __imd.name
               FROM %s LEFT JOIN ir_model_data __imd
                   ON (__imd.model = %%s and __imd.res_id = %s.id)
               WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
    cr.execute(query, (self._name, tuple(ids)))
    res = cr.dictfetchall()
    # Normalize NULLs to False for the caller.
    r[key] = r[key] or False
    if details and key in ('write_uid', 'create_uid') and r[key]:
        # Replace the numeric uid by the (id, name) pair from name_get().
        r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
        pass # Leave the numeric uid there
    r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
    del r['name'], r['module']
def _check_concurrency(self, cr, ids, context):
    # Optimistic concurrency check: compare the client-supplied timestamps
    # (keyed "<model>,<id>" in context[CONCURRENCY_CHECK_FIELD]) against the
    # records' current write/create dates, and raise when any record was
    # modified since the client last read it. No-op unless audit logging
    # is enabled and the context carries the marker.
    if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
    check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)"
    for sub_ids in cr.split_for_in_conditions(ids):
        id_ref = "%s,%s" % (self._name, id)
        # pop(): each client timestamp is consumed so it is checked only once.
        update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
        ids_to_check.extend([id, update_date])
        if not ids_to_check:
        # One check_clause instance per (id, timestamp) pair.
        cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
        # mention the first one only to keep the error message readable
        raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
def _check_record_rules_result_count(self, cr, uid, ids, operation, context=None):
    """Verify that number of returned rows after applying record rules matches
    the length of `ids`, and raise an appropriate exception if it does not.
    """
    if cr.rowcount != len(ids):
        # Attempt to distinguish record rule restriction vs deleted records,
        # to provide a more specific error message
        cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(ids),))
        if cr.rowcount != len(ids):
            # Some of the ids do not exist at all: the records were deleted.
            if operation == 'unlink':
                # no need to warn about deleting an already deleted record!
            _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name)
            raise except_orm(_('Missing document(s)'),
                             _('One of the documents you are trying to access has been deleted, please try again after refreshing.'))
        # All ids exist, so the shortfall came from a record rule.
        _logger.warning('Access Denied by record rules for operation: %s, uid: %s, model: %s', operation, uid, self._name)
        raise except_orm(_('Access Denied'),
                         _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                        (self._description, operation))
def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
    """Check that ``operation`` is permitted for user ``uid`` by the model
    access rights: the decision is delegated to the ir.model.access
    manager and its verdict is returned unchanged."""
    access_manager = self.pool.get('ir.model.access')
    return access_manager.check(cr, uid, self._name, operation, raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
    """Verifies that the operation given by ``operation`` is allowed for the user
    according to ir.rules.

    :param operation: one of ``write``, ``unlink``
    :raise except_orm: * if current ir.rules do not permit this operation.
    :return: None if the operation is allowed
    """
    # The superuser bypasses record rules entirely.
    if uid == SUPERUSER_ID:
    if self.is_transient():
        # Only one single implicit access rule for transient models: owner only!
        # This is ok to hardcode because we assert that TransientModels always
        # have log_access enabled so that the create_uid column is always there.
        # And even with _inherits, these fields are always present in the local
        # table too, so no need for JOINs.
        cr.execute("""SELECT distinct create_uid
                      WHERE id IN %%s""" % self._table, (tuple(ids),))
        uids = [x[0] for x in cr.fetchall()]
        # Every requested record must belong to the calling user.
        if len(uids) != 1 or uids[0] != uid:
            raise except_orm(_('Access Denied'),
                             _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,))
    # Regular models: re-select the ids through the ir.rule domain and
    # compare row counts to detect rule-filtered records.
    where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
    where_clause = ' and ' + ' and '.join(where_clause)
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                   ' WHERE ' + self._table + '.id IN %s' + where_clause,
                   [sub_ids] + where_params)
        self._check_record_rules_result_count(cr, uid, sub_ids, operation, context=context)
def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
    """Call given workflow trigger as a result of a CRUD operation"""
    # ``trigger`` names a method of the workflow service, e.g. 'trg_write'
    # or 'trg_delete' (see write() and unlink()).
    wf_service = netsvc.LocalService("workflow")
    getattr(wf_service, trigger)(uid, self._name, res_id, cr)
def _workflow_signal(self, cr, uid, ids, signal, context=None):
    """Send given workflow signal"""
    # Validates ``signal`` on each record's workflow instance.
    wf_service = netsvc.LocalService("workflow")
    wf_service.trg_validate(uid, self._name, res_id, signal, cr)
def unlink(self, cr, uid, ids, context=None):
    """
    Delete records with given ids

    :param cr: database cursor
    :param uid: current user id
    :param ids: id or list of ids
    :param context: (optional) context arguments, like lang, time zone
    :raise AccessError: * if user has no unlink rights on the requested object
                        * if user tries to bypass access rules for unlink on the requested object
    :raise UserError: if the record is default property for other records
    """
    if isinstance(ids, (int, long)):
    # Snapshot the stored function fields to recompute AFTER the deletion,
    # while the rows still exist.
    result_store = self._store_get_values(cr, uid, ids, None, context)

    self._check_concurrency(cr, ids, context)

    self.check_access_rights(cr, uid, 'unlink')

    ir_property = self.pool.get('ir.property')

    # Check if the records are used as default properties.
    domain = [('res_id', '=', False),
              ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
    if ir_property.search(cr, uid, domain, context=context):
        raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

    # Delete the records' properties.
    property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context)
    ir_property.unlink(cr, uid, property_ids, context=context)

    self._workflow_trigger(cr, uid, ids, 'trg_delete', context=context)

    self.check_access_rule(cr, uid, ids, 'unlink', context=context)
    pool_model_data = self.pool.get('ir.model.data')
    ir_values_obj = self.pool.get('ir.values')
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('delete from ' + self._table + ' ' \
                   'where id IN %s', (sub_ids,))

        # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
        # as these are not connected with real database foreign keys, and would be dangling references.
        # Note: following steps performed as admin to avoid access rights restrictions, and with no context
        # to avoid possible side-effects during admin calls.
        # Step 1. Calling unlink of ir_model_data only for the affected IDS
        reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
        # Step 2. Perform the actual deletion of the referenced ir_model_data records
        pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)

        # For the same reason, removing the record relevant to ir_values
        ir_value_ids = ir_values_obj.search(cr, uid,
            ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
        ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)

    # Recompute the stored function fields that depended on the deleted
    # records, on other models only (our own rows are gone).
    for order, object, store_ids, fields in result_store:
        if object != self._name:
            obj = self.pool.get(object)
            # Re-filter to ids that still exist before recomputing.
            cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
            rids = map(lambda x: x[0], cr.fetchall())
            obj._store_set_values(cr, uid, rids, fields, context)
def write(self, cr, user, ids, vals, context=None):
    """
    Update records with given ids with the given field values

    :param cr: database cursor
    :param user: current user id
    :param ids: object id or list of object ids to update according to **vals**
    :param vals: field values to update, e.g {'field_name': new_field_value, ...}
    :type vals: dictionary
    :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
    :type context: dictionary

    :raise AccessError: * if user has no write rights on the requested object
                        * if user tries to bypass access rules for write on the requested object
    :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
    :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

    **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:

        + For a many2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
             (3, ID)                cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
             (4, ID)                link to existing record with id = ID (adds a relationship)
             (5)                    unlink all (like using (3,ID) for all linked records)
             (6, 0, [IDs])          replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)

          Example:
             [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]

        + For a one2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)

          Example:
             [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]

        + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
        + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
    """
    # Field-level write permissions: fields restricted by 'write' groups
    # the user does not belong to are dropped from ``vals``.
    for field in vals.copy():
        if field in self._columns:
            fobj = self._columns[field]
        elif field in self._inherit_fields:
            fobj = self._inherit_fields[field][2]
        for group in groups:
            # Groups are "module.xml_id" external identifiers.
            module = group.split(".")[0]
            grp = group.split(".")[1]
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
    if isinstance(ids, (int, long)):
    self._check_concurrency(cr, ids, context)
    self.check_access_rights(cr, user, 'write')

    # Stored function fields to recompute once the write is done.
    result = self._store_get_values(cr, user, ids, vals.keys(), context) or []

    # No direct update of parent_left/right
    vals.pop('parent_left', None)
    vals.pop('parent_right', None)

    parents_changed = []
    parent_order = self._parent_order or self._order
    if self._parent_store and (self._parent_name in vals):
        # The parent_left/right computation may take up to
        # 5 seconds. No need to recompute the values if the
        # parent is the same.
        # Note: to respect parent_order, nodes must be processed in
        # order, so ``parents_changed`` must be ordered properly.
        parent_val = vals[self._parent_name]
        query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
                (self._table, self._parent_name, self._parent_name, parent_order)
        cr.execute(query, (tuple(ids), parent_val))
        query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
                (self._table, self._parent_name, parent_order)
        cr.execute(query, (tuple(ids),))
        parents_changed = map(operator.itemgetter(0), cr.fetchall())

    # Translations are only stored when a non-English language is active.
    totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
    field_column = self._all_columns.get(field) and self._all_columns.get(field).column
    if field_column and field_column.deprecated:
        _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
    if field in self._columns:
        if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
            if (not totranslate) or not self._columns[field].translate:
                # Plain column: updated directly by the SQL UPDATE below.
                upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                upd1.append(self._columns[field]._symbol_set[1](vals[field]))
                direct.append(field)
            upd_todo.append(field)
        updend.append(field)
    if field in self._columns \
            and hasattr(self._columns[field], 'selection') \
        self._check_selection_field_value(cr, user, field, vals[field], context=context)

    if self._log_access:
        # Audit columns maintained by the ORM itself.
        upd0.append('write_uid=%s')
        upd0.append("write_date=(now() at time zone 'UTC')")

    self.check_access_rule(cr, user, ids, 'write', context=context)
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
                   'where id IN %s', upd1 + [sub_ids])
        # A row-count mismatch means some records vanished between the
        # access-rule check and the UPDATE.
        if cr.rowcount != len(sub_ids):
            raise except_orm(_('AccessError'),
                             _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)

    if self._columns[f].translate:
        # Keep the original (source) value from the table and store the
        # new value through ir.translation for the context language.
        src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
        # Inserting value to DB
        self.write(cr, user, ids, {f: vals[f]})
        self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

    # call the 'set' method of fields which are not classic_write
    upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

    # default element in context must be removed when call a one2many or many2many
    rel_context = context.copy()
    for c in context.items():
        if c[0].startswith('default_'):
            del rel_context[c[0]]

    for field in upd_todo:
        result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

    # Dispatch the values of inherited fields to their parent models.
    unknown_fields = updend[:]
    for table in self._inherits:
        col = self._inherits[table]
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                       'where id IN %s', (sub_ids,))
            nids.extend([x[0] for x in cr.fetchall()])
        if self._inherit_fields[val][0] == table:
            unknown_fields.remove(val)
        self.pool.get(table).write(cr, user, nids, v, context)
        'No such field(s) in model %s: %s.',
        self._name, ', '.join(unknown_fields))
    self._validate(cr, user, ids, context)

    # TODO: use _order to set dest at the right position and not first node of parent
    # We can't defer parent_store computation because the stored function
    # fields that are computed may refer (directly or indirectly) to
    # parent_left/right (via a child_of domain)
    self.pool._init_parent[self._name] = True
    order = self._parent_order or self._order
    parent_val = vals[self._parent_name]
    clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
    clause, params = '%s IS NULL' % (self._parent_name,), ()

    for id in parents_changed:
        # Nested-set (parent_left/parent_right) maintenance for each node
        # whose parent actually changed.
        cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
        pleft, pright = cr.fetchone()
        distance = pright - pleft + 1

        # Positions of current siblings, to locate proper insertion point;
        # this can _not_ be fetched outside the loop, as it needs to be refreshed
        # after each update, in case several nodes are sequentially inserted one
        # next to the other (i.e computed incrementally)
        cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
        parents = cr.fetchall()

        # Find Position of the element
        for (parent_pright, parent_id) in parents:
            position = parent_pright + 1

        # It's the first node of the parent
        cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
        position = cr.fetchone()[0] + 1

        # A node may not be moved inside its own subtree.
        if pleft < position <= pright:
            raise except_orm(_('UserError'), _('Recursivity Detected.'))

        # Open an interval of ``distance`` at ``position`` and shift the
        # subtree into it; the arithmetic differs with the move direction.
        if pleft < position:
            cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
        cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
        cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
        cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))

    result += self._store_get_values(cr, user, ids, vals.keys(), context)
    for order, object, ids_to_update, fields_to_recompute in result:
        key = (object, tuple(fields_to_recompute))
        done.setdefault(key, {})
        # avoid to do several times the same computation
        for id in ids_to_update:
            if id not in done[key]:
                done[key][id] = True
        self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)

    self._workflow_trigger(cr, user, ids, 'trg_write', context=context)
4234 # TODO: Should set perm to user.xxx
def create(self, cr, user, vals, context=None):
    """
    Create a new record for the model.

    The values for the new record are initialized using the ``vals``
    argument, and if necessary the result of ``default_get()``.

    :param cr: database cursor
    :param user: current user id
    :param vals: field values for new record, e.g {'field_name': field_value, ...}
    :type vals: dictionary
    :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
    :type context: dictionary
    :return: id of new record created
    :raise AccessError: * if user has no create rights on the requested object
                        * if user tries to bypass access rules for create on the requested object
    :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
    :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

    **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
    Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
    to specify them.
    """
    if self.is_transient():
        # Garbage-collect expired transient records on every create.
        self._transient_vacuum(cr, user)

    self.check_access_rights(cr, user, 'create')

    if self._log_access:
        for f in LOG_ACCESS_COLUMNS:
            # Audit columns are maintained by the ORM and may not be forced
            # by the caller.
            if vals.pop(f, None) is not None:
                'Field `%s` is not allowed when creating the model `%s`.',
    vals = self._add_missing_default_values(cr, user, vals, context)

    # Collect the values to delegate to each _inherits parent record.
    for v in self._inherits:
        if self._inherits[v] not in vals:
        tocreate[v] = {'id': vals[self._inherits[v]]}
    (upd0, upd1, upd2) = ('', '', [])
    for v in vals.keys():
        if v in self._inherit_fields and v not in self._columns:
            # Inherited-only field: handled by the parent's create/write.
            (table, col, col_detail, original_parent) = self._inherit_fields[v]
            tocreate[table][v] = vals[v]
        if (v not in self._inherit_fields) and (v not in self._columns):
            unknown_fields.append(v)
        'No such field(s) in model %s: %s.',
        self._name, ', '.join(unknown_fields))

    # Try-except added to filter the creation of those records whose fields are readonly.
    # Example : any dashboard which has all the fields readonly.(due to Views(database views))
    cr.execute("SELECT nextval('"+self._sequence+"')")
    raise except_orm(_('UserError'),
        _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))

    id_new = cr.fetchone()[0]
    for table in tocreate:
        if self._inherits[table] in vals:
            del vals[self._inherits[table]]

        record_id = tocreate[table].pop('id', None)

        if record_id is None or not record_id:
            # No existing parent supplied: create one; otherwise update it.
            record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
            self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)

        upd0 += ',' + self._inherits[table]
        upd2.append(record_id)

    #Start : Set bool fields to be False if they are not touched(to make search more powerful)
    bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']

    for bool_field in bool_fields:
        if bool_field not in vals:
            vals[bool_field] = False
    #End
    for field in vals.copy():
        if field in self._columns:
            fobj = self._columns[field]
            fobj = self._inherit_fields[field][2]
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            # NOTE(review): unlike the equivalent check in write(), this query
            # builds SQL with '%' string interpolation instead of psycopg2
            # parameters -- fragile quoting; should be parameterized the same
            # way as the write() variant.
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
            elif readonly[0][0] == 0:
        if self._columns[field]._classic_write:
            upd0 = upd0 + ',"' + field + '"'
            upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
            upd2.append(self._columns[field]._symbol_set[1](vals[field]))
            #for the function fields that receive a value, we set them directly in the database
            #(they may be required), but we also need to trigger the _fct_inv()
            if (hasattr(self._columns[field], '_fnct_inv')) and not isinstance(self._columns[field], fields.related):
                #TODO: this way to special case the related fields is really creepy but it shouldn't be changed at
                #one week of the release candidate. It seems the only good way to handle correctly this is to add an
                #attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise
                #if, for example, the related has a default value (for usability) then the fct_inv is called and it
                #may raise some access rights error. Changing this is a too big change for now, and is thus postponed
                #after the release but, definitively, the behavior shouldn't be different for related and function
                upd_todo.append(field)
            #TODO: this `if´ statement should be removed because there is no good reason to special case the fields
            #related. See the above TODO comment for further explanations.
            if not isinstance(self._columns[field], fields.related):
                upd_todo.append(field)
        if field in self._columns \
                and hasattr(self._columns[field], 'selection') \
            self._check_selection_field_value(cr, user, field, vals[field], context=context)
    if self._log_access:
        upd0 += ',create_uid,create_date,write_uid,write_date'
        upd1 += ",%s,(now() at time zone 'UTC'),%s,(now() at time zone 'UTC')"
        upd2.extend((user, user))
    cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
    self.check_access_rule(cr, user, [id_new], 'create', context=context)
    upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

    if self._parent_store and not context.get('defer_parent_store_computation'):
4393 self.pool._init_parent[self._name] = True
4395 parent = vals.get(self._parent_name, False)
4397 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
4399 result_p = cr.fetchall()
4400 for (pleft,) in result_p:
4405 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
4406 pleft_old = cr.fetchone()[0]
4409 cr.execute('select max(parent_right) from '+self._table)
4410 pleft = cr.fetchone()[0] or 0
4411 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
4412 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
4413 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
# 'default_*' keys in the context must be removed when calling a one2many or many2many
4416 rel_context = context.copy()
4417 for c in context.items():
4418 if c[0].startswith('default_'):
4419 del rel_context[c[0]]
4422 for field in upd_todo:
4423 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
4424 self._validate(cr, user, [id_new], context)
4426 if not context.get('no_store_function', False):
4427 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
4430 for order, object, ids, fields2 in result:
4431 if not (object, ids, fields2) in done:
4432 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
4433 done.append((object, ids, fields2))
4435 if self._log_create and not (context and context.get('no_store_function', False)):
4436 message = self._description + \
4438 self.name_get(cr, user, [id_new], context=context)[0][1] + \
4439 "' " + _("created.")
4440 self.log(cr, user, id_new, message, True, context=context)
4441 self._workflow_trigger(cr, user, [id_new], 'trg_create', context=context)
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
        """Fetch records as objects allowing to use dot notation to browse fields and relations

        :param cr: database cursor
        :param uid: current user id
        :param select: id or list of ids.
        :param context: context arguments, like lang, time zone
        :rtype: object or list of objects requested
        """
        self._list_class = list_class or browse_record_list
        # NOTE(review): upstream initializes a shared record cache here
        # (``cache = {}``); that line appears to be missing in this copy, so
        # ``cache`` below would be unbound — verify against upstream.
        # need to accepts ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            # single id: return a single browse_record
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            # list of ids: one browse_record per id, wrapped in the list class
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
        # anything else (False, None, ...) yields an empty browse_null
        return browse_null()
    def _store_get_values(self, cr, uid, ids, fields, context):
        """Returns an ordered list of fields.functions to call due to
           an update operation on ``fields`` of records with ``ids``,
           obtained by calling the 'store' functions of these fields,
           as setup by their 'store' attribute.

           :return: [(priority, model_name, [record_ids,], [function_fields,])]
        """
        if fields is None: fields = []
        stored_functions = self.pool._store_function.get(self._name, [])

        # use indexed names for the details of the stored_functions:
        model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)

        # only keep functions that should be triggered for the ``fields``
        # being written (an empty trigger list means "always trigger")
        to_compute = [f for f in stored_functions \
                if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]

        # NOTE(review): the ``mapping = {}`` initialisation appears to be
        # missing in this copy — verify against upstream.
        for function in to_compute:
            # use admin user for accessing objects having rules defined on store fields
            target_ids = [id for id in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id]

            # the compound key must consider the priority and model name
            key = (function[priority_], function[model_name_])
            for target_id in target_ids:
                mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))

        # Here mapping looks like:
        # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
        #   (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
        #   (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... }

        # Now we need to generate the batch function calls list
        # call_map =
        #   { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
        # NOTE(review): the ``call_map = {}`` initialisation appears to be
        # missing in this copy — verify against upstream.
        for ((priority,model), id_map) in mapping.iteritems():
            functions_ids_maps = {}
            # function_ids_maps =
            #   { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
            for id, functions in id_map.iteritems():
                functions_ids_maps.setdefault(tuple(functions), []).append(id)
            for functions, ids in functions_ids_maps.iteritems():
                call_map.setdefault((priority,model),[]).append((priority, model, ids,
                                                                [f[func_field_to_compute_] for f in functions]))
        ordered_keys = call_map.keys()
        # NOTE(review): upstream sorts ``ordered_keys``, guards the reduce, and
        # returns ``result``; those lines appear to be missing in this copy.
        result = reduce(operator.add, (call_map[k] for k in ordered_keys))
    def _store_set_values(self, cr, uid, ids, fields, context):
        """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
           respecting ``multi`` attributes), and stores the resulting values in the database directly."""
        # NOTE(review): several lines of this method appear to be missing in
        # this copy (e.g. the initialisation of ``field_dict``/``keys``/``todo``,
        # the cursor-iteration loop headers, and some guards); the indentation
        # below is a best-effort reconstruction — verify against upstream.
        if self._log_access:
            # fetch last write dates so fields recomputed recently can be skipped
            cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
            field_dict.setdefault(r[0], [])
            res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
            write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
            for i in self.pool._store_function.get(self._name, []):
                # i[5] presumably is a freshness window in hours — TODO confirm
                up_write_date = write_date + datetime.timedelta(hours=i[5])
                if datetime.datetime.now() < up_write_date:
                    field_dict[r[0]].append(i[1])
        # group the fields to recompute by their 'multi' attribute
        if self._columns[f]._multi not in keys:
            keys.append(self._columns[f]._multi)
        todo.setdefault(self._columns[f]._multi, [])
        todo[self._columns[f]._multi].append(f)
        # multi-field branch: a single .get() call computes the whole group
        # use admin user for accessing objects having rules defined on store fields
        result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
        for id, value in result.items():
            for f in value.keys():
                if f in field_dict[id]:
                    # many2one results come back as (id, name) pairs; keep the id
                    if self._columns[v]._type == 'many2one':
                        value[v] = value[v][0]
                    upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
                    upd1.append(self._columns[v]._symbol_set[1](value[v]))
            cr.execute('update "' + self._table + '" set ' + \
                ','.join(upd0) + ' where id = %s', upd1)
        # single-field branch: one UPDATE per record
        # use admin user for accessing objects having rules defined on store fields
        result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
        for r in result.keys():
            if r in field_dict.keys():
                if f in field_dict[r]:
                    for id, value in result.items():
                        if self._columns[f]._type == 'many2one':
                            cr.execute('update "' + self._table + '" set ' + \
                                '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
    def perm_write(self, cr, user, ids, fields, context=None):
        """Removed API: always raises. Kept as a stub so legacy callers fail explicitly."""
        raise NotImplementedError(_('This method does not exist anymore'))
# TODO: improve handling of NULL values
    def _where_calc(self, cr, user, domain, active_test=True, context=None):
        """Computes the WHERE clause needed to implement an OpenERP domain.

        :param domain: the domain to compute
        :param active_test: whether the default filtering of records with ``active``
                            field set to ``False`` should be applied.
        :return: the query expressing the given domain as provided in domain
        :rtype: osv.query.Query
        """
        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitely asked for
        if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
            # the item[0] trick below works for domain items and '&'/'|'/'!'
            # NOTE(review): upstream wraps the next two statements in an
            # ``if domain: ... else: ...`` pair; those header lines appear to
            # be missing in this copy — verify before relying on this logic.
            if not any(item[0] == 'active' for item in domain):
                domain.insert(0, ('active', '=', 1))
                domain = [('active', '=', 1)]

        # NOTE(review): upstream guards the expression build with ``if domain:``
        # and the last assignment below is its ``else`` branch (empty query);
        # those header lines appear to be missing in this copy.
        e = expression.expression(cr, user, domain, self, context)
        tables = e.get_tables()
        where_clause, where_params = e.to_sql()
        where_clause = where_clause and [where_clause] or []
        where_clause, where_params, tables = [], [], ['"%s"' % self._table]

        return Query(tables, where_clause, where_params)
4636 def _check_qorder(self, word):
4637 if not regex_order.match(word):
4638 raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
    def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
        """Add what's missing in ``query`` to implement all appropriate ir.rules
          (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

        :param query: the current query object
        """
        def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
            # Merge one rule's SQL fragments into the query being built.
            # NOTE(review): upstream guards this body with ``if added_clause:``
            # and returns True/False; those lines appear missing in this copy.
            if parent_model and child_object:
                # as inherited rules are being applied, we need to add the missing JOIN
                # to reach the parent table (if it was not JOINed yet in the query)
                child_object._inherits_join_add(child_object, parent_model, query)
            query.where_clause += added_clause
            query.where_clause_params += added_params
            for table in added_tables:
                if table not in query.tables:
                    query.tables.append(table)

        # apply main rules on the object
        rule_obj = self.pool.get('ir.rule')
        apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))

        # apply ir.rules from the parents (through _inherits)
        for inherited_model in self._inherits:
            kwargs = dict(parent_model=inherited_model, child_object=self) #workaround for python2.5
            apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
    def _generate_m2o_order_by(self, order_field, query):
        """
        Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
        either native m2o fields or function/related fields that are stored, including
        intermediate JOINs for inheritance if required.

        :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
        """
        if order_field not in self._columns and order_field in self._inherit_fields:
            # also add missing joins for reaching the table containing the m2o field
            qualified_field = self._inherits_join_calc(order_field, query)
            order_field_column = self._inherit_fields[order_field][2]
        # NOTE(review): upstream puts the next two assignments in an ``else:``
        # branch; the header line appears to be missing in this copy.
        qualified_field = '"%s"."%s"' % (self._table, order_field)
        order_field_column = self._columns[order_field]

        assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
        if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
            _logger.debug("Many2one function/related fields must be stored " \
                "to be used as ordering fields! Ignoring sorting for %s.%s",
                self._name, order_field)
            # NOTE(review): upstream returns (None) here; line appears missing.

        # figure out the applicable order_by for the m2o
        dest_model = self.pool.get(order_field_column._obj)
        m2o_order = dest_model._order
        if not regex_order.match(m2o_order):
            # _order is complex, can't use it here, so we default to _rec_name
            m2o_order = dest_model._rec_name
        # NOTE(review): upstream splits m2o_order in an ``else:`` branch, with a
        # ``m2o_order_list = []`` init; those lines appear missing in this copy.
        for order_part in m2o_order.split(","):
            m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
        m2o_order = m2o_order_list

        # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
        # as we don't want to exclude results that have NULL values for the m2o
        src_table, src_field = qualified_field.replace('"','').split('.', 1)
        query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
        qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
        return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
    def _generate_order_by(self, order_spec, query):
        """
        Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
        a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

        :raise: except_orm in case order_spec is malformed
        """
        order_by_clause = self._order
        # NOTE(review): upstream wraps the body below in ``if order_spec:``;
        # that header line appears to be missing in this copy.
        order_by_elements = []
        self._check_qorder(order_spec)
        for order_part in order_spec.split(','):
            order_split = order_part.strip().split(' ')
            order_field = order_split[0].strip()
            order_direction = order_split[1].strip() if len(order_split) == 2 else ''
            if order_field == 'id':
                # id lives on the model's own table, no join needed
                order_by_clause = '"%s"."%s"' % (self._table, order_field)
            elif order_field in self._columns:
                order_column = self._columns[order_field]
                if order_column._classic_read:
                    inner_clause = '"%s"."%s"' % (self._table, order_field)
                elif order_column._type == 'many2one':
                    inner_clause = self._generate_m2o_order_by(order_field, query)
                # NOTE(review): upstream puts the next line in an ``else:``
                continue # ignore non-readable or "non-joinable" fields
            elif order_field in self._inherit_fields:
                parent_obj = self.pool.get(self._inherit_fields[order_field][3])
                order_column = parent_obj._columns[order_field]
                if order_column._classic_read:
                    inner_clause = self._inherits_join_calc(order_field, query)
                elif order_column._type == 'many2one':
                    inner_clause = self._generate_m2o_order_by(order_field, query)
                # NOTE(review): upstream puts the next line in an ``else:``
                continue # ignore non-readable or "non-joinable" fields
            if isinstance(inner_clause, list):
                # m2o ordering may expand to several qualified columns
                for clause in inner_clause:
                    order_by_elements.append("%s %s" % (clause, order_direction))
            # NOTE(review): upstream puts the next line in an ``else:``
            order_by_elements.append("%s %s" % (inner_clause, order_direction))
        if order_by_elements:
            order_by_clause = ",".join(order_by_elements)

        return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
        This is ok at the security level because this method is private and not callable through XML-RPC.

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        self.check_access_rights(cr, access_rights_uid or user, 'read')

        # For transient models, restrict acces to the current user, except for the super-user
        if self.is_transient() and self._log_access and user != SUPERUSER_ID:
            args = expression.AND(([('create_uid', '=', user)], args or []))

        query = self._where_calc(cr, user, args, context=context)
        self._apply_ir_rules(cr, user, query, 'read', context=context)
        order_by = self._generate_order_by(order, query)
        from_clause, where_clause, where_clause_params = query.get_sql()

        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        where_str = where_clause and (" WHERE %s" % where_clause) or ''

        # NOTE(review): the ``if count: ... else: ...`` structure and the
        # ``res = cr.fetchall()`` lines appear to be missing in this copy:
        # upstream runs only one of the two queries below and, for count,
        # returns the scalar instead of the id list.
        cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
        cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
        return [x[0] for x in res]
# returns the different values ever entered for one field
# this is used, for example, in the client when the user hits enter on
# a search field, to suggest the existing values for that field
    def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
        # Return the distinct values already stored for ``field``.
        # NOTE(review): upstream normalises ``args`` to a list here; that line
        # appears to be missing in this copy.
        if field in self._inherit_fields:
            # inherited field: delegate to the model that actually owns it
            return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
        return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
    def copy_data(self, cr, uid, id, default=None, context=None):
        """
        Copy given record's data with all its fields values

        :param cr: database cursor
        :param user: current user id
        :param id: id of the record to copy
        :param default: field values to override in the original values of the copied record
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: dictionary containing all the field values
        """
        # NOTE(review): the usual ``context``/``default`` normalisation lines
        # appear to be missing in this copy.

        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_data_seen',{})
        if id in seen_map.setdefault(self._name,[]):
            # NOTE(review): upstream returns here when the record was already
            # seen; the next line was indented under the guard only to keep the
            # fragment parseable — verify against upstream.
            seen_map[self._name].append(id)

        if 'state' not in default:
            if 'state' in self._defaults:
                if callable(self._defaults['state']):
                    default['state'] = self._defaults['state'](self, cr, uid, context)
                # NOTE(review): upstream puts the next line in an ``else:``
                default['state'] = self._defaults['state']

        # read the record without lang so translatable fields come back raw
        context_wo_lang = context.copy()
        if 'lang' in context:
            del context_wo_lang['lang']
        data = self.read(cr, uid, [id,], context=context_wo_lang)
        # NOTE(review): upstream guards with ``if data: data = data[0] else:``
        # before this raise; those lines appear to be missing in this copy.
        raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))

        # build a black list of fields that should not be copied
        blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
        def blacklist_given_fields(obj):
            # blacklist the fields that are given by inheritance
            for other, field_to_other in obj._inherits.items():
                blacklist.add(field_to_other)
                if field_to_other in default:
                    # all the fields of 'other' are given by the record: default[field_to_other],
                    # except the ones redefined in self
                    blacklist.update(set(self.pool.get(other)._all_columns) - set(self._columns))
                # NOTE(review): upstream recurses in an ``else:`` branch
                blacklist_given_fields(self.pool.get(other))
        blacklist_given_fields(self)

        # NOTE(review): the ``res = {}`` initialisation and the leading
        # ``if f in default:`` branch of the chain below appear to be missing
        # in this copy, leaving a dangling ``elif`` — verify against upstream.
        for f, colinfo in self._all_columns.items():
            field = colinfo.column
            elif f in blacklist:
            elif isinstance(field, fields.function):
            elif field._type == 'many2one':
                res[f] = data[f] and data[f][0]
            elif field._type == 'one2many':
                other = self.pool.get(field._obj)
                # duplicate following the order of the ids because we'll rely on
                # it later for copying translations in copy_translation()!
                lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
                # the lines are duplicated using the wrong (old) parent, but then
                # are reassigned to the correct one thanks to the (0, 0, ...)
                res[f] = [(0, 0, line) for line in lines if line]
            elif field._type == 'many2many':
                res[f] = [(6, 0, data[f])]
        # NOTE(review): upstream ends with a default-values merge and
        # ``return res``; those lines appear to be missing in this copy.
    def copy_translations(self, cr, uid, old_id, new_id, context=None):
        # Duplicate every ir.translation row attached to ``old_id`` onto
        # ``new_id``, recursing through one2many children.
        # NOTE(review): the context normalisation lines appear missing here.

        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_translations_seen',{})
        if old_id in seen_map.setdefault(self._name,[]):
            # NOTE(review): upstream returns here; the next line was indented
            # under the guard only to keep the fragment parseable.
            seen_map[self._name].append(old_id)

        trans_obj = self.pool.get('ir.translation')
        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fields = self.fields_get(cr, uid, context=context)

        translation_records = []
        for field_name, field_def in fields.items():
            # we must recursively copy the translations for o2o and o2m
            if field_def['type'] == 'one2many':
                target_obj = self.pool.get(field_def['relation'])
                old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
                # here we rely on the order of the ids to match the translations
                # as foreseen in copy_data()
                old_children = sorted(old_record[field_name])
                new_children = sorted(new_record[field_name])
                for (old_child, new_child) in zip(old_children, new_children):
                    target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
            # and for translatable fields we keep them for copy
            elif field_def.get('translate'):
                if field_name in self._columns:
                    trans_name = self._name + "," + field_name
                elif field_name in self._inherit_fields:
                    trans_name = self._inherit_fields[field_name][0] + "," + field_name
                # NOTE(review): the closing ``])`` of this search call appears
                # to be missing in this copy — verify against upstream.
                trans_ids = trans_obj.search(cr, uid, [
                        ('name', '=', trans_name),
                        ('res_id', '=', old_id)
                translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))

        for record in translation_records:
            # retarget each translation row at the newly created record
            record['res_id'] = new_id
            trans_obj.create(cr, uid, record, context=context)
    def copy(self, cr, uid, id, default=None, context=None):
        """
        Duplicate record with given id updating it with default values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: id of the newly created record
        """
        # NOTE(review): the ``if context is None: context = {}`` guard appears
        # to be missing in this copy, so ``context.copy()`` would fail when
        # context is None — verify against upstream.
        context = context.copy()
        data = self.copy_data(cr, uid, id, default, context)
        new_id = self.create(cr, uid, data, context)
        self.copy_translations(cr, uid, id, new_id, context)
        # NOTE(review): upstream's trailing ``return new_id`` appears missing.
4955 def exists(self, cr, uid, ids, context=None):
4956 """Checks whether the given id or ids exist in this model,
4957 and return the list of ids that do. This is simple to use for
4958 a truth test on a browse_record::
4963 :param ids: id or list of ids to check for existence
4964 :type ids: int or [int]
4965 :return: the list of ids that currently exist, out of
4968 if type(ids) in (int, long):
4970 query = 'SELECT id FROM "%s"' % (self._table)
4971 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
4972 return [x[0] for x in cr.fetchall()]
4974 def check_recursion(self, cr, uid, ids, context=None, parent=None):
4975 _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
4977 assert parent is None or parent in self._columns or parent in self._inherit_fields,\
4978 "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
4979 return self._check_recursion(cr, uid, ids, context, parent)
    def _check_recursion(self, cr, uid, ids, context=None, parent=None):
        """
        Verifies that there is no loop in a hierarchical structure of records,
        by following the parent relationship using the **parent** field until a loop
        is detected or until a top-level record is found.

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids of records to check
        :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
        :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
        """
        # NOTE(review): upstream guards this assignment with ``if not parent:``;
        # that header line appears to be missing in this copy.
        parent = self._parent_name

        query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
        # NOTE(review): the ``ids_parent = ids[:]`` seed, the enclosing
        # ``while ids_parent:`` loop header and ``ids_parent2 = []`` init
        # appear to be missing in this copy — verify against upstream.
        for i in range(0, len(ids), cr.IN_MAX):
            # walk up one level of parents, chunked to respect cr.IN_MAX
            sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
            cr.execute(query, (tuple(sub_ids_parent),))
            ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
        ids_parent = ids_parent2
        for i in ids_parent:
            # NOTE(review): the loop body (``if i in ids: return False``) and
            # the final ``return True`` appear to be missing in this copy.
    def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
        """Retrieve the External ID(s) of any database record.

        **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``

        :return: map of ids to the list of their fully qualified External IDs
                 in the form ``module.key``, or an empty list when there's no External
                 ID for a record, e.g.::

                     { 'id': ['module.ext_id', 'module.ext_id_bis'],
                       'id2': [] }
        """
        ir_model_data = self.pool.get('ir.model.data')
        data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
        data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
        # can't use dict.fromkeys() as the list would be shared!
        # NOTE(review): the per-id ``result`` initialisation (one fresh list per
        # id) and the final ``return result`` appear to be missing in this copy.
        for record in data_results:
            result[record['res_id']].append('%(module)s.%(name)s' % record)
    def get_external_id(self, cr, uid, ids, *args, **kwargs):
        """Retrieve the External ID of any database record, if there
        is one. This method works as a possible implementation
        for a function field, to be able to add it to any
        model object easily, referencing it as ``Model.get_external_id``.

        When multiple External IDs exist for a record, only one
        of them is returned (randomly).

        :return: map of ids to their fully qualified XML ID,
                 defaulting to an empty string when there's none
                 (to be usable as a function field), e.g.::

                     { 'id': 'module.ext_id',
                       'id2': '' }
        """
        results = self._get_xml_ids(cr, uid, ids)
        for k, v in results.iteritems():
            # NOTE(review): the loop body (keep the first External ID or '')
            # and the final ``return results`` appear missing in this copy.
    # backwards compatibility
    # old method names kept as aliases so existing addons keep working
    get_xml_id = get_external_id
    _get_xml_ids = _get_external_ids
5063 def is_transient(self):
5064 """ Return whether the model is transient.
5066 See :class:`TransientModel`.
5069 return self._transient
5071 def _transient_clean_rows_older_than(self, cr, seconds):
5072 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5073 cr.execute("SELECT id FROM " + self._table + " WHERE"
5074 " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp <"
5075 " ((now() at time zone 'UTC') - interval %s)", ("%s seconds" % seconds,))
5076 ids = [x[0] for x in cr.fetchall()]
5077 self.unlink(cr, SUPERUSER_ID, ids)
5079 def _transient_clean_old_rows(self, cr, count):
5080 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5082 "SELECT id, COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
5083 " AS t FROM " + self._table +
5084 " ORDER BY t LIMIT %s", (count,))
5085 ids = [x[0] for x in cr.fetchall()]
5086 self.unlink(cr, SUPERUSER_ID, ids)
    def _transient_vacuum(self, cr, uid, force=False):
        """Clean the transient records.

        This unlinks old records from the transient model tables whenever the
        "_transient_max_count" or "_max_age" conditions (if any) are reached.
        Actual cleaning will happen only once every "_transient_check_time" calls.
        This means this method can be called frequently called (e.g. whenever
        a new record is created).
        """
        assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
        self._transient_check_count += 1
        if (not force) and (self._transient_check_count % self._transient_check_time):
            # not this call's turn: reset the counter and skip the cleaning
            self._transient_check_count = 0
            # NOTE(review): upstream returns True here; line appears missing.

        # Age-based expiration
        if self._transient_max_hours:
            self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)

        # Count-based expiration
        if self._transient_max_count:
            self._transient_clean_old_rows(cr, self._transient_max_count)
        # NOTE(review): upstream's final ``return True`` appears missing.
5113 def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None):
5114 """ Serializes one2many and many2many commands into record dictionaries
5115 (as if all the records came from the database via a read()). This
5116 method is aimed at onchange methods on one2many and many2many fields.
5118 Because commands might be creation commands, not all record dicts
5119 will contain an ``id`` field. Commands matching an existing record
5120 will have an ``id``.
5122 :param field_name: name of the one2many or many2many field matching the commands
5123 :type field_name: str
5124 :param commands: one2many or many2many commands to execute on ``field_name``
5125 :type commands: list((int|False, int|False, dict|False))
5126 :param fields: list of fields to read from the database, when applicable
5127 :type fields: list(str)
5128 :returns: records in a shape similar to that returned by ``read()``
5129 (except records may be missing the ``id`` field if they don't exist in db)
5132 result = [] # result (list of dict)
5133 record_ids = [] # ids of records to read
5134 updates = {} # {id: dict} of updates on particular records
5136 for command in commands:
5137 if not isinstance(command, (list, tuple)):
5138 record_ids.append(command)
5139 elif command[0] == 0:
5140 result.append(command[2])
5141 elif command[0] == 1:
5142 record_ids.append(command[1])
5143 updates.setdefault(command[1], {}).update(command[2])
5144 elif command[0] in (2, 3):
5145 record_ids = [id for id in record_ids if id != command[1]]
5146 elif command[0] == 4:
5147 record_ids.append(command[1])
5148 elif command[0] == 5:
5149 result, record_ids = [], []
5150 elif command[0] == 6:
5151 result, record_ids = [], list(command[2])
5153 # read the records and apply the updates
5154 other_model = self.pool.get(self._all_columns[field_name].column._obj)
5155 for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
5156 record.update(updates.get(record['id'], {}))
5157 result.append(record)
5161 # for backward compatibility
5162 resolve_o2m_commands_to_record_dicts = resolve_2many_commands
5164 # keep this import here, at top it will cause dependency cycle errors
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class::

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = False # True in a TransientModel
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vaccuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    # NOTE(review): upstream also sets ``_transient = True`` on this class;
    # that line appears to be missing in this copy — verify against upstream.
class AbstractModel(BaseModel):
    """Super-class for abstract models: classes meant only to be inherited
    by concrete models (Models or TransientModels), never usable or
    persisted on their own.

    Technical note: AbstractModel is deliberately not made the super-class
    of Model or BaseModel — the main definition of persistence methods such
    as create() does not belong in it, yet an AbstractModel must still be
    able to override them.
    """
    # Abstract models get no database backend of their own.
    _auto = False
    # Hidden from the ORM registry; meant to be inherited from in Python only.
    _register = False
def itemgetter_tuple(items):
    """ Fixes itemgetter inconsistency (useful in some cases) of not returning
    a tuple if len(items) == 1: always returns an n-tuple where n = len(items).

    :param items: sequence of indices/keys to extract
    :return: callable mapping a subscriptable object to an n-tuple of its items
    """
    # operator.itemgetter has no zero-item form, and its one-item form
    # returns a bare value instead of a 1-tuple, so handle both explicitly.
    if len(items) == 0:
        return lambda a: ()
    if len(items) == 1:
        return lambda gettable: (gettable[items[0]],)
    return operator.itemgetter(*items)
class ImportWarning(Warning):
    """ Used to send warnings upwards the stack during the import process. """
    pass
def convert_pgerror_23502(model, fields, info, e):
    """ Converts a not-null-violation (pgcode 23502) database error into a
    user-friendly error dict pointing at the offending field.

    :param model: model on which the error occurred (unused here)
    :param fields: mapping of field name -> field description dict
    :param info: extra information about the failed operation (unused here)
    :param e: the database exception raised by psycopg2
    :returns: dict with a ``message`` key, plus a ``field`` key when the
              offending column could be matched to a known field
    """
    # psycopg2 only exposes the offending column through the error text,
    # so parse it out of the exception message.
    m = re.match(r'^null value in column "(?P<field>\w+)" violates '
                 r'not-null constraint\n',
                 str(e))
    if not m or m.group('field') not in fields:
        # could not identify the field: fall back to the raw error message
        return {'message': unicode(e)}
    field = fields[m.group('field')]
    return {
        'message': _(u"Missing required value for the field '%(field)s'") % {
            'field': field['string']
        },
        'field': m.group('field'),
    }
# Maps postgres error codes (pgcodes) to converters producing user-friendly
# error dicts; any unknown code falls back to the raw error message.
PGERROR_TO_OE = collections.defaultdict(
    # shape of mapped converters
    lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
    # not_null_violation
    '23502': convert_pgerror_23502,
})
5243 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: