1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
22 #.apidoc title: Object Relational Mapping
23 #.apidoc module-mods: member-order: bysource
26 Object relational mapping to database (postgresql) module
27 * Hierarchical structure
28 * Constraints consistency, validations
29 * Object meta Data depends on its status
30 * Optimised processing by complex query (multiple actions at once)
31 * Default fields value
32 * Permissions optimisation
* Persistent object: DB postgresql
35 * Multi-level caching system
* 2 different inheritance mechanisms
38 - classicals (varchar, integer, boolean, ...)
39 - relations (one2many, many2one, many2many)
57 from lxml import etree
61 import openerp.netsvc as netsvc
62 import openerp.tools as tools
63 from openerp.tools.config import config
64 from openerp.tools.safe_eval import safe_eval as eval
65 from openerp.tools.translate import _
66 from openerp import SUPERUSER_ID
67 from query import Query
69 # List of etree._Element subclasses that we choose to ignore when parsing XML.
70 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Comma-separated list of identifiers (optionally double-quoted), each
# optionally followed by "asc" or "desc" (case-insensitive) — used to
# validate ORDER BY-style specifications (presumably _order; TODO confirm).
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model identifiers: lowercase alphanumerics plus '_' and '.'.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """Translate a field descriptor's UI attributes into ``modifiers``.

    :param dict field: field descriptor (as returned by fields_get()),
        possibly carrying a 'states' mapping of
        state -> [(attribute, value), ...]
    :param dict modifiers: updated in place; each of 'invisible',
        'readonly' and 'required' becomes either a boolean (static value)
        or a domain on 'state' listing the exception states
    """
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    for state, modifs in (field.get("states", {})).items():
        for modif in modifs:
            # Record the states in which the attribute differs from its default.
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)

    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            modifiers[attr] = default_value
# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Translate a view node's 'attrs', 'states', 'invisible', 'readonly'
    and 'required' attributes into the ``modifiers`` dict, in place.

    :param node: etree element of the view (only ``.get()`` is used)
    :param dict modifiers: accumulator, updated in place
    :param dict context: evaluation context for the attribute expressions
    :param bool in_tree_view: when True, 'invisible' is stored under the
        distinct 'tree_invisible' key (whole-column invisibility)
    """
    if node.get('attrs'):
        # 'attrs' holds a python dict literal mapping attribute -> domain.
        modifiers.update(eval(node.get('attrs')))

    if node.get('states'):
        if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
        else:
            modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]

    for a in ('invisible', 'readonly', 'required'):
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v
def simplify_modifiers(modifiers):
    """Drop falsy 'invisible'/'readonly'/'required' entries, in place.

    A missing key and a False value are equivalent for the client, so only
    truthy values (or domains) need to be serialized.
    """
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            del modifiers[a]
def transfer_modifiers_to_node(modifiers, node):
    """Serialize non-empty ``modifiers`` as JSON into the node's
    'modifiers' attribute; leaves the node untouched when empty."""
    if modifiers:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    """
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """Assert that the modifiers computed for ``what`` (an XML view node
    string or a fields_get()-style dict) serialize exactly to the JSON
    string ``expected``."""
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
# To run manually:
# openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """Sanity checks for the modifiers machinery defined above."""
    # Boolean node attributes: "1" serializes to true, "0" disappears.
    test_modifiers('<field name="a"/>', '{}')
    for attr in ('invisible', 'readonly', 'required'):
        test_modifiers('<field name="a" %s="1"/>' % attr, '{"%s": true}' % attr)
    for attr in ('invisible', 'readonly', 'required'):
        test_modifiers('<field name="a" %s="0"/>' % attr, '{}')
    test_modifiers('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}') # TODO order is not guaranteed
    test_modifiers('<field name="a" invisible="1" required="0"/>', '{"invisible": true}')
    test_modifiers('<field name="a" invisible="0" required="1"/>', '{"required": true}')
    test_modifiers("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}')

    # The dictionary is supposed to be the result of fields_get().
    test_modifiers({}, '{}')
    test_modifiers({"invisible": True}, '{"invisible": true}')
    test_modifiers({"invisible": False}, '{}')
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

        The _name attribute in osv and osv_memory object is subject to
        some restrictions. This function returns True or False whether
        the given name is allowed or not.

        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in other not, which leads to this kind
        of errors:

            psycopg2.ProgrammingError: relation "xxx" does not exist).

        The same restriction should apply to both osv and osv_memory
        objects for consistency.

    """
    if regex_object_name.match(name) is None:
        return False
    return True
def raise_on_invalid_object_name(name):
    """Log and raise except_orm when ``name`` is not a valid _name."""
    if check_object_name(name):
        return
    message = "The _name attribute %s is not valid." % name
    netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, message)
    raise except_orm('ValueError', message)
224 POSTGRES_CONFDELTYPES = {
def last_day_of_current_month():
    """Return the last day of the current month as a 'YYYY-MM-DD' string."""
    now = datetime.date.today()
    final_day = calendar.monthrange(now.year, now.month)[1]
    return time.strftime('%Y-%m-' + str(final_day))
def intersect(la, lb):
    """Return the items of ``la`` that also occur in ``lb``, keeping
    the order of ``la``."""
    return [item for item in la if item in lb]
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    # 'foo.id' -> 'foo/.id' (database id), 'foo:id' -> 'foo/id' (external id)
    with_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    with_external_id = re.sub(r'([^/]):id', r'\1/id', with_db_id)
    return with_external_id.split('/')
class except_orm(Exception):
    """ORM-level exception carrying a (name, value) pair, e.g.
    ('ValueError', human-readable message)."""
    def __init__(self, name, value):
        self.name = name
        self.value = value
        self.args = (name, value)
259 class BrowseRecordError(Exception):
class browse_null(object):
    """ Readonly python database object browser: the null object, returned
    in place of a browse_record for empty relational fields. Every item
    or attribute access yields None, and it is falsy (python 2
    ``__nonzero__``)."""

    def __init__(self):
        # Mirrors browse_record's `id` attribute, but always False.
        self.id = False

    def __getitem__(self, name):
        return None

    def __getattr__(self, name):
        return None  # XXX: return self ?

    def __repr__(self):
        return ''

    def __str__(self):
        return ''

    def __nonzero__(self):
        return False

    def __unicode__(self):
        return u''
289 # TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """ Collection of browse objects

    Such an instance will be returned when doing a ``browse([ids..])``
    and will be iterable, yielding browse() objects
    """

    def __init__(self, lst, context=None):
        if not context:
            context = {}
        super(browse_record_list, self).__init__(lst)
        # Context shared by all the records of the list.
        self.context = context
class browse_record(object):
    """ An object that behaves like a row of an object's table.
    It has attributes after the columns of the corresponding object.

    Example::

        uobj = pool.get('res.users')
        user_rec = uobj.browse(cr, uid, 104)

    NOTE(review): a number of source lines of this class are elided in
    this excerpt; only the visible lines are reproduced below, so some
    suites appear incomplete.
    """
    logger = netsvc.Logger()

    def __init__(self, cr, uid, id, table, cache, context=None, list_class=None, fields_process=None):
        """
        @param cache a dictionary of model->field->data to be shared accross browse
        objects, thus reducing the SQL read()s . It can speed up things a lot,
        but also be disastrous if not discarded after write()/unlink() operations
        @param table the object (inherited from orm)
        @param context dictionary with an optional context
        """
        if fields_process is None:
        self._list_class = list_class or browse_record_list
        self._table = table # deprecated, use _model!
        self._table_name = self._table._name
        self.__logger = logging.getLogger(
            'osv.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process
        # One shared sub-dict per model name in the cache, mapping
        # record id -> {field: value}.
        cache.setdefault(table._name, {})
        self._data = cache[table._name]
        if not (id and isinstance(id, (int, long,))):
            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))
        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # Lazily fetch field `name` (and, for classic/many2one columns, all
        # sibling prefetchable fields of all cached ids) into the shared
        # cache, wrapping relational values in browse_record/browse_null.
        if name not in self._data[self._id]:
            # build the list of fields we will fetch
            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                attr = getattr(self._table, name)
                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    # Model methods are proxied so they run on this record's
                    # id and pick up the record's context by default.
                    def function_proxy(*args, **kwargs):
                        if 'context' not in kwargs and self._context:
                            kwargs.update(context=self._context)
                        return attr(self._cr, self._uid, [self._id], *args, **kwargs)
                    return function_proxy
                error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
                self.logger.notifyChannel("browse_record", netsvc.LOG_WARNING, error_msg)
                raise KeyError(error_msg)

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            # gen the list of "local" (ie not inherited) fields which are classic or many2one
            fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
            # gen the list of inherited fields
            inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
            # complete the field list with the inherited fields which are classic or many2one
            fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
            fields_to_fetch = [(name, col)]
            # prefetch for every cached id that still misses this field
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            field_names = map(lambda x: x[0], fields_to_fetch)
            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                    raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

                # Where did those ids come from? Perhaps old entries in ir_model_dat?
                self.__logger.warn("No field_values found for ids %s in %s", ids, self)
                raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                for field_name, field_column in fields_to_fetch:
                    if field_column._type in ('many2one', 'one2one'):
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                                value = result_line[field_name]
                                # FIXME: this happen when a _inherits object
                                #        overwrite a field of it parent. Need
                                #        testing to be sure we got the right
                                #        object and not the parent one.
                                if not isinstance(value, browse_record):
                                    # In some cases the target model is not available yet, so we must ignore it,
                                    # which is safe in most cases, this value will just be loaded later when needed.
                                    # This situation can be caused by custom fields that connect objects with m2o without
                                    # respecting module dependencies, causing relationships to be connected to soon when
                                    # the target is not loaded yet.
                                    new_data[field_name] = browse_record(self._cr,
                                        self._uid, value, obj, self._cache,
                                        context=self._context,
                                        list_class=self._list_class,
                                        fields_process=self._fields_process)
                                    new_data[field_name] = value
                                new_data[field_name] = browse_null()
                            new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type in ('reference'):
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                                # reference values are 'model_name,id' strings
                                ref_obj, ref_id = result_line[field_name].split(',')
                                ref_id = long(ref_id)
                                obj = self._table.pool.get(ref_obj)
                                new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                                new_data[field_name] = browse_null()
                            new_data[field_name] = browse_null()
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

        if not name in self._data[self._id]:
            # How did this happen? Could be a missing model due to custom fields used too soon, see above.
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                    "Fields to fetch: %s, Field values: %s"%(field_names, field_values))
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                    "Cached: %s, Table: %s"%(self._data[self._id], self._table))
            raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # NOTE(review): the try/except wrapper is elided in this excerpt;
        # the visible line converts a lookup failure into AttributeError.
            raise AttributeError(e)

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

        # NOTE(review): enclosing def elided — raised for iteration attempts.
        raise NotImplementedError("Iteration is not allowed on %s" % self)

    def __hasattr__(self, name):

        # NOTE(review): enclosing def elided — textual representation.
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

        # NOTE(review): enclosing def __hash__ elided.
        return hash((self._table_name, self._id))

        """Force refreshing this browse_record's data and all the data of the
        records that belong to the same cache, by emptying the cache completely,
        preserving only the record identifiers (for prefetching optimizations).
        """
        for model, model_cache in self._cache.iteritems():
            # only preserve the ids of the records that were in the cache
            cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
            self._cache[model].clear()
            self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * If no size (or an empty or negative size is provided) return an
      'infinite' VARCHAR
    * Otherwise return a VARCHAR(n)

    :type int size: varchar size, optional
    :rtype: str
    """
    if size:
        if not isinstance(size, int):
            raise TypeError("VARCHAR parameter should be an int, got %s"
                            % type(size))
        if size > 0:
            return 'VARCHAR(%d)' % size
    return 'varchar'
# Mapping of simple field classes to their PostgreSQL storage type; fields
# not listed here are dispatched by get_pg_type() below.
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.integer_big: 'int8',
    fields.text: 'text',
    fields.date: 'date',
    fields.time: 'time',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
}
def get_pg_type(f, type_override=None):
    """
    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification),
        or None when the field type is not supported
    """
    field_type = type_override or type(f)

    if field_type in FIELDS_TO_PGTYPES:
        pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
    elif issubclass(field_type, fields.float):
        # explicit digits -> exact NUMERIC, otherwise a double.
        if f.digits:
            pg_type = ('numeric', 'NUMERIC')
        else:
            pg_type = ('float8', 'DOUBLE PRECISION')
    elif issubclass(field_type, (fields.char, fields.reference)):
        pg_type = ('varchar', pg_varchar(f.size))
    elif issubclass(field_type, fields.selection):
        # integer-keyed selections (or size == -1) are stored as integers.
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
                or getattr(f, 'size', None) == -1:
            pg_type = ('int4', 'INTEGER')
        else:
            pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
    elif issubclass(field_type, fields.function):
        # stored function fields use the postgres type of their _type.
        if f._type == 'selection':
            pg_type = ('varchar', pg_varchar())
        else:
            pg_type = get_pg_type(f, getattr(fields, f._type))
    else:
        logging.getLogger('orm').warn('%s type not supported!', field_type)
        pg_type = None

    return pg_type
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.

    """

    # module name -> list of model classes defined by that module.
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            # Not meant for discovery: flip the flag back for subclasses
            # and stop here.
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            return

        # Derive the defining (OpenERP) module name from the python module.
        module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (list() keeps the concatenation valid with dict key views as well)
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys())
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vaccuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register
    class attribute may be set to False.
    """
    __metaclass__ = MetaModel
    _register = False # Set to false if the model shouldn't be automatically discovered.

    # NOTE(review): several attribute definitions (e.g. _name, _columns,
    # _inherits, _inherit_fields) are elided in this excerpt; the comments
    # below describe some of them.
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    _transient = False # True in a TransientModel
    _transient_max_count = None
    _transient_max_hours = None
    _transient_check_time = 20

    # _inherits (elided here):
    # { 'parent_model': 'm2o_field', ... }

    # _inherit_fields (elided here):
    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model:
    # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #      field_column_obj, origina_parent_model), ... }

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.

    _sql_constraints = []
    # Method names that must not be exposed/overridden freely.
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
    __logger = logging.getLogger('orm')
    __schema = logging.getLogger('orm.schema')

    # Pseudo-field used by clients for optimistic concurrency checking.
    CONCURRENCY_CHECK_FIELD = '__last_update'
702 def log(self, cr, uid, id, message, secondary=False, context=None):
703 if context and context.get('disable_log'):
705 return self.pool.get('res.log').create(cr, uid,
708 'res_model': self._name,
709 'secondary': secondary,
715 def view_init(self, cr, uid, fields_list, context=None):
716 """Override this method to do specific things when a view on the object is opened."""
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).

        NOTE(review): several statements of this method (context
        defaulting, cr.fetchone() guards, parts of the ``vals`` dict,
        branch keywords, commit calls) are elided in this excerpt; only
        the visible lines are reproduced below.
        """
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
        # No existing ir_model row: allocate an id and insert one.
        cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
        model_id = cr.fetchone()[0]
        cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
        model_id = cr.fetchone()[0]
        if 'module' in context:
            # Give the model itself an external id (ir_model_data entry).
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
            cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                (name_id, context['module'], 'ir.model', model_id)

        # Load the fields already registered so they can be compared below.
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        for (k, f) in self._columns.items():
                'model_id': model_id,
                'field_description': f.string.replace("'", " "),
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': (f._type=='one2many' and isinstance(f, fields.one2many)) and f._fields_id or '',
            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                    vals['select_level'] = cols[k]['select_level']

                # Unknown field: allocate an id and insert a fresh row.
                cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
                id = cr.fetchone()[0]
                cr.execute("""INSERT INTO ir_model_fields (
                    id, model_id, model, name, field_description, ttype,
                    relation,view_load,state,select_level,relation_field, translate
                    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                    id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                    vals['relation'], bool(vals['view_load']), 'base',
                    vals['select_level'], vals['relation_field'], bool(vals['translate'])
                if 'module' in context:
                    # External id for the field; suffix with the db id when
                    # the plain name is already taken.
                    name1 = 'field_' + self._table + '_' + k
                    cr.execute("select name from ir_model_data where name=%s", (name1,))
                        name1 = name1 + "_" + str(id)
                    cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                        (name1, context['module'], 'ir.model.fields', id)
                for key, val in vals.items():
                    if cols[k][key] != vals[key]:
                        # A value changed: refresh the stored definition.
                        cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                        cr.execute("""UPDATE ir_model_fields SET
                            model_id=%s, field_description=%s, ttype=%s, relation=%s,
                            view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s
                            model=%s AND name=%s""", (
                            vals['model_id'], vals['field_description'], vals['ttype'],
                            vals['relation'], bool(vals['view_load']),
                            vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['model'], vals['name']
    # Goal: try to apply inheritance at the instanciation level and
    # put objects in the pool var
    def create_instance(cls, pool, cr):
        """ Instanciate a given model.

        This class method instanciates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        The ``attributes`` argument specifies which parent class attributes
        are combined.

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.

        NOTE(review): the decorator line and several statements of this
        method are elided in this excerpt; only the visible lines are
        reproduced below.
        """
        attributes = ['_columns', '_defaults', '_inherits', '_constraints',
        parent_names = getattr(cls, '_inherit', None)
        if isinstance(parent_names, (str, unicode)):
            name = cls._name or parent_names
            parent_names = [parent_names]
                raise TypeError('_name is mandatory in case of multiple inheritance')

        for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
            parent_model = pool.get(parent_name)
            if not getattr(cls, '_original_module', None) and name == parent_model._name:
                cls._original_module = parent_model._original_module
                raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                    'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
            parent_class = parent_model.__class__
                new = copy.copy(getattr(parent_model, s, {}))
                    # Don't _inherit custom fields.
                if hasattr(new, 'update'):
                    new.update(cls.__dict__.get(s, {}))
                elif s=='_constraints':
                    for c in cls.__dict__.get(s, []):
                        for c2 in range(len(new)):
                            #For _constraints, we should check field and methods as well
                            if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                    or getattr(new[c2][0],'__name__', True) == \
                                        getattr(c[0],'__name__', False)):
                                # If new class defines a constraint with
                                # same function name, we let it override
                    new.extend(cls.__dict__.get(s, []))
            # Build the combined class and instanciate it (without discovery).
            cls = type(name, (cls, parent_class), dict(nattr, _register=False))
        if not getattr(cls, '_original_module', None):
            cls._original_module = cls._module
        obj = object.__new__(cls)
        obj.__init__(pool, cr)
        """Register this model.

        This doesn't create an instance but simply register the model
        as being part of the module where it is defined.

        NOTE(review): the enclosing ``def`` line is elided in this excerpt
        (presumably ``__new__(cls)`` — confirm against full source).
        """

        # Set the module name (e.g. base, sale, accounting, ...) on the class.
        module = cls.__module__.split('.')[0]
        if not hasattr(cls, '_module'):

        # Record this class in the list of models to instantiate for this module,
        # managed by the metaclass.
        module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
        if cls not in module_model_list:
            module_model_list.append(cls)

        # Since we don't return an instance here, the __init__
        # method won't be called.
923 def __init__(self, pool, cr):
924 """ Initialize a model and make it part of the given registry.
926 - copy the stored fields' functions in the osv_pool,
927 - update the _columns with the fields found in ir_model_fields,
928 - ensure there is a many2one for each _inherits'd parent,
929 - update the children's _columns,
930 - give a chance to each field to initialize itself.
933 pool.add(self._name, self)
936 if not self._name and not hasattr(self, '_inherit'):
937 name = type(self).__name__.split('.')[0]
938 msg = "The class %s has to have a _name attribute" % name
940 logger = netsvc.Logger()
941 logger.notifyChannel('orm', netsvc.LOG_ERROR, msg)
942 raise except_orm('ValueError', msg)
944 if not self._description:
945 self._description = self._name
947 self._table = self._name.replace('.', '_')
949 if not hasattr(self, '_log_access'):
950 # If _log_access is not specified, it is the same value as _auto.
951 self._log_access = getattr(self, "_auto", True)
953 self._columns = self._columns.copy()
954 for store_field in self._columns:
955 f = self._columns[store_field]
956 if hasattr(f, 'digits_change'):
958 def not_this_field(stored_func):
959 x, y, z, e, f, l = stored_func
960 return x != self._name or y != store_field
961 self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
962 if not isinstance(f, fields.function):
968 sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
969 for object, aa in sm.items():
971 (fnct, fields2, order, length) = aa
973 (fnct, fields2, order) = aa
976 raise except_orm('Error',
977 ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
978 self.pool._store_function.setdefault(object, [])
979 self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
980 self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
982 for (key, _, msg) in self._sql_constraints:
983 self.pool._sql_error[self._table+'_'+key] = msg
987 cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
989 cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
990 for field in cr.dictfetchall():
991 if field['name'] in self._columns:
994 'string': field['field_description'],
995 'required': bool(field['required']),
996 'readonly': bool(field['readonly']),
997 'domain': eval(field['domain']) if field['domain'] else None,
998 'size': field['size'],
999 'ondelete': field['on_delete'],
1000 'translate': (field['translate']),
1002 #'select': int(field['select_level'])
1005 if field['ttype'] == 'selection':
1006 self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
1007 elif field['ttype'] == 'reference':
1008 self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
1009 elif field['ttype'] == 'many2one':
1010 self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
1011 elif field['ttype'] == 'one2many':
1012 self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
1013 elif field['ttype'] == 'many2many':
1014 _rel1 = field['relation'].replace('.', '_')
1015 _rel2 = field['model'].replace('.', '_')
1016 _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
1017 self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
1019 self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
1020 self._inherits_check()
1021 self._inherits_reload()
1022 if not self._sequence:
1023 self._sequence = self._table + '_id_seq'
1024 for k in self._defaults:
1025 assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
1026 for f in self._columns:
1027 self._columns[f].restart()
1030 if self.is_transient():
1031 self._transient_check_count = 0
1032 self._transient_max_count = config.get('osv_memory_count_limit')
1033 self._transient_max_hours = config.get('osv_memory_age_limit')
1034 assert self._log_access, "TransientModels must have log_access turned on, "\
1035 "in order to implement their access rights policy"
def __export_row(self, cr, uid, row, fields, context=None):
    """Export one browse_record as a list of rows; extra rows come from
    one2many sub-records. NOTE(review): several lines of this method are
    missing from this chunk of the file — gap notes below are review
    comments, not code.

    :param row: browse_record to export
    :param fields: list of field paths (each path a list of components)
    :return: list of rows, the first one being the main record
    """
    def check_type(field_type):
        # Type-appropriate "empty" value (branch bodies missing from chunk).
        if field_type == 'float':
        elif field_type == 'integer':
        elif field_type == 'boolean':

    def selection_field(in_field):
        # Resolve the column object for f[i] through the _inherits chain.
        col_obj = self.pool.get(in_field.keys()[0])
        if f[i] in col_obj._columns.keys():
            return col_obj._columns[f[i]]
        elif f[i] in col_obj._inherits.keys():
            # NOTE(review): recursion result is not returned here —
            # falls through to None; confirm against full source.
            selection_field(col_obj._inherits)

    # One output cell per requested field path.
    data = map(lambda x: '', range(len(fields)))
    for fpos in range(len(fields)):
        # --- 'id' component: export an existing external (XML) id… ---
        model_data = self.pool.get('ir.model.data')
        data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
        d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
        r = '%s.%s' % (d['module'], d['name'])
        # …or mint a unique '__export__' xml id when none exists yet.
        n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
        if not model_data.search(cr, uid, [('name', '=', n)]):
        model_data.create(cr, uid, {
            'model': self._name,
            'module': '__export__',
        # To display external name of selection field when its exported
        if f[i] in self._columns.keys():
            cols = self._columns[f[i]]
        elif f[i] in self._inherit_fields.keys():
            cols = selection_field(self._inherits)
        if cols and cols._type == 'selection':
            sel_list = cols.selection
            if r and type(sel_list) == type([]):
                # Replace the stored key by its human-readable label.
                r = [x[1] for x in sel_list if r==x[0]]
                r = r and r[0] or False
        # Empty value: substitute the type-appropriate default.
        if f[i] in self._columns:
            r = check_type(self._columns[f[i]]._type)
        elif f[i] in self._inherit_fields:
            r = check_type(self._inherit_fields[f[i]][2]._type)
        data[fpos] = r or False
        if isinstance(r, (browse_record_list, list)):
            # x2many value: either recurse with the remaining sub-paths…
            fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
            if [x for x in fields2 if x]:
                done.append(fields2)
                lines2 = self.__export_row(cr, uid, row2, fields2,
                # Merge the first sub-row's cells into the main row.
                for fpos2 in range(len(fields)):
                    if lines2 and lines2[0][fpos2]:
                        data[fpos2] = lines2[0][fpos2]
            # …or join the sub-records' display names with commas.
            name_relation = self.pool.get(rr._table_name)._rec_name
            if isinstance(rr[name_relation], browse_record):
                rr = rr[name_relation]
            rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
            rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
            dt += tools.ustr(rr_name or '') + ','
            # Drop the trailing comma.
            data[fpos] = dt[:-1]
        # many2one value: export the record's display name.
        if isinstance(r, browse_record):
            r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
            r = r and r[0] and r[0][1] or ''
        data[fpos] = tools.ustr(r or '')
    return [data] + lines
def export_data(self, cr, uid, ids, fields_to_export, context=None):
    """
    Export fields for selected objects

    :param cr: database cursor
    :param uid: current user id
    :param ids: list of ids
    :param fields_to_export: list of fields
    :param context: context arguments, like lang, time zone
    :rtype: dictionary with a *datas* matrix

    This method is used when exporting data via client menu
    """
    # Merge own columns with inherited ones so dotted export paths can
    # resolve fields coming from _inherits parents.
    cols = self._columns.copy()
    for f in self._inherit_fields:
        cols.update({f: self._inherit_fields[f][2]})
    # Normalize 'id'/'.id' components in the requested field paths.
    fields_to_export = map(fix_import_export_id_paths, fields_to_export)
    # NOTE(review): the 'datas' accumulator initialisation is not visible
    # in this chunk of the file.
    for row in self.browse(cr, uid, ids, context):
        datas += self.__export_row(cr, uid, row, fields_to_export, context)
    return {'datas': datas}
def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
    """
    Import given data in given module

    This method is used when importing data via client menu.

    Example of fields to import for a sale.order::

        partner_id, (=name_search)
        order_line/.id, (=database_id)
        order_line/product_id/id, (=xml id)
        order_line/price_unit,
        order_line/product_uom_qty,
        order_line/product_uom/id (=xml_id)

    This method returns a 4-tuple with the following structure:

    * The first item is a return code, it returns either ``-1`` in case o

    :param cr: database cursor
    :param uid: current user id
    :param fields: list of fields
    :param data: data to import
    :param mode: 'init' or 'update' for record creation
    :param current_module: module name
    :param noupdate: flag for record creation
    :param context: context arguments, like lang, time zone,
    :param filename: optional file to store partial import state for recovery
    :returns: 4-tuple of a return code, an errored resource, an error message and ???
    :rtype: (int, dict|0, str|0, ''|0)
    """
    # NOTE(review): many guard/else/init lines of this method are missing
    # from this chunk of the file; inline gap notes below are review
    # comments, not code.
    # Normalize 'id'/'.id' markers in the requested column paths.
    fields = map(fix_import_export_id_paths, fields)
    logger = netsvc.Logger()
    ir_model_data_obj = self.pool.get('ir.model.data')

    # mode: id (XML id) or .id (database id) or False for name_get
    def _get_id(model_name, id, current_module=False, mode='id'):
        # Resolve one identifier cell to a database id; the mode-dispatch
        # headers of this helper are not visible in this chunk.
        # -- '.id' (database id) branch:
        obj_model = self.pool.get(model_name)
        ids = obj_model.search(cr, uid, [('id', '=', int(id))])
        raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, id))
        # -- 'id' (XML id) branch: 'module.xml_id', or bare xml_id taken
        #    from the importing module.
        module, xml_id = id.rsplit('.', 1)
        module, xml_id = current_module, id
        record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
        ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id'])
        if not ir_model_data:
            raise ValueError('No references to %s.%s' % (module, xml_id))
        id = ir_model_data[0]['res_id']
        # -- name_get fallback branch:
        obj_model = self.pool.get(model_name)
        ids = obj_model.name_search(cr, uid, id, operator='=', context=context)
        raise ValueError('No record found for %s' % (id,))

    # datas: a list of records, each record is defined by a list of values
    # prefix: a list of prefix fields ['line_ids']
    # position: the line to process, skip is False if it's the first line of the current record
    # (res, position, warning, res_id) with
    # res: the record for the next line to process (including it's one2many)
    # position: the new position for the next line
    # res_id: the ID of the record if it's a modification
    def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0, skip=0):
        line = datas[position]
        for i in range(len(fields)):
            # Ragged line: cell count differs from declared columns.
            raise Exception(_('Please check that all your lines have %d columns.'
                'Stopped around line %d having %d columns.') % \
                (len(fields), position+2, len(line)))
            # Ignore columns that belong to another prefix (sibling o2m).
            if field[:len(prefix)] <> prefix:
                if line[i] and skip:
            #set the mode for m2o, o2m, m2m : xml_id/id/name
            if len(field) == len(prefix)+1:
            mode = field[len(prefix)+1]

            # TODO: improve this by using csv.csv_reader
            def many_ids(line, relation, current_module, mode):
                # Resolve a csv_internal_sep-joined list of identifiers.
                for db_id in line.split(config.get('csv_internal_sep')):
                    res.append(_get_id(relation, db_id, current_module, mode))

            # ID of the record using a XML ID
            if field[len(prefix)]=='id':
                data_res_id = _get_id(model_name, line[i], current_module, 'id')

            # ID of the record using a database ID
            elif field[len(prefix)]=='.id':
                data_res_id = _get_id(model_name, line[i], current_module, '.id')

            # recursive call for getting children and returning [(0,0,{})] or [(1,ID,{})]
            if fields_def[field[len(prefix)]]['type']=='one2many':
                if field[len(prefix)] in done:
                done[field[len(prefix)]] = True
                relation = fields_def[field[len(prefix)]]['relation']
                relation_obj = self.pool.get(relation)
                newfd = relation_obj.fields_get( cr, uid, context=context )
                res = many_ids(line[i], relation, current_module, mode)
                # Consume following csv lines belonging to this child o2m.
                while pos < len(datas):
                    res2 = process_liness(self, datas, prefix + [field[len(prefix)]], current_module, relation_obj._name, newfd, pos, first)
                    (newrow, pos, w2, data_res_id2, xml_id2) = res2
                    nbrmax = max(nbrmax, pos)
                    # Link to an existing child record…
                    res.append((4, data_res_id2))
                    # …or create (0) / update (1) it with the parsed values.
                    if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
                    res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )

            elif fields_def[field[len(prefix)]]['type']=='many2one':
                relation = fields_def[field[len(prefix)]]['relation']
                res = _get_id(relation, line[i], current_module, mode)

            elif fields_def[field[len(prefix)]]['type']=='many2many':
                relation = fields_def[field[len(prefix)]]['relation']
                res = many_ids(line[i], relation, current_module, mode)

            elif fields_def[field[len(prefix)]]['type'] == 'integer':
                res = line[i] and int(line[i]) or 0
            elif fields_def[field[len(prefix)]]['type'] == 'boolean':
                res = line[i].lower() not in ('0', 'false', 'off')
            elif fields_def[field[len(prefix)]]['type'] == 'float':
                res = line[i] and float(line[i]) or 0.0
            elif fields_def[field[len(prefix)]]['type'] == 'selection':
                # Accept either the stored key or its display label.
                for key, val in fields_def[field[len(prefix)]]['selection']:
                    if tools.ustr(line[i]) in [tools.ustr(key), tools.ustr(val)]:
                if line[i] and not res:
                    logger.notifyChannel("import", netsvc.LOG_WARNING,
                            _("key '%s' not found in selection field '%s'") % \
                                    (tools.ustr(line[i]), tools.ustr(field[len(prefix)])))
                    warning += [_("Key/value '%s' not found in selection field '%s'") % (tools.ustr(line[i]), tools.ustr(field[len(prefix)]))]

            row[field[len(prefix)]] = res or False

        result = (row, nbrmax, warning, data_res_id, xml_id)

    fields_def = self.fields_get(cr, uid, context=context)

    # Resume a previously interrupted import if partial-import is enabled.
    if config.get('import_partial', False) and filename:
        data = pickle.load(file(config.get('import_partial')))

    while position<len(datas):
        (res, position, warning, res_id, xml_id) = \
                process_liness(self, datas, [], current_module, self._name, fields_def, position=position)
        # Abort on parse warnings, reporting the offending line.
        return (-1, res, 'Line ' + str(position) +' : ' + '!\n'.join(warning), '')
        # Create or update the record and its xml id.
        ir_model_data_obj._update(cr, uid, self._name,
             current_module, res, mode=mode, xml_id=xml_id,
             noupdate=noupdate, res_id=res_id, context=context)
        except Exception, e:
            return (-1, res, 'Line ' + str(position) + ' : ' + tools.ustr(e), '')

        # Checkpoint the import position every 100 records for recovery.
        if config.get('import_partial', False) and filename and (not (position%100)):
            data = pickle.load(file(config.get('import_partial')))
            data[filename] = position
            pickle.dump(data, file(config.get('import_partial'), 'wb'))
            if context.get('defer_parent_store_computation'):
                self._parent_store_compute(cr)

    # Recompute the deferred parent_store once at the end, if requested.
    if context.get('defer_parent_store_computation'):
        self._parent_store_compute(cr)
    return (position, 0, 0, 0)
def get_invalid_fields(self, cr, uid):
    """Return the names of the fields that failed the last validation pass.

    :param cr: database cursor (unused)
    :param uid: current user id (unused)
    :return: list of invalid field names accumulated in ``self._invalids``
    """
    invalid_field_names = [field_name for field_name in self._invalids]
    return invalid_field_names
def _validate(self, cr, uid, ids, context=None):
    # Run the python constraints in self._constraints over `ids`, collect
    # translated error messages and record the offending field names in
    # self._invalids. NOTE(review): a few lines (accumulator init, some
    # else/append statements) are missing from this chunk of the file.
    context = context or {}
    lng = context.get('lang', False) or 'en_US'
    trans = self.pool.get('ir.translation')
    for constraint in self._constraints:
        fun, msg, fields = constraint
        if not fun(self, cr, uid, ids):
            # Check presence of __call__ directly instead of using
            # callable() because it will be deprecated as of Python 3.0
            if hasattr(msg, '__call__'):
                # Callable message: may return (template, params) or a str.
                tmp_msg = msg(self, cr, uid, ids, context=context)
                if isinstance(tmp_msg, tuple):
                    tmp_msg, params = tmp_msg
                    translated_msg = tmp_msg % params
                translated_msg = tmp_msg
            # Plain string message: translate it as a 'constraint' source.
            translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg) or msg
            _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
            self._invalids.update(fields)
    # Any failure aborts validation with a single combined exception;
    # otherwise the invalid-field set is reset.
    raise except_orm('ValidateError', '\n'.join(error_msgs))
    self._invalids.clear()
def default_get(self, cr, uid, fields_list, context=None):
    """
    Returns default values for the fields in fields_list.

    :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
    :type fields_list: list
    :param context: optional context dictionary - it may contains keys for specifying certain options
                    like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                    It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                    or override a default value for a field.
                    A special ``bin_size`` boolean flag may also be passed in the context to request the
                    value of all fields.binary columns to be returned as the size of the binary instead of its
                    contents. This can also be selectively overriden by passing a field-specific flag
                    in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                    Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
    :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
    """
    # NOTE(review): several lines (context/defaults initialisation, some
    # else/continue statements) are missing from this chunk of the file.
    # trigger view init hook
    self.view_init(cr, uid, fields_list, context)

    # get the default values for the inherited fields
    for t in self._inherits.keys():
        defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,

    # get the default values defined in the object
    for f in fields_list:
        if f in self._defaults:
            # Defaults may be callables (model, cr, uid, context) or
            # plain values.
            if callable(self._defaults[f]):
                defaults[f] = self._defaults[f](self, cr, uid, context)
            defaults[f] = self._defaults[f]
        # Resolve the column locally or through _inherits.
        fld_def = ((f in self._columns) and self._columns[f]) \
                or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
        if isinstance(fld_def, fields.property):
            # Property fields take their default from ir.property.
            property_obj = self.pool.get('ir.property')
            prop_value = property_obj.get(cr, uid, f, self._name, context=context)
            if isinstance(prop_value, (browse_record, browse_null)):
                defaults[f] = prop_value.id
            defaults[f] = prop_value
        if f not in defaults:

    # get the default values set by the user and override the default
    # values defined in the object
    ir_values_obj = self.pool.get('ir.values')
    res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
    for id, field, field_value in res:
        if field in fields_list:
            fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
            if fld_def._type in ('many2one', 'one2one'):
                # Drop dangling references to deleted records.
                obj = self.pool.get(fld_def._obj)
                if not obj.search(cr, uid, [('id', '=', field_value or False)]):
            if fld_def._type in ('many2many'):
                # Keep only the ids that still exist.
                obj = self.pool.get(fld_def._obj)
                for i in range(len(field_value)):
                    if not obj.search(cr, uid, [('id', '=',
                    field_value2.append(field_value[i])
                field_value = field_value2
            if fld_def._type in ('one2many'):
                # Filter the sub-record values, dropping dangling m2o refs.
                obj = self.pool.get(fld_def._obj)
                for i in range(len(field_value)):
                    field_value2.append({})
                    for field2 in field_value[i]:
                        if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'):
                            obj2 = self.pool.get(obj._columns[field2]._obj)
                            if not obj2.search(cr, uid,
                                    [('id', '=', field_value[i][field2])]):
                        elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type in ('many2one', 'one2one'):
                            obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                            if not obj2.search(cr, uid,
                                    [('id', '=', field_value[i][field2])]):
                        # TODO add test for many2many and one2many
                        field_value2[i][field2] = field_value[i][field2]
                field_value = field_value2
            defaults[field] = field_value

    # get the default values from the context
    for key in context or {}:
        if key.startswith('default_') and (key[8:] in fields_list):
            defaults[key[8:]] = context[key]
def fields_get_keys(self, cr, user, context=None):
    """Return the names of all fields of the model, including fields
    inherited through the _inherits parents. NOTE(review): the final
    return statement is not visible in this chunk of the file."""
    res = self._columns.keys()
    # TODO I believe this loop can be replace by
    # res.extend(self._inherit_fields.key())
    for parent in self._inherits:
        res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
# Overload this method if you need a window title which depends on the context
def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
    """Hook returning a context-dependent window title for a view.
    NOTE(review): the default body is not visible in this chunk of the
    file — presumably returns False; confirm against the full source."""
def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
    """ Return the description of the fields in the node.

    In a normal call to this method, node is a complete view architecture
    but it is actually possible to give some sub-node (this is used so
    that the method can call itself recursively).

    Originally, the field descriptions are drawn from the node itself.
    But there is now some code calling fields_get() in order to merge some
    of those information in the architecture.

    NOTE(review): several lines (the nested encode() header, locals
    initialisation, some else branches) are missing from this chunk of
    the file; gap notes below are review comments, not code.
    """
    # Body of the (header-missing) nested encode() helper: force utf-8 bytes.
    if isinstance(s, unicode):
        return s.encode('utf8')

    def check_group(node):
        """ Set invisible to true if the user is not in the specified groups. """
        if node.get('groups'):
            groups = node.get('groups').split(',')
            ir_model_access = self.pool.get('ir.model.access')
            can_see = any(ir_model_access.check_groups(cr, user, group) for group in groups)
            node.set('invisible', '1')
            modifiers['invisible'] = True
            if 'attrs' in node.attrib:
                del(node.attrib['attrs']) #avoid making field visible later
            del(node.attrib['groups'])

    if node.tag in ('field', 'node', 'arrow'):
        if node.get('object'):
            # Embedded process node: build a sub-view from the child
            # 'field' elements and merge the sub-model's descriptions.
            if f.tag in ('field'):
                xml += etree.tostring(f, encoding="utf-8")
            new_xml = etree.fromstring(encode(xml))
            ctx = context.copy()
            ctx['base_model_name'] = self._name
            xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
            attrs = {'views': views}

        if node.get('name'):
            # Resolve the column object, locally or through _inherits.
            if node.get('name') in self._columns:
                column = self._columns[node.get('name')]
            column = self._inherit_fields[node.get('name')][2]
            relation = self.pool.get(column._obj)
            # Inline sub-views (form/tree/graph) declared under the field.
            if f.tag in ('form', 'tree', 'graph'):
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                views[str(f.tag)] = {
            attrs = {'views': views}
            if node.get('widget') and node.get('widget') == 'selection':
                # Prepare the cached selection list for the client. This needs to be
                # done even when the field is invisible to the current user, because
                # other events could need to change its value to any of the selectable ones
                # (such as on_change events, refreshes, etc.)

                # If domain and context are strings, we keep them for client-side, otherwise
                # we evaluate them server-side to consider them when generating the list of
                # TODO: find a way to remove this hack, by allow dynamic domains
                if column._domain and not isinstance(column._domain, basestring):
                    dom = column._domain
                dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                search_context = dict(context)
                if column._context and not isinstance(column._context, basestring):
                    search_context.update(column._context)
                attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                # Offer an empty choice when the field is not required.
                if (node.get('required') and not int(node.get('required'))) or not column.required:
                    attrs['selection'].append((False, ''))
        fields[node.get('name')] = attrs

        field = model_fields.get(node.get('name'))
        transfer_field_to_modifiers(field, modifiers)

    elif node.tag in ('form', 'tree'):
        # Give top-level views a context-dependent title, if any.
        result = self.view_header_get(cr, user, False, node.tag, context)
        node.set('string', result)
        in_tree_view = node.tag == 'tree'

    elif node.tag == 'calendar':
        # Calendar attributes reference extra field names to fetch.
        for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
            if node.get(additional_field):
                fields[node.get(additional_field)] = {}

    # The view architeture overrides the python model.
    # Get the attrs before they are (possibly) deleted by check_group below
    transfer_node_to_modifiers(node, modifiers, context, in_tree_view)

    # TODO remove attrs couterpart in modifiers when invisible is true ?

    if 'lang' in context:
        # Translate user-visible attributes (string/confirm/sum/help).
        if node.get('string') and not result:
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
            if trans == node.get('string') and ('base_model_name' in context):
                # If translation is same as source, perhaps we'd have more luck with the alternative model name
                # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
            node.set('string', trans)
        if node.get('confirm'):
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm'))
            node.set('confirm', trans)
        trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
        node.set('sum', trans)
        if node.get('help'):
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('help'))
            node.set('help', trans)

    # Recurse into children that may themselves declare fields.
    if children or (node.tag == 'field' and f.tag in ('filter','separator')):
        fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))

    transfer_modifiers_to_node(modifiers, node)
def _disable_workflow_buttons(self, cr, user, node):
    """ Set the buttons in node to readonly if the user can't activate them. """
    # NOTE(review): the admin shortcut branch and parts of the SQL string
    # (FROM/WHERE clauses) are missing from this chunk of the file.
    # admin user can always activate workflow buttons

    # TODO handle the case of more than one workflow for a model or multiple
    # transitions with different groups and same signal
    usersobj = self.pool.get('res.users')
    # Only workflow buttons (type != 'object') are access-checked here.
    buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
    for button in buttons:
        user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
        # Groups allowed to fire the transition bound to this button's signal.
        cr.execute("""SELECT DISTINCT t.group_id
                    INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                    INNER JOIN wkf_transition t ON (t.act_to = a.id)
                      AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
        group_ids = [x[0] for x in cr.fetchall() if x[0]]
        # Clickable iff unrestricted, or the user belongs to an allowed group.
        can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
        button.set('readonly', str(int(not can_click)))
def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
    """ Return an architecture and a description of all the fields.

    The field description combines the result of fields_get() and
    the per-node descriptions gathered by __view_look_dom().

    :param node: the architecture as as an etree
    :return: a tuple (arch, fields) where arch is the given node as a
             string and fields is the description of all the fields.

    NOTE(review): a few lines (fields init, else/del statements, the
    'id' field test) are missing from this chunk of the file.
    """
    if node.tag == 'diagram':
        # Diagram views take their fields from the node and arrow models.
        if node.getchildren()[0].tag == 'node':
            node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, None, context)
            fields.update(node_fields)
        if node.getchildren()[1].tag == 'arrow':
            arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
            fields.update(arrow_fields)
    fields = self.fields_get(cr, user, None, context)
    fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
    # Disable workflow buttons the user may not click, then serialize.
    node = self._disable_workflow_buttons(cr, user, node)
    arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
    # Keep only fields actually referenced by the view.
    for k in fields.keys():
        if k not in fields_def:
    for field in fields_def:
        # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
        fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
        elif field in fields:
            fields[field].update(fields_def[field])
        # Unknown field: identify the offending view parts and abort.
        cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
        res = cr.fetchall()[:]
        res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
        msg = "\n * ".join([r[0] for r in res])
        msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
        netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
        raise except_orm('View error', msg)
def _get_default_form_view(self, cr, user, context=None):
    """ Generates a default single-line form view using all fields
    of the current model except the m2m and o2m ones.

    :param cr: database cursor
    :param int user: user id
    :param dict context: connection context
    :returns: a form view as an lxml document
    :rtype: etree._Element
    """
    # NOTE(review): the skip-continue and the final return statement are
    # missing from this chunk of the file.
    view = etree.Element('form', string=self._description)
    # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
    for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
        if descriptor['type'] in ('one2many', 'many2many'):
        etree.SubElement(view, 'field', name=field)
        # Text fields get their own line.
        if descriptor['type'] == 'text':
            etree.SubElement(view, 'newline')
def _get_default_tree_view(self, cr, user, context=None):
    """ Generates a single-field tree view, using _rec_name if
    it's one of the columns or the first column it finds otherwise

    :param cr: database cursor
    :param int user: user id
    :param dict context: connection context
    :returns: a tree view as an lxml document
    :rtype: etree._Element
    """
    # NOTE(review): the final return statement is missing from this chunk.
    _rec_name = self._rec_name
    if _rec_name not in self._columns:
        # Fall back to an arbitrary (first) column.
        _rec_name = self._columns.keys()[0]

    view = etree.Element('tree', string=self._description)
    etree.SubElement(view, 'field', name=_rec_name)
def _get_default_calendar_view(self, cr, user, context=None):
    """ Generates a default calendar view by trying to infer
    calendar fields from a number of pre-set attribute names

    :param cr: database cursor
    :param int user: user id
    :param dict context: connection context
    :returns: a calendar view
    :rtype: etree._Element
    """
    # NOTE(review): the set_first_of() body, some guard lines and the
    # final return are missing from this chunk of the file.
    def set_first_of(seq, in_, to):
        """Sets the first value of ``seq`` also found in ``in_`` to
        the ``to`` attribute of the view being closed over.

        Returns whether it's found a suitable value (and set it on
        the attribute) or not
        """

    view = etree.Element('calendar', string=self._description)
    etree.SubElement(view, 'field', name=self._rec_name)

    # Infer the start-date field from conventional column names.
    if (self._date_name not in self._columns):
        for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
            if dt in self._columns:
                self._date_name = dt
    # No usable date field found: a calendar view cannot be generated.
    raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
    view.set('date_start', self._date_name)

    # Optional color grouping field.
    set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
        self._columns, 'color')

    # An end is required: either a stop date or a duration field.
    if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
                        self._columns, 'date_stop'):
        if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
                            self._columns, 'date_delay'):
            _('Invalid Object Architecture!'),
            _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % (self._name)))
def _get_default_search_view(self, cr, uid, context=None):
    """
    Build a default <search> view from the searchable fields of the
    default form and tree views.

    :param cr: database cursor
    :param int user: user id
    :param dict context: connection context
    :returns: an lxml document of the view
    :rtype: etree._Element
    """
    # NOTE(review): the final return statement is missing from this chunk.
    form_view = self.fields_view_get(cr, uid, False, 'form', context=context)
    tree_view = self.fields_view_get(cr, uid, False, 'tree', context=context)

    # TODO it seems _all_columns could be used instead of fields_get (no need for translated fields info)
    fields = self.fields_get(cr, uid, context=context)
    # Start with fields flagged searchable ('select') on the model…
    fields_to_search = set(
        field for field, descriptor in fields.iteritems()
        if descriptor.get('select'))

    # …then add fields flagged select=1 directly in the view archs.
    for view in (form_view, tree_view):
        view_root = etree.fromstring(view['arch'])
        # Only care about select=1 in xpath below, because select=2 is covered
        # by the custom advanced search in clients
        fields_to_search.update(view_root.xpath("//field[@select=1]/@name"))

    tree_view_root = view_root # as provided by loop above
    search_view = etree.Element("search", string=tree_view_root.get("string", ""))

    field_group = etree.SubElement(search_view, "group")
    for field_name in fields_to_search:
        etree.SubElement(field_group, "field", name=field_name)
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Get the detailed composition of the requested view like fields, model, view architecture

        :param cr: database cursor
        :param user: current user id
        :param view_id: id of the view or None
        :param view_type: type of the view to return if view_id is None ('form', tree', ...)
        :param context: context arguments, like lang, time zone
        :param toolbar: true to include contextual actions
        :param submenu: deprecated
        :return: dictionary describing the composition of the requested view (including inherited views and extensions)
        :raise AttributeError:
            * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
            * if some tag other than 'position' is found in parent view
        :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
        """
        # NOTE(review): several lines of this method appear elided in this copy
        # (e.g. the `def encode(s):` header the next two lines belong to, and
        # various branch/return lines below) -- confirm against upstream source.
        # encode(): coerce a unicode arch to utf-8 bytes before lxml parsing.
        if isinstance(s, unicode):
            return s.encode('utf8')

        def raise_view_error(error_msg, child_view_id):
            # Report a view-inheritance problem, naming both the inheriting
            # (child) view and the model so the offending XML can be located.
            view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
            raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
                                 % (child_view.xml_id, self._name, error_msg))

        def locate(source, spec):
            """ Locate a node in a source (parent) architecture.

            Given a complete source (parent) architecture (i.e. the field
            `arch` in a view), and a 'spec' node (a node in an inheriting
            view that specifies the location in the source view of what
            should be changed), return (if it exists) the node in the
            source view matching the specification.

            :param source: a parent architecture to modify
            :param spec: a modifying node in an inheriting view
            :return: a node in the source matching the spec
            """
            if spec.tag == 'xpath':
                # direct XPath lookup; first match wins
                nodes = source.xpath(spec.get('expr'))
                return nodes[0] if nodes else None
            elif spec.tag == 'field':
                # Only compare the field name: a field can be only once in a given view
                # at a given level (and for multilevel expressions, we should use xpath
                # inheritance spec anyway).
                for node in source.getiterator('field'):
                    if node.get('name') == spec.get('name'):
                # fallback branch (original `else:` elided): match any node with
                # the same tag whose attributes (except 'position') all agree
                for node in source.getiterator(spec.tag):
                    for attr in spec.attrib:
                        if attr != 'position' and (not node.get(attr) or node.get(attr) != spec.get(attr)):

        def apply_inheritance_specs(source, specs_arch, inherit_id=None):
            """ Apply an inheriting view.

            Apply to a source architecture all the spec nodes (i.e. nodes
            describing where and what changes to apply to some parent
            architecture) given by an inheriting view.

            :param source: a parent architecture to modify
            :param specs_arch: a modifying architecture in an inheriting view
            :param inherit_id: the database id of the inheriting view
            :return: a modified source where the specs are applied
            """
            specs_tree = etree.fromstring(encode(specs_arch))
            # Queue of specification nodes (i.e. nodes describing where and
            # changes to apply to some parent architecture).
            specs = [specs_tree]
            # loop header over `specs` appears elided; `spec` is the current node
            if isinstance(spec, SKIPPED_ELEMENT_TYPES):
            if spec.tag == 'data':
                # a <data> wrapper just contributes its children as more specs
                specs += [ c for c in specs_tree ]
            node = locate(source, spec)
            if node is not None:
                # position defaults to 'inside' (append as last child)
                pos = spec.get('position', 'inside')
                if pos == 'replace':
                    if node.getparent() is None:
                        # replacing the root: the spec's first child becomes the new tree
                        source = copy.deepcopy(spec[0])
                        node.addprevious(child)
                    node.getparent().remove(node)
                elif pos == 'attributes':
                    # <attribute name="...">value</attribute> children set or
                    # (when empty) delete attributes on the located node
                    for child in spec.getiterator('attribute'):
                        attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                        node.set(attribute[0], attribute[1])
                        del(node.attrib[attribute[0]])
                    sib = node.getnext()
                elif pos == 'after':
                        sib.addprevious(child)
                elif pos == 'before':
                    node.addprevious(child)
                    raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
                # build a printable "<tag attr=...>" description of the spec
                # that could not be located, for the error message below
                ' %s="%s"' % (attr, spec.get(attr))
                for attr in spec.attrib
                if attr != 'position'
            tag = "<%s%s>" % (spec.tag, attrs)
            raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)

        def apply_view_inheritance(cr, user, source, inherit_id):
            """ Apply all the (directly and indirectly) inheriting views.

            :param source: a parent architecture to modify (with parent
                modifications already applied)
            :param inherit_id: the database view_id of the parent view
            :return: a modified source where all the modifying architecture
            """
            # recurse depth-first over the view-inheritance tree
            sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
            for (view_arch, view_id) in sql_inherit:
                source = apply_inheritance_specs(source, view_arch, view_id)
                source = apply_view_inheritance(cr, user, source, view_id)

        result = {'type': view_type, 'model': self._name}

        parent_view_model = None
        # a '<view_type>_view_ref' context key may force a specific root view
        view_ref = context.get(view_type + '_view_ref')
        # Search for a root (i.e. without any parent) view.
        if view_ref and not view_id:
            # view_ref is an xml id 'module.name'; resolve it via ir_model_data
            module, view_ref = view_ref.split('.', 1)
            cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
            view_ref_res = cr.fetchone()
            view_id = view_ref_res[0]

        # explicit view_id: load that view; otherwise pick the highest-priority
        # root view of the requested type for this model
        # NOTE(review): FROM clauses of both queries appear elided in this copy.
        cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                      WHERE id=%s""", (view_id,))
        cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                      WHERE model=%s AND type=%s AND inherit_id IS NULL
                      ORDER BY priority""", (self._name, view_type))
        sql_res = cr.dictfetchone()
        # climb up to the root view when the found view is itself an extension
        view_id = sql_res['inherit_id'] or sql_res['id']
        parent_view_model = sql_res['model']
        if not sql_res['inherit_id']:
        # if a view was found
        source = etree.fromstring(encode(sql_res['arch']))
        arch=apply_view_inheritance(cr, user, source, sql_res['id']),
        type=sql_res['type'],
        view_id=sql_res['id'],
        name=sql_res['name'],
        field_parent=sql_res['field_parent'] or False)
        # otherwise, build some kind of default view
        view = getattr(self, '_get_default_%s_view' % view_type)(
        except AttributeError:
            # what happens here, graph case?
            raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)

        # when the root view belongs to an _inherits parent model, expose that
        # model name so field post-processing can resolve inherited fields
        if parent_view_model != self._name:
            ctx = context.copy()
            ctx['base_model_name'] = parent_view_model
        # post-process the arch: check fields, apply access rights, translations
        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
        result['arch'] = xarch
        result['fields'] = xfields

        # strip heavy report payload columns before returning toolbar actions
        for key in ('report_sxw_content', 'report_rml_content',
                    'report_sxw', 'report_rml',
                    'report_sxw_content_data', 'report_rml_content_data'):
        # toolbar: contextual print / action / related-link entries bound to
        # this model through ir.values
        ir_values_obj = self.pool.get('ir.values')
        resprint = ir_values_obj.get(cr, user, 'action',
            'client_print_multi', [(self._name, False)], False,
        resaction = ir_values_obj.get(cr, user, 'action',
            'client_action_multi', [(self._name, False)], False,
        resrelate = ir_values_obj.get(cr, user, 'action',
            'client_action_relate', [(self._name, False)], False,
        # 'multi' actions only make sense on list (tree) views
        resaction = [clean(action) for action in resaction
                     if view_type == 'tree' or not action[2].get('multi')]
        resprint = [clean(print_) for print_ in resprint
                    if view_type == 'tree' or not print_[2].get('multi')]
        resrelate = map(lambda x: x[2], resrelate)

        for x in itertools.chain(resprint, resaction, resrelate):
            x['string'] = x['name']

        result['toolbar'] = {
            'action': resaction,
    # backward-compatible public alias for the name-mangled private helper
    _view_look_dom_arch = __view_look_dom_arch
    def search_count(self, cr, user, args, context=None):
        """Return the number of records matching the search domain ``args``.

        Delegates to :meth:`search` with ``count=True``.
        """
        res = self.search(cr, user, args, context=context, count=True)
        # search() with count=True may historically return a list of ids
        # instead of an int; normalize (the return lines appear elided here).
        if isinstance(res, list):
2161 def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
2163 Search for records based on a search domain.
2165 :param cr: database cursor
2166 :param user: current user id
2167 :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
2168 :param offset: optional number of results to skip in the returned values (default: 0)
2169 :param limit: optional max number of records to return (default: **None**)
2170 :param order: optional columns to sort by (default: self._order=id )
2171 :param context: optional context arguments, like lang, time zone
2172 :type context: dictionary
2173 :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
2174 :return: id or list of ids of records matching the criteria
2175 :rtype: integer or list of integers
2176 :raise AccessError: * if user tries to bypass access rules for read on the requested object.
2178 **Expressing a search domain (args)**
2180 Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
2182 * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
2183 * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
2184 The semantics of most of these operators are obvious.
2185 The ``child_of`` operator will look for records who are children or grand-children of a given record,
2186 according to the semantics of this model (i.e following the relationship field named by
2187 ``self._parent_name``, by default ``parent_id``.
2188 * **value** must be a valid value to compare with the values of **field_name**, depending on its type.
2190 Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
2191 These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
2192 Be very careful about this when you combine them the first time.
2194 Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
2196 [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))
2198 The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
2200 (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
2203 return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
    def name_get(self, cr, user, ids, context=None):
        """Returns the preferred display value (text representation) for the records with the
        given ``ids``. By default this will be the value of the ``name`` column, unless
        the model implements a custom behavior.
        Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not

        :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
        """
        # NOTE(review): the body of this branch appears elided here --
        # presumably it wraps a single id into a list; confirm upstream.
        if isinstance(ids, (int, long)):
        # read only the _rec_name column; load='_classic_write' keeps m2o
        # values as raw ids rather than (id, name) pairs
        return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
            [self._rec_name], context, load='_classic_write')]
2222 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
2223 """Search for records that have a display name matching the given ``name`` pattern if compared
2224 with the given ``operator``, while also matching the optional search domain (``args``).
2225 This is used for example to provide suggestions based on a partial value for a relational
2227 Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
2230 This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
2231 and then :meth:`~.name_get` on the result of the search.
2233 :param list args: optional search domain (see :meth:`~.search` for syntax),
2234 specifying further restrictions
2235 :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
2237 :param int limit: optional max number of records to return
2239 :return: list of pairs ``(id,text_repr)`` for all matching records.
2241 return self._name_search(cr, user, name, args, operator, context, limit)
2243 def name_create(self, cr, uid, name, context=None):
2244 """Creates a new record by calling :meth:`~.create` with only one
2245 value provided: the name of the new record (``_rec_name`` field).
2246 The new record will also be initialized with any default values applicable
2247 to this model, or provided through the context. The usual behavior of
2248 :meth:`~.create` applies.
2249 Similarly, this method may raise an exception if the model has multiple
2250 required fields and some do not have default values.
2252 :param name: name of the record to create
2255 :return: the :meth:`~.name_get` pair value for the newly-created record.
2257 rec_id = self.create(cr, uid, {self._rec_name: name}, context);
2258 return self.name_get(cr, uid, [rec_id], context)[0]
    # private implementation of name_search, allows passing a dedicated user for the name_get part to
    # solve some access rights issues
    def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
        # NOTE(review): the usual `args`/`context` default-normalization lines
        # and the final `return` appear elided in this copy.
        # optimize out the default criterion of ``ilike ''`` that matches everything
        if not (name == '' and operator == 'ilike'):
            args += [(self._rec_name, operator, name)]
        # search as `user`, but render names with `name_get_uid` when given, so
        # restricted users can still display names of records they reference
        access_rights_uid = name_get_uid or user
        ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
        res = self.name_get(cr, access_rights_uid, ids, context)
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        # Read the translated labels (field strings) of this model's fields for
        # each language in ``langs``; returns a per-language dict keyed by
        # field name (loop headers over langs/fields appear elided here).
        self.pool.get('ir.translation').check_read(cr, uid)
        fields = self._columns.keys() + self._inherit_fields.keys()
        #FIXME: collect all calls to _get_source into one SQL call.
        res[lang] = {'code': lang}
        if f in self._columns:
            res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
            # fall back to the untranslated field label when no translation exists
            res[lang][f] = res_trans
            res[lang][f] = self._columns[f].string
        # also collect labels coming from _inherits parent models
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
            res[lang]['code'] = lang
            for f in res2[lang]:
                res[lang][f] = res2[lang][f]
    def write_string(self, cr, uid, id, langs, vals, context=None):
        # Write translated labels (field strings) for this model's fields, one
        # entry per language (loop headers over langs/vals appear elided here).
        self.pool.get('ir.translation').check_write(cr, uid)
        #FIXME: try to only call the translation in one SQL
        if field in self._columns:
            # the untranslated label is passed as the translation source
            src = self._columns[field].string
            self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
        # propagate to _inherits parent models for their inherited fields
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), vals)
            self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
    def _add_missing_default_values(self, cr, uid, values, context=None):
        # Complete a create() ``values`` dict with default values for every
        # column (own or inherited) not already provided by the caller.
        missing_defaults = []
        avoid_tables = [] # avoid overriding inherited values when parent is set
        for tables, parent_field in self._inherits.items():
            if parent_field in values:
                avoid_tables.append(tables)
        for field in self._columns.keys():
            if not field in values:
                missing_defaults.append(field)
        for field in self._inherit_fields.keys():
            # skip inherited fields whose parent record is explicitly supplied
            if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
                missing_defaults.append(field)

        if len(missing_defaults):
            # override defaults with the provided values, never allow the other way around
            defaults = self.default_get(cr, uid, missing_defaults, context)
            # NOTE(review): the loop header binding ``dv`` over ``defaults``
            # and the final return appear elided in this copy.
            if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
                    and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
                # convert a plain list of ids into the (6, 0, ids) m2m command
                defaults[dv] = [(6, 0, defaults[dv])]
            if (dv in self._columns and self._columns[dv]._type == 'one2many' \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
                    and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
                # convert a list of value dicts into (0, 0, values) o2m commands
                defaults[dv] = [(0, 0, x) for x in defaults[dv]]
            defaults.update(values)
    def clear_caches(self):
        """ Clear the caches

        This clears the caches associated to methods decorated with
        ``tools.ormcache`` or ``tools.ormcache_multi``.
        """
        # models without any ormcache-decorated method have no _ormcache
        # attribute; the AttributeError is simply swallowed in that case
        # (the try:/clear lines appear elided in this copy)
        getattr(self, '_ormcache')
        except AttributeError:
    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
        """
        Get the list of records in list view grouped by the given ``groupby`` fields

        :param cr: database cursor
        :param uid: current user id
        :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
        :param list fields: list of fields present in the list view specified on the object
        :param list groupby: fields by which the records will be grouped
        :param int offset: optional number of records to skip
        :param int limit: optional max number of records to return
        :param dict context: context arguments, like lang, time zone
        :param list orderby: optional ``order by`` specification, for
                             overriding the natural sort ordering of the
                             groups, see also :py:meth:`~osv.osv.osv.search`
                             (supported only for many2one fields currently)
        :return: list of dictionaries(one dictionary for each record) containing:

                    * the values of fields grouped by the fields in ``groupby`` argument
                    * __domain: list of tuples specifying the search criteria
                    * __context: dictionary with argument like ``groupby``
        :rtype: [{'field_name_1': value, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        context = context or {}
        self.check_read(cr, uid)
        # fallback to all columns (the guarding "if not fields:" appears elided)
        fields = self._columns.keys()

        query = self._where_calc(cr, uid, domain, context=context)
        self._apply_ir_rules(cr, uid, query, 'read', context=context)

        # Take care of adding join(s) if groupby is an '_inherits'ed field
        groupby_list = groupby
        qualified_groupby_field = groupby
        if isinstance(groupby, list):
            # only the first groupby level is processed by read_group
            groupby = groupby[0]
            qualified_groupby_field = self._inherits_join_calc(groupby, query)

        assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
        groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
        assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"

        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fget = self.fields_get(cr, uid, fields)
        float_int_fields = filter(lambda x: fget[x]['type'] in ('float', 'integer'), fields)
        group_count = group_by = groupby
        if fget.get(groupby):
            if fget[groupby]['type'] in ('date', 'datetime'):
                # date(time) fields are grouped by month ('yyyy-mm' buckets)
                flist = "to_char(%s,'yyyy-mm') as %s " % (qualified_groupby_field, groupby)
                groupby = "to_char(%s,'yyyy-mm')" % (qualified_groupby_field)
                qualified_groupby_field = groupby
                flist = qualified_groupby_field
            # Don't allow arbitrary values, as this would be a SQL injection vector!
            raise except_orm(_('Invalid group_by'),
                _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))

        # add SQL aggregates (default: sum) of the numeric, stored columns
        fields_pre = [f for f in float_int_fields if
                   f == self.CONCURRENCY_CHECK_FIELD
                or (f in self._columns and getattr(self._columns[f], '_classic_write'))]
        for f in fields_pre:
            if f not in ['id', 'sequence']:
                group_operator = fget[f].get('group_operator', 'sum')
                qualified_field = '"%s"."%s"' % (self._table, f)
                flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)

        gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''

        from_clause, where_clause, where_clause_params = query.get_sql()
        where_clause = where_clause and ' WHERE ' + where_clause
        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
        # identifiers interpolated here come from validated model metadata, not
        # user input; only the where-clause params are passed to the driver
        cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
        for r in cr.dictfetchall():
            for fld, val in r.items():
                # normalize SQL NULLs to the ORM's conventional False
                if val == None: r[fld] = False
            alldata[r['id']] = r

        # re-search to apply ir.rules and the requested group ordering
        data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=orderby or groupby, context=context)
        # the IDS of records that have groupby field value = False or '' should be sorted too
        data_ids += filter(lambda x:x not in data_ids, alldata.keys())
        data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
        # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
        data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))

        # NOTE(review): the loop header binding ``d`` over ``data`` appears
        # elided in this copy; each group dict gets __domain/__context keys.
        d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
        if not isinstance(groupby_list, (str, unicode)):
            if groupby or not context.get('group_by_no_leaf', False):
                # remaining groupby levels are delegated to the client
                d['__context'] = {'group_by': groupby_list[1:]}
        if groupby and groupby in fget:
            if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
                # expand the 'yyyy-mm' bucket into a human label and a
                # [first day, last day] domain for drilling down
                dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
                days = calendar.monthrange(dt.year, dt.month)[1]
                d[groupby] = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d').strftime('%B %Y')
                d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
                                 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
            del alldata[d['id']][groupby]
        d.update(alldata[d['id']])
    def _inherits_join_add(self, current_table, parent_model_name, query):
        """
        Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)

        :param current_table: current model object
        :param parent_model_name: name of the parent model for which the clauses should be added
        :param query: query object on which the JOIN should be added
        """
        # the m2o column on the child table that materializes the _inherits link
        inherits_field = current_table._inherits[parent_model_name]
        parent_model = self.pool.get(parent_model_name)
        parent_table_name = parent_model._table
        quoted_parent_table_name = '"%s"' % parent_table_name
        if quoted_parent_table_name not in query.tables:
            # add the parent table once, joined on child.inherits_field = parent.id
            query.tables.append(quoted_parent_table_name)
            query.where_clause.append('(%s.%s = %s.id)' % (current_table._table, inherits_field, parent_table_name))
2494 def _inherits_join_calc(self, field, query):
2496 Adds missing table select and join clause(s) to ``query`` for reaching
2497 the field coming from an '_inherits' parent table (no duplicates).
2499 :param field: name of inherited field to reach
2500 :param query: query object on which the JOIN should be added
2501 :return: qualified name of field, to be used in SELECT clause
2503 current_table = self
2504 while field in current_table._inherit_fields and not field in current_table._columns:
2505 parent_model_name = current_table._inherit_fields[field][0]
2506 parent_table = self.pool.get(parent_model_name)
2507 self._inherits_join_add(current_table, parent_model_name, query)
2508 current_table = parent_table
2509 return '"%s".%s' % (current_table._table, field)
    def _parent_store_compute(self, cr):
        # Rebuild the parent_left/parent_right nested-set columns for the whole
        # table (used to answer child_of domain operators efficiently).
        if not self._parent_store:
        logger = netsvc.Logger()
        logger.notifyChannel('data', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
        def browse_rec(root, pos=0):
            # Depth-first traversal assigning interval bounds to each node;
            # children are visited in _parent_order when one is defined.
            # (the if/else around the two `where` forms appears elided here)
            where = self._parent_name+'='+str(root)
                where = self._parent_name+' IS NULL'
            if self._parent_order:
                where += ' order by '+self._parent_order
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            for id in cr.fetchall():
                pos2 = browse_rec(id[0], pos2)
            # NOTE: identifiers are model metadata; only values are parameterized
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
        # seed the traversal with the root records (no parent)
        query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
        if self._parent_order:
            query += ' order by ' + self._parent_order
        for (root,) in cr.fetchall():
            pos = browse_rec(root, pos)
    def _update_store(self, cr, f, k):
        # Recompute and persist the stored value of function field ``k``
        # (column object ``f``) for every record of the table, in batches
        # (the batching loop header appears elided here).
        logger = netsvc.Logger()
        logger.notifyChannel('data', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
        # _symbol_set = (sql placeholder, python->sql conversion callable)
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
            # consume the id list 40 records at a time
            ids_lst = ids_lst[40:]
            res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
            for key, val in res.items():
                # if val is a many2one, just write the ID
                if type(val) == tuple:
                # skip writing bare False (missing value) but keep real booleans
                if (val<>False) or (type(val)<>bool):
                    cr.execute(update_query, (ss[1](val), key))
    def _check_selection_field_value(self, cr, uid, field, value, context=None):
        """Raise except_orm if value is not among the valid values for the selection field"""
        if self._columns[field]._type == 'reference':
            # reference values look like 'model.name,id': validate that the
            # second half is a non-zero integer (try/except appears elided)
            val_model, val_id_str = value.split(',', 1)
            val_id = long(val_id_str)
                raise except_orm(_('ValidateError'),
                    _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
        # static selections are (value, label) sequences; callable selections
        # are evaluated to obtain the candidate values
        if isinstance(self._columns[field].selection, (tuple, list)):
            if val in dict(self._columns[field].selection):
        elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
        raise except_orm(_('ValidateError'),
            _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
    def _check_removed_columns(self, cr, log=False):
        # iterate on the database columns to drop the NOT NULL constraints
        # of fields which were required but have been removed (or will be added by another module)
        columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
        columns += MAGIC_COLUMNS
        # NOTE(review): the trailing comma after this cr.execute(...) turns the
        # statement into a discarded 1-tuple -- harmless but looks accidental.
        cr.execute("SELECT a.attname, a.attnotnull"
                   " FROM pg_class c, pg_attribute a"
                   " WHERE c.relname=%s"
                   " AND c.oid=a.attrelid"
                   " AND a.attisdropped=%s"
                   " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
                   " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),

        for column in cr.dictfetchall():
            # (an "if log:" guard appears elided before this debug call)
            self.__logger.debug("column %s is in the table %s but not in the corresponding object %s",
                                column['attname'], self._table, self._name)
            if column['attnotnull']:
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
                self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                                    self._table, column['attname'])
2603 # checked version: for direct m2o starting from `self`
2604 def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
2605 assert self.is_transient() or not dest_model.is_transient(), \
2606 'Many2One relationships from non-transient Model to TransientModel are forbidden'
2607 if self.is_transient() and not dest_model.is_transient():
2608 # TransientModel relationships to regular Models are annoying
2609 # usually because they could block deletion due to the FKs.
2610 # So unless stated otherwise we default them to ondelete=cascade.
2611 ondelete = ondelete or 'cascade'
2612 self._foreign_keys.append((self._table, source_field, dest_model._table, ondelete or 'set null'))
2613 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2614 self._table, source_field, dest_model._table, ondelete)
2616 # unchecked version: for custom cases, such as m2m relationships
2617 def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
2618 self._foreign_keys.append((source_table, source_field, dest_model._table, ondelete or 'set null'))
2619 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2620 source_table, source_field, dest_model._table, ondelete)
2622 def _auto_init(self, cr, context=None):
2625 Call _field_create and, unless _auto is False:
2627 - create the corresponding table in database for the model,
2628 - possibly add the parent columns in database,
2629 - possibly add the columns 'create_uid', 'create_date', 'write_uid',
2630 'write_date' in database if _log_access is True (the default),
2631 - report on database columns no more existing in _columns,
2632 - remove no more existing not null constraints,
2633 - alter existing database columns to match _columns,
2634 - create database tables to match _columns,
2635 - add database indices to match _columns,
2636 - save in self._foreign_keys a list a foreign keys to create (see
2640 self._foreign_keys = []
2641 raise_on_invalid_object_name(self._name)
2644 store_compute = False
2646 update_custom_fields = context.get('update_custom_fields', False)
2647 self._field_create(cr, context=context)
2648 create = not self._table_exist(cr)
2650 if getattr(self, '_auto', True):
2653 self._create_table(cr)
2656 if self._parent_store:
2657 if not self._parent_columns_exist(cr):
2658 self._create_parent_columns(cr)
2659 store_compute = True
2661 # Create the create_uid, create_date, write_uid, write_date, columns if desired.
2662 if self._log_access:
2663 self._add_log_columns(cr)
2665 self._check_removed_columns(cr, log=False)
2667 # iterate on the "object columns"
2668 column_data = self._select_column_data(cr)
2670 for k, f in self._columns.iteritems():
2671 if k in MAGIC_COLUMNS:
2673 # Don't update custom (also called manual) fields
2674 if f.manual and not update_custom_fields:
2677 if isinstance(f, fields.one2many):
2678 self._o2m_raise_on_missing_reference(cr, f)
2680 elif isinstance(f, fields.many2many):
2681 self._m2m_raise_or_create_relation(cr, f)
2684 res = column_data.get(k)
2686 # The field is not found as-is in database, try if it
2687 # exists with an old name.
2688 if not res and hasattr(f, 'oldname'):
2689 res = column_data.get(f.oldname)
2691 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
2693 column_data[k] = res
2694 self.__schema.debug("Table '%s': renamed column '%s' to '%s'",
2695 self._table, f.oldname, k)
2697 # The field already exists in database. Possibly
2698 # change its type, rename it, drop it or change its
2701 f_pg_type = res['typname']
2702 f_pg_size = res['size']
2703 f_pg_notnull = res['attnotnull']
2704 if isinstance(f, fields.function) and not f.store and\
2705 not getattr(f, 'nodrop', False):
2706 self.__logger.info('column %s (%s) in table %s removed: converted to a function !\n',
2707 k, f.string, self._table)
2708 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
2710 self.__schema.debug("Table '%s': dropped column '%s' with cascade",
2714 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
2719 ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
2720 ('varchar', 'text', 'TEXT', ''),
2721 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2722 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
2723 ('timestamp', 'date', 'date', '::date'),
2724 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2725 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2727 if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
2728 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2729 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
2730 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
2731 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2733 self.__schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
2734 self._table, k, f_pg_size, f.size)
2736 if (f_pg_type==c[0]) and (f._type==c[1]):
2737 if f_pg_type != f_obj_type:
2739 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2740 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
2741 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
2742 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2744 self.__schema.debug("Table '%s': column '%s' changed type from %s to %s",
2745 self._table, k, c[0], c[1])
2748 if f_pg_type != f_obj_type:
2752 newname = k + '_moved' + str(i)
2753 cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
2754 "WHERE c.relname=%s " \
2755 "AND a.attname=%s " \
2756 "AND c.oid=a.attrelid ", (self._table, newname))
2757 if not cr.fetchone()[0]:
2761 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2762 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
2763 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2764 cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
2765 self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
2766 self._table, k, f_pg_type, f._type, newname)
2768 # if the field is required and hasn't got a NOT NULL constraint
2769 if f.required and f_pg_notnull == 0:
2770 # set the field to the default value if any
2771 if k in self._defaults:
2772 if callable(self._defaults[k]):
2773 default = self._defaults[k](self, cr, SUPERUSER_ID, context)
2775 default = self._defaults[k]
2777 if (default is not None):
2778 ss = self._columns[k]._symbol_set
2779 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
2780 cr.execute(query, (ss[1](default),))
2781 # add the NOT NULL constraint
2784 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2786 self.__schema.debug("Table '%s': column '%s': added NOT NULL constraint",
2789 msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
2790 "If you want to have it, you should update the records and execute manually:\n"\
2791 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2792 self.__schema.warn(msg, self._table, k, self._table, k)
2794 elif not f.required and f_pg_notnull == 1:
2795 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2797 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2800 indexname = '%s_%s_index' % (self._table, k)
2801 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
2802 res2 = cr.dictfetchall()
2803 if not res2 and f.select:
2804 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2806 if f._type == 'text':
2807 # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
2808 msg = "Table '%s': Adding (b-tree) index for text column '%s'."\
2809 "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
2810 " because there is a length limit for indexable btree values!\n"\
2811 "Use a search view instead if you simply want to make the field searchable."
2812 self.__schema.warn(msg, self._table, k, f._type)
2813 if res2 and not f.select:
2814 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
2816 msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
2817 self.__schema.debug(msg, self._table, k, f._type)
2819 if isinstance(f, fields.many2one):
2820 dest_model = self.pool.get(f._obj)
2821 ref = dest_model._table
2822 if ref != 'ir_actions':
2823 cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
2824 'pg_attribute as att1, pg_attribute as att2 '
2825 'WHERE con.conrelid = cl1.oid '
2826 'AND cl1.relname = %s '
2827 'AND con.confrelid = cl2.oid '
2828 'AND cl2.relname = %s '
2829 'AND array_lower(con.conkey, 1) = 1 '
2830 'AND con.conkey[1] = att1.attnum '
2831 'AND att1.attrelid = cl1.oid '
2832 'AND att1.attname = %s '
2833 'AND array_lower(con.confkey, 1) = 1 '
2834 'AND con.confkey[1] = att2.attnum '
2835 'AND att2.attrelid = cl2.oid '
2836 'AND att2.attname = %s '
2837 "AND con.contype = 'f'", (self._table, ref, k, 'id'))
2838 res2 = cr.dictfetchall()
2840 if res2[0]['confdeltype'] != POSTGRES_CONFDELTYPES.get((f.ondelete or 'set null').upper(), 'a'):
2841 cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"')
2842 self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
2844 self.__schema.debug("Table '%s': column '%s': XXX",
2847 # The field doesn't exist in database. Create it if necessary.
2849 if not isinstance(f, fields.function) or f.store:
2850 # add the missing field
2851 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2852 cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
2853 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2854 self._table, k, get_pg_type(f)[1])
2857 if not create and k in self._defaults:
2858 if callable(self._defaults[k]):
2859 default = self._defaults[k](self, cr, SUPERUSER_ID, context)
2861 default = self._defaults[k]
2863 ss = self._columns[k]._symbol_set
2864 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
2865 cr.execute(query, (ss[1](default),))
2867 netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
2869 # remember the functions to call for the stored fields
2870 if isinstance(f, fields.function):
2872 if f.store is not True: # i.e. if f.store is a dict
2873 order = f.store[f.store.keys()[0]][2]
2874 todo_end.append((order, self._update_store, (f, k)))
2876 # and add constraints if needed
2877 if isinstance(f, fields.many2one):
2878 if not self.pool.get(f._obj):
2879 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2880 dest_model = self.pool.get(f._obj)
2881 ref = dest_model._table
2882 # ir_actions is inherited so foreign key doesn't work on it
2883 if ref != 'ir_actions':
2884 self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
2886 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2890 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2891 self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
2894 msg = "WARNING: unable to set column %s of table %s not null !\n"\
2895 "Try to re-run: openerp-server --update=module\n"\
2896 "If it doesn't work, update records and execute manually:\n"\
2897 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2898 self.__logger.warn(msg, k, self._table, self._table, k)
2902 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2903 create = not bool(cr.fetchone())
2905 cr.commit() # start a new transaction
2907 self._add_sql_constraints(cr)
2910 self._execute_sql(cr)
2913 self._parent_store_compute(cr)
def _auto_end(self, cr, context=None):
    """ Create the foreign keys recorded by _auto_init. """
    # _foreign_keys holds (table, column, referenced_table, ondelete_action)
    # tuples accumulated during _auto_init; materialize each as a real
    # FK constraint now that all tables exist.
    for t, k, r, d in self._foreign_keys:
        cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
    # Drop the accumulator so a later _auto_init pass starts fresh.
    del self._foreign_keys
def _table_exist(self, cr):
    """Query pg_class for a relation named after this model's table.

    relkind 'r' = ordinary table, 'v' = view. The caller inspects the
    cursor result (result handling not visible in this view).
    """
    cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
def _create_table(self, cr):
    """Create this model's SQL table, containing only the serial
    primary key, and attach the model description as a table comment."""
    table = self._table
    cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (table,))
    comment_sql = "COMMENT ON TABLE \"%s\" IS %%s" % table
    cr.execute(comment_sql, (self._description,))
    self.__schema.debug("Table '%s': created", table)
def _parent_columns_exist(self, cr):
    """Check the catalogs for a 'parent_left' column on this model's table
    (part of the parent_left/parent_right hierarchy support — see
    _create_parent_columns). Result handling is not visible in this view."""
    cr.execute("""SELECT c.relname
        FROM pg_class c, pg_attribute a
        WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
        """, (self._table, 'parent_left'))
def _create_parent_columns(self, cr):
    """Add the parent_left/parent_right integer columns used for
    hierarchical queries, and warn about misdeclared ORM counterparts."""
    cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
    cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
    # NOTE(review): several logger.error(...) calls below appear truncated in
    # this view (their closing argument lines are elided).
    if 'parent_left' not in self._columns:
        # ORM-level field declaration missing: tell the developer how to add it.
        self.__logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
        self.__schema.debug("Table '%s': added column '%s' with definition=%s",
            self._table, 'parent_left', 'INTEGER')
    elif not self._columns['parent_left'].select:
        # Declared but not indexed; select=1 is expected for this column.
        self.__logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
    if 'parent_right' not in self._columns:
        self.__logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
        self.__schema.debug("Table '%s': added column '%s' with definition=%s",
            self._table, 'parent_right', 'INTEGER')
    elif not self._columns['parent_right'].select:
        self.__logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
    if self._columns[self._parent_name].ondelete != 'cascade':
        # The parent link must cascade-delete so the hierarchy stays consistent.
        self.__logger.error("The column %s on object %s must be set as ondelete='cascade'",
            self._parent_name, self._name)
def _add_log_columns(self, cr):
    """Ensure every audit column declared in LOG_ACCESS_COLUMNS exists on
    the table, adding missing ones with their SQL definition.

    NOTE(review): the start of the existence-check query and the
    surrounding condition are elided in this view.
    """
    for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, field))
        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
        self.__schema.debug("Table '%s': added column '%s' with definition=%s",
            self._table, field, field_def)
def _select_column_data(self, cr):
    """Return a mapping {column_name: catalog_row} describing every column
    of this model's table, read from pg_class/pg_attribute/pg_type."""
    # attlen is the number of bytes necessary to represent the type when
    # the type has a fixed size. If the type has a varying size attlen is
    # -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
    # Thus the query can return a negative size for a unlimited varchar.
    cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size "
               "FROM pg_class c,pg_attribute a,pg_type t "
               "WHERE c.relname=%s "
               "AND c.oid=a.attrelid "
               "AND a.atttypid=t.oid", (self._table,))
    return dict((row['attname'], row) for row in cr.dictfetchall())
def _o2m_raise_on_missing_reference(self, cr, f):
    """Raise when the one2many field ``f`` points to a co-model that does
    not carry the inverse column ``f._fields_id`` (directly or via
    _inherits)."""
    # TODO this check should be a method on fields.one2many.
    other = self.pool.get(f._obj)
    # TODO the condition could use fields_get_keys().
    if f._fields_id not in other._columns.keys():
        if f._fields_id not in other._inherit_fields.keys():
            raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
def _m2m_raise_or_create_relation(self, cr, f):
    """Create the relation table for the many2many field ``f`` when it is
    missing; raise if the destination model is not in the registry."""
    m2m_tbl, col1, col2 = f._sql_names(self)
    cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
    if not cr.dictfetchall():
        if not self.pool.get(f._obj):
            raise except_orm('Programming Error', ('Many2Many destination model does not exist: `%s`') % (f._obj,))
        dest_model = self.pool.get(f._obj)
        ref = dest_model._table
        # UNIQUE(col1, col2) prevents duplicate links between the same pair of records.
        cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s")) WITH OIDS' % (m2m_tbl, col1, col2, col1, col2))

        # create foreign key references with ondelete=cascade, unless the targets are SQL views
        cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
        if not cr.fetchall():
            self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
        cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
        if not cr.fetchall():
            self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')

        # Index both link columns: the relation table is joined on one side or the other.
        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
        cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
        self.__schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
def _add_sql_constraints(self, cr):
    Modify this model's database table constraints so they match the one in
    for (key, con, _) in self._sql_constraints:
        conname = '%s_%s' % (self._table, key)

        cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
        existing_constraints = cr.dictfetchall()
        # sql_actions maps action names to {query, messages, execute-flag, order}
        # dicts; the dict literal is partially elided in this view.
        # 'drop': remove a constraint whose definition no longer matches.
                'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
                'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                    self._table, conname, con),
                'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
        # 'add': (re)create the constraint with its current definition.
                'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
                'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
                'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (

        if not existing_constraints:
            # constraint does not exists:
            sql_actions['add']['execute'] = True
            sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
        elif con.lower() not in [item['condef'].lower() for item in existing_constraints]:
            # constraint exists but its definition has changed:
            sql_actions['drop']['execute'] = True
            sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
            sql_actions['add']['execute'] = True
            sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )

        # we need to add the constraint:
        # Run selected actions ordered (drop before add); failures are logged, not fatal.
        sql_actions = [item for item in sql_actions.values()]
        sql_actions.sort(key=lambda x: x['order'])
        for sql_action in [action for action in sql_actions if action['execute']]:
            cr.execute(sql_action['query'])
            self.__schema.debug(sql_action['msg_ok'])
            self.__schema.warn(sql_action['msg_err'])
def _execute_sql(self, cr):
    """ Execute the SQL code from the _sql attribute (if any)."""
    if hasattr(self, "_sql"):
        # _sql may hold several statements separated by ';'; each is
        # flattened to one line before execution (execution elided in this view).
        for line in self._sql.split(';'):
            line2 = line.replace('\n', '').strip()
# Update objects that use this one to update their _inherits fields
def _inherits_reload_src(self):
    """ Propagate a change of this model's fields: recompute the
    _inherit_fields mapping on every model that _inherits from it. """
    children = (model for model in self.pool.models.values()
                if self._name in model._inherits)
    for child in children:
        child._inherits_reload()
def _inherits_reload(self):
    """ Recompute the _inherit_fields mapping.

    This will also call itself on each inherits'd child model.
    """
    for table in self._inherits:
        other = self.pool.get(table)
        # Direct columns of the parent model; each entry is
        # (parent_model, link_field, column_object, original_owner_model).
        for col in other._columns.keys():
            res[col] = (table, self._inherits[table], other._columns[col], table)
        # Columns the parent itself inherits: keep the original owner model.
        for col in other._inherit_fields.keys():
            res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
    self._inherit_fields = res
    self._all_columns = self._get_column_infos()
    # Cascade to models inheriting from this one.
    self._inherits_reload_src()
def _get_column_infos(self):
    """Returns a dict mapping all fields names (direct fields and
    inherited field via _inherits) to a ``column_info`` struct
    giving detailed columns """
    # Inherited fields first, so a direct column with the same name wins below.
    for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
        result[k] = fields.column_info(k, col, parent, m2o, original_parent)
    for k, col in self._columns.iteritems():
        result[k] = fields.column_info(k, col)
def _inherits_check(self):
    """Validate the link fields backing _inherits: create a default
    many2one when the declared field is missing, and force
    required=True / ondelete='cascade' on a misconfigured one."""
    for table, field_name in self._inherits.items():
        if field_name not in self._columns:
            logging.getLogger('init').info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.' % (field_name, self._name))
            self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
                                                        required=True, ondelete="cascade")
        else:
            link = self._columns[field_name]
            if not link.required or link.ondelete.lower() != "cascade":
                logging.getLogger('init').warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.' % (field_name, self._name))
                link.required = True
                link.ondelete = "cascade"
3151 #def __getattr__(self, name):
3153 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3154 # (though inherits doesn't use Python inheritance).
3155 # Handles translating between local ids and remote ids.
3156 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3157 # when you have inherits.
3159 # for model, field in self._inherits.iteritems():
3160 # proxy = self.pool.get(model)
3161 # if hasattr(proxy, name):
3162 # attribute = getattr(proxy, name)
3163 # if not hasattr(attribute, '__call__'):
3167 # return super(orm, self).__getattr__(name)
3169 # def _proxy(cr, uid, ids, *args, **kwargs):
3170 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3171 # lst = [obj[field].id for obj in objects if obj[field]]
3172 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
    """ Return the definition of each field.

    The returned value is a dictionary (indiced by field name) of
    dictionaries. The _inherits'd fields are included. The string, help,
    and selection (if present) attributes are translated.

    :param cr: database cursor
    :param user: current user id
    :param fields: list of fields
    :param context: context arguments, like lang, time zone
    :return: dictionary of field dictionaries, each one describing a field of the business object
    :raise AccessError: * if user has no create/write rights on the requested object
    """
    # Fields are reported read-only when the user can neither write nor create.
    write_access = self.check_write(cr, user, False) or \
                   self.check_create(cr, user, False)

    translation_obj = self.pool.get('ir.translation')
    # Start from the parents' definitions so local columns can override them.
    for parent in self._inherits:
        res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

    for f, field in self._columns.iteritems():
        if allfields and f not in allfields:

        res[f] = fields.field_to_dict(self, cr, user, field, context=context)

        if not write_access:
            res[f]['readonly'] = True
            res[f]['states'] = {}

        # Translate the user-visible attributes into the context language.
        if 'string' in res[f]:
            res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
            res[f]['string'] = res_trans
        if 'help' in res[f]:
            help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
            res[f]['help'] = help_trans
        if 'selection' in res[f]:
            if isinstance(field.selection, (tuple, list)):
                sel = field.selection
                # Translate each selection label, falling back to the original.
                for key, val in sel:
                    val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                    sel2.append((key, val2 or val))
                res[f]['selection'] = sel2
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
    """ Read records with given ids with the given fields

    :param cr: database cursor
    :param user: current user id
    :param ids: id or list of the ids of the records to read
    :param fields: optional list of field names to return (default: all fields would be returned)
    :type fields: list (example ['field_name_1', ...])
    :param context: optional context dictionary - it may contains keys for specifying certain options
                    like ``context_lang``, ``context_tz`` to alter the results of the call.
                    A special ``bin_size`` boolean flag may also be passed in the context to request the
                    value of all fields.binary columns to be returned as the size of the binary instead of its
                    contents. This can also be selectively overriden by passing a field-specific flag
                    in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                    Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
    :return: list of dictionaries((dictionary per record asked)) with requested field values
    :rtype: [{'name_of_the_field': value, ...}, ...]
    :raise AccessError: * if user has no read rights on the requested object
                        * if user tries to bypass access rules for read on the requested object
    """
    self.check_read(cr, user)
    # Default field list: every direct and inherited field.
    fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
    if isinstance(ids, (int, long)):
    # ids may come in as plain ids or as record dicts carrying an 'id' key.
    select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
    result = self._read_flat(cr, user, select, fields, context, load)

    for key, v in r.items():

    # A single id in means a single record dict out (False when not found).
    if isinstance(ids, (int, long, dict)):
        return result and result[0] or False
def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
    """Low-level implementation of read(): fetch the stored columns for
    ``ids`` with SQL (chunked, with ir.rule filtering), then post-process
    translated, inherited and function fields."""
    if fields_to_read == None:
        fields_to_read = self._columns.keys()

    # Construct a clause for the security rules.
    # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
    # or will at least contain self._table.
    rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

    # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
    fields_pre = [f for f in fields_to_read if
                  f == self.CONCURRENCY_CHECK_FIELD
                  or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                 ] + self._inherits.values()

    def convert_field(f):
        # Render one column reference for the SELECT list.
        f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
        if f in ('create_date', 'write_date'):
            return "date_trunc('second', %s) as %s" % (f_qual, f)
        if f == self.CONCURRENCY_CHECK_FIELD:
            if self._log_access:
                return "COALESCE(%s.write_date, %s.create_date, now())::timestamp AS %s" % (self._table, self._table, f,)
            return "now()::timestamp AS %s" % (f,)
        if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
            # bin_size: return only the length of the binary data, not its content.
            return 'length(%s) as "%s"' % (f_qual, f)

    fields_pre2 = map(convert_field, fields_pre)
    order_by = self._parent_order or self._order
    select_fields = ','.join(fields_pre2 + [self._table + '.id'])
    query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
    query += " AND " + (' OR '.join(rule_clause))
    query += " ORDER BY " + order_by
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute(query, [tuple(sub_ids)] + rule_params)
        # Fewer rows than ids: some records were filtered out by ir.rule
        # (or already deleted) -> access error.
        if cr.rowcount != len(sub_ids):
            raise except_orm(_('AccessError'),
                             _('Operation prohibited by access rules, or performed on an already deleted document (Operation: read, Document type: %s).')
                             % (self._description,))
        cr.execute(query, (tuple(sub_ids),))
        res.extend(cr.dictfetchall())
    res = map(lambda x: {'id': x}, ids)

    # Replace translatable stored values with their translations.
    for f in fields_pre:
        if f == self.CONCURRENCY_CHECK_FIELD:
            if self._columns[f].translate:
                ids = [x['id'] for x in res]
                #TODO: optimize out of this loop
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
                r[f] = res_trans.get(r['id'], False) or r[f]

    # Read the _inherits'd parent records and merge their values in.
    for table in self._inherits:
        col = self._inherits[table]
        cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
        res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
        if not record[col]: # if the record is deleted from _inherits table?
            record.update(res3[record[col]])
            if col not in fields_to_read:

    # all fields which need to be post-processed by a simple function (symbol_get)
    fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
    for f in fields_post:
        r[f] = self._columns[f]._symbol_get(r[f])
    ids = [x['id'] for x in res]

    # all non inherited fields for which the attribute whose name is in load is False
    fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

    # Compute POST fields
    # Group function fields by their 'multi' key so each group is computed in one call.
    for f in fields_post:
        todo.setdefault(self._columns[f]._multi, [])
        todo[self._columns[f]._multi].append(f)
    for key, val in todo.items():
        res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
        assert res2 is not None, \
            'The function field "%s" on the "%s" model returned None\n' \
            '(a dictionary was expected).' % (val[0], self._name)
        if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
        multi_fields = res2.get(record['id'],{})
        record[pos] = multi_fields.get(pos,[])
        res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
        record[f] = res2[record['id']]

    # Per-group field access: when the user belongs to none of a field's
    # allowed groups, blank the value out with a placeholder.
    for field in vals.copy():
        if field in self._columns:
            fobj = self._columns[field]
        for group in groups:
            # groups are dotted external ids: "module.group_name"
            module = group.split(".")[0]
            grp = group.split(".")[1]
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
            elif readonly[0][0] == 0:
        if type(vals[field]) == type([]):
        elif type(vals[field]) == type(0.0):
        elif type(vals[field]) == type(''):
            vals[field] = '=No Permission='
3433 # TODO check READ access
def perm_read(self, cr, user, ids, context=None, details=True):
    """
    Returns some metadata about the given records.

    :param details: if True, \*_uid fields are replaced with the name of the user
    :return: list of ownership dictionaries for each requested record
    :rtype: list of dictionaries with the following keys:

        * create_uid: user who created the record
        * create_date: date when the record was created
        * write_uid: last user who changed the record
        * write_date: date of the last change to the record
        * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
    """
    # Remember whether a single id was passed (result shaping is elided in this view).
    uniq = isinstance(ids, (int, long))
    if self._log_access:
        fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
    quoted_table = '"%s"' % self._table
    fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
    # LEFT JOIN on ir_model_data fetches the record's external (XML) id, if any.
    query = '''SELECT %s, __imd.module, __imd.name
               FROM %s LEFT JOIN ir_model_data __imd
               ON (__imd.model = %%s and __imd.res_id = %s.id)
               WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
    cr.execute(query, (self._name, tuple(ids)))
    res = cr.dictfetchall()
        r[key] = r[key] or False
        if details and key in ('write_uid', 'create_uid') and r[key]:
            # Replace the raw uid with the (id, name) pair from name_get.
            r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
            pass # Leave the numeric uid there
        # Build the dotted external identifier from the ir_model_data join.
        r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
        del r['name'], r['module']
def _check_concurrency(self, cr, ids, context):
    """Optimistic concurrency check: compare client-provided timestamps
    (in context under CONCURRENCY_CHECK_FIELD) against the rows' current
    write/create dates; raise if a record changed since it was read."""
    if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
    check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
    for sub_ids in cr.split_for_in_conditions(ids):
            # Timestamps are keyed as "model_name,id" in the context dict.
            id_ref = "%s,%s" % (self._name, id)
            update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
            ids_to_check.extend([id, update_date])
        if not ids_to_check:
        # ids_to_check alternates (id, timestamp), hence len/2 clauses.
        cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
        # mention the first one only to keep the error message readable
        raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
    """Check that user ``uid`` may perform ``operation`` on this model
    according to the access rights, delegating to ir.model.access."""
    access_model = self.pool.get('ir.model.access')
    return access_model.check(cr, uid, self._name, operation, raise_exception)
def check_create(self, cr, uid, raise_exception=True):
    """Shortcut for ``check_access_rights`` with operation='create'."""
    return self.check_access_rights(cr, uid, 'create', raise_exception=raise_exception)
def check_read(self, cr, uid, raise_exception=True):
    """Shortcut for ``check_access_rights`` with operation='read'."""
    return self.check_access_rights(cr, uid, 'read', raise_exception=raise_exception)
def check_unlink(self, cr, uid, raise_exception=True):
    """Shortcut for ``check_access_rights`` with operation='unlink'."""
    return self.check_access_rights(cr, uid, 'unlink', raise_exception=raise_exception)
def check_write(self, cr, uid, raise_exception=True):
    """Shortcut for ``check_access_rights`` with operation='write'."""
    return self.check_access_rights(cr, uid, 'write', raise_exception=raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
    """Verifies that the operation given by ``operation`` is allowed for the user
    according to ir.rules.

    :param operation: one of ``write``, ``unlink``
    :raise except_orm: * if current ir.rules do not permit this operation.
    :return: None if the operation is allowed
    """
    if uid == SUPERUSER_ID:  # superuser bypasses all rules (body elided in this view)

    if self.is_transient():
        # Only one single implicit access rule for transient models: owner only!
        # This is ok to hardcode because we assert that TransientModels always
        # have log_access enabled and this the create_uid column is always there.
        # And even with _inherits, these fields are always present in the local
        # table too, so no need for JOINs.
        cr.execute("""SELECT distinct create_uid
                      WHERE id IN %%s""" % self._table, (tuple(ids),))
        uids = [x[0] for x in cr.fetchall()]
        if len(uids) != 1 or uids[0] != uid:
            raise except_orm(_('AccessError'), '%s access is '
                'restricted to your own records for transient models '
                '(except for the super-user).' % operation.capitalize())
    where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
    where_clause = ' and ' + ' and '.join(where_clause)
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                   ' WHERE ' + self._table + '.id IN %s' + where_clause,
                   [sub_ids] + where_params)
        # Fewer rows than ids: some records were filtered out by the rules
        # (or already deleted) -> deny the whole operation.
        if cr.rowcount != len(sub_ids):
            raise except_orm(_('AccessError'),
                             _('Operation prohibited by access rules, or performed on an already deleted document (Operation: %s, Document type: %s).')
                             % (operation, self._description))
def unlink(self, cr, uid, ids, context=None):
    """
    Delete records with given ids

    :param cr: database cursor
    :param uid: current user id
    :param ids: id or list of ids
    :param context: (optional) context arguments, like lang, time zone

    :raise AccessError: * if user has no unlink rights on the requested object
                        * if user tries to bypass access rules for unlink on the requested object
    :raise UserError: if the record is default property for other records
    """
    if isinstance(ids, (int, long)):

    # Collect stored function fields to recompute BEFORE deleting the rows.
    result_store = self._store_get_values(cr, uid, ids, None, context)

    self._check_concurrency(cr, ids, context)

    self.check_unlink(cr, uid)

    # Refuse to delete records still used as a default ir.property value.
    properties = self.pool.get('ir.property')
    domain = [('res_id', '=', False),
              ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
    if properties.search(cr, uid, domain, context=context):
        raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

    # Clean up any workflow instances attached to the deleted records.
    wf_service = netsvc.LocalService("workflow")
        wf_service.trg_delete(uid, self._name, oid, cr)

    self.check_access_rule(cr, uid, ids, 'unlink', context=context)
    pool_model_data = self.pool.get('ir.model.data')
    ir_values_obj = self.pool.get('ir.values')
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('delete from ' + self._table + ' ' \
                   'where id IN %s', (sub_ids,))

        # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
        # as these are not connected with real database foreign keys, and would be dangling references.
        # Note: following steps performed as admin to avoid access rights restrictions, and with no context
        # to avoid possible side-effects during admin calls.
        # Step 1. Calling unlink of ir_model_data only for the affected IDS
        reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
        # Step 2. Marching towards the real deletion of referenced records
        pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)

        # For the same reason, removing the record relevant to ir_values
        ir_value_ids = ir_values_obj.search(cr, uid,
            ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
        ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)

    # Recompute stored function fields on OTHER models that depended on the
    # deleted records, restricted to the target rows that still exist.
    for order, object, store_ids, fields in result_store:
        if object != self._name:
            obj = self.pool.get(object)
            cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
            rids = map(lambda x: x[0], cr.fetchall())
            obj._store_set_values(cr, uid, rids, fields, context)
def write(self, cr, user, ids, vals, context=None):
    """
    Update records with given ids with the given field values

    :param cr: database cursor
    :param user: current user id
    :param ids: object id or list of object ids to update according to **vals**
    :param vals: field values to update, e.g {'field_name': new_field_value, ...}
    :type vals: dictionary
    :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
    :type context: dictionary

    :raise AccessError: * if user has no write rights on the requested object
                        * if user tries to bypass access rules for write on the requested object
    :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
    :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

    **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:

        + For a many2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
             (3, ID)                cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
             (4, ID)                link to existing record with id = ID (adds a relationship)
             (5)                    unlink all (like using (3,ID) for all linked records)
             (6, 0, [IDs])          replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)

          Example: [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]

        + For a one2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)

          Example: [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]

        + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
        + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
    """
    # NOTE(review): several lines of this method are missing from the
    # excerpt (loop headers, else-branches, accumulator initializations);
    # the comments below describe only the visible code.

    # Group-based "write groups" filtering: fields whose column declares
    # write groups are only kept in ``vals`` if the user is a member.
    for field in vals.copy():
        if field in self._columns:
            fobj = self._columns[field]
        elif field in self._inherit_fields:
            fobj = self._inherit_fields[field][2]
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            # Parameterized membership check of ``user`` in the res.groups
            # record identified by its XML id ``module.grp``.
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
            elif readonly[0][0] == 0:

    # Accept a single id as well as a list of ids.
    if isinstance(ids, (int, long)):

    self._check_concurrency(cr, ids, context)
    self.check_write(cr, user)

    # Stored function fields to recompute because of this write.
    result = self._store_get_values(cr, user, ids, vals.keys(), context) or []

    # No direct update of parent_left/right
    vals.pop('parent_left', None)
    vals.pop('parent_right', None)

    parents_changed = []
    parent_order = self._parent_order or self._order
    if self._parent_store and (self._parent_name in vals):
        # The parent_left/right computation may take up to
        # 5 seconds. No need to recompute the values if the
        # parent is the same.
        # Note: to respect parent_order, nodes must be processed in
        # order, so ``parents_changed`` must be ordered properly.
        parent_val = vals[self._parent_name]
            # records whose parent actually changes to the new parent
            query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
                            (self._table, self._parent_name, self._parent_name, parent_order)
            cr.execute(query, (tuple(ids), parent_val))
            # records becoming root nodes (parent removed)
            query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
                            (self._table, self._parent_name, parent_order)
            cr.execute(query, (tuple(ids),))
        parents_changed = map(operator.itemgetter(0), cr.fetchall())

    # Split ``vals``: directly writable columns go into upd0 (SQL SET
    # fragments) / upd1 (parameters); non classic-write fields into
    # upd_todo; fields of _inherits parents into updend.
    totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
        if field in self._columns:
            if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
                if (not totranslate) or not self._columns[field].translate:
                    upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                    upd1.append(self._columns[field]._symbol_set[1](vals[field]))
                direct.append(field)
                upd_todo.append(field)
            updend.append(field)
        # Validate selection values before touching the database.
        if field in self._columns \
                and hasattr(self._columns[field], 'selection') \
            self._check_selection_field_value(cr, user, field, vals[field], context=context)

    # Audit-trail columns, when enabled on the model.
    if self._log_access:
        upd0.append('write_uid=%s')
        upd0.append('write_date=now()')

        # Record-level rules for 'write', then the UPDATE itself, chunked
        # to respect the max number of ids per IN clause.
        self.check_access_rule(cr, user, ids, 'write', context=context)
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
                       'where id IN %s', upd1 + [sub_ids])
            # A smaller rowcount means some targeted rows no longer exist.
            if cr.rowcount != len(sub_ids):
                raise except_orm(_('AccessError'),
                                 _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)

            # Translatable fields written under a non-en_US lang go to
            # ir.translation rather than (only) the source column.
            if self._columns[f].translate:
                src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
                    # Inserting value to DB
                    self.write(cr, user, ids, {f: vals[f]})
                self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

    # call the 'set' method of fields which are not classic_write
    upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

    # default element in context must be removed when call a one2many or many2many
    rel_context = context.copy()
    for c in context.items():
        if c[0].startswith('default_'):
            del rel_context[c[0]]

    for field in upd_todo:
            result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

    # Propagate values belonging to _inherits parents: fetch the parent
    # ids actually linked to our rows, then write on the parent model.
    for table in self._inherits:
        col = self._inherits[table]
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                       'where id IN %s', (sub_ids,))
            nids.extend([x[0] for x in cr.fetchall()])

            if self._inherit_fields[val][0] == table:
            self.pool.get(table).write(cr, user, nids, v, context)

    # Python-level _constraints check.
    self._validate(cr, user, ids, context)

    # TODO: use _order to set dest at the right position and not first node of parent
    # We can't defer parent_store computation because the stored function
    # fields that are computed may refer (directly or indirectly) to
    # parent_left/right (via a child_of domain)
            self.pool._init_parent[self._name] = True
            order = self._parent_order or self._order
            parent_val = vals[self._parent_name]
                clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
                clause, params = '%s IS NULL' % (self._parent_name,), ()

            for id in parents_changed:
                cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
                pleft, pright = cr.fetchone()
                # width of the subtree being moved
                distance = pright - pleft + 1

                # Positions of current siblings, to locate proper insertion point;
                # this can _not_ be fetched outside the loop, as it needs to be refreshed
                # after each update, in case several nodes are sequentially inserted one
                # next to the other (i.e computed incrementally)
                cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
                parents = cr.fetchall()

                # Find Position of the element
                for (parent_pright, parent_id) in parents:
                    position = parent_pright + 1

                # It's the first node of the parent
                    cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
                    position = cr.fetchone()[0] + 1

                # a node may not be moved inside its own subtree
                if pleft < position <= pright:
                    raise except_orm(_('UserError'), _('Recursivity Detected.'))

                # Interval-tree shuffle: open a gap of ``distance`` at the
                # insertion point, move the subtree into it (two mirrored
                # cases depending on the move direction).
                if pleft < position:
                    cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                    cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                    cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
                    cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                    cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                    cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))

    result += self._store_get_values(cr, user, ids, vals.keys(), context)

    # Batch the function-field recomputations, deduplicating ids so the
    # same (model, fields) computation never runs twice for one id.
    for order, object, ids_to_update, fields_to_recompute in result:
        key = (object, tuple(fields_to_recompute))
        done.setdefault(key, {})
        # avoid to do several times the same computation
        for id in ids_to_update:
            if id not in done[key]:
                done[key][id] = True
        self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)

    # Fire workflow write triggers for each updated record.
    wf_service = netsvc.LocalService("workflow")
        wf_service.trg_write(user, self._name, id, cr)
def create(self, cr, user, vals, context=None):
    """
    Create a new record for the model.

    The values for the new record are initialized using the ``vals``
    argument, and if necessary the result of ``default_get()``.

    :param cr: database cursor
    :param user: current user id
    :param vals: field values for new record, e.g {'field_name': field_value, ...}
    :type vals: dictionary
    :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
    :type context: dictionary
    :return: id of new record created
    :raise AccessError: * if user has no create rights on the requested object
                        * if user tries to bypass access rules for create on the requested object
    :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
    :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

    **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
    Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
    to specify them.
    """
    # NOTE(review): several lines of this method are missing from the
    # excerpt; comments describe only the visible code.

    # Transient (wizard) models: garbage-collect outdated records first.
    if self.is_transient():
        self._transient_vacuum(cr, user)

    # ir.model.access check for 'create'.
    self.check_create(cr, user)

    vals = self._add_missing_default_values(cr, user, vals, context)

    # Prepare per-parent value dicts for _inherits delegation: a parent
    # id given through vals means "link to existing parent".
    for v in self._inherits:
        if self._inherits[v] not in vals:
            tocreate[v] = {'id': vals[self._inherits[v]]}
    # upd0/upd1 accumulate column names / placeholders for the INSERT,
    # upd2 the corresponding parameter values.
    (upd0, upd1, upd2) = ('', '', [])
    for v in vals.keys():
        if v in self._inherit_fields:
            # route inherited-field values to the parent's create/write
            (table, col, col_detail, original_parent) = self._inherit_fields[v]
            tocreate[table][v] = vals[v]
        # values for unknown fields are silently dropped
        if (v not in self._inherit_fields) and (v not in self._columns):

    # Try-except added to filter the creation of those records whose filds are readonly.
    # Example : any dashboard which has all the fields readonly.(due to Views(database views))
        cr.execute("SELECT nextval('"+self._sequence+"')")
        raise except_orm(_('UserError'),
            _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))

    id_new = cr.fetchone()[0]
    # Create (or update) the _inherits parent records and remember the
    # link columns/values for the INSERT below.
    for table in tocreate:
        if self._inherits[table] in vals:
            del vals[self._inherits[table]]

        record_id = tocreate[table].pop('id', None)

        if record_id is None or not record_id:
            record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
            self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)

        upd0 += ',' + self._inherits[table]
        upd2.append(record_id)

    #Start : Set bool fields to be False if they are not touched(to make search more powerful)
    bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
    for bool_field in bool_fields:
        if bool_field not in vals:
            vals[bool_field] = False
    #End

    # Group-based "write groups" field filtering, as in write().
    for field in vals.copy():
        if field in self._columns:
            fobj = self._columns[field]
            fobj = self._inherit_fields[field][2]
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            # NOTE(review): unlike the equivalent query in write(), this
            # one builds SQL with % string interpolation instead of bind
            # parameters -- inconsistent and injection-prone if a group
            # xml-id ever contains a quote; should be parameterized.
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
            elif readonly[0][0] == 0:

        if self._columns[field]._classic_write:
            upd0 = upd0 + ',"' + field + '"'
            upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
            upd2.append(self._columns[field]._symbol_set[1](vals[field]))
            # related fields are derived, not directly set
            if not isinstance(self._columns[field], fields.related):
                upd_todo.append(field)
        # validate selection values before inserting
        if field in self._columns \
                and hasattr(self._columns[field], 'selection') \
            self._check_selection_field_value(cr, user, field, vals[field], context=context)
    if self._log_access:
        upd0 += ',create_uid,create_date'

    # The INSERT itself; id comes from the sequence fetched above.
    cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
    self.check_access_rule(cr, user, [id_new], 'create', context=context)
    upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

    # Maintain the parent_left/parent_right interval tree unless the
    # caller deferred it (e.g. bulk loading).
    if self._parent_store and not context.get('defer_parent_store_computation'):
            self.pool._init_parent[self._name] = True
        parent = vals.get(self._parent_name, False)
            # insert after the last existing child of the same parent
            cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
            result_p = cr.fetchall()
            for (pleft,) in result_p:
                cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
                pleft_old = cr.fetchone()[0]
            # no parent: append at the far right of the whole tree
            cr.execute('select max(parent_right) from '+self._table)
            pleft = cr.fetchone()[0] or 0
        cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
        cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
        cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))

    # default element in context must be remove when call a one2many or many2many
    rel_context = context.copy()
    for c in context.items():
        if c[0].startswith('default_'):
            del rel_context[c[0]]

    # Non classic-write fields: delegate to each column's set().
    for field in upd_todo:
        result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
    self._validate(cr, user, [id_new], context)

    # Recompute stored function fields depending on the new record.
    if not context.get('no_store_function', False):
        result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
        for order, object, ids, fields2 in result:
            if not (object, ids, fields2) in done:
                self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
                done.append((object, ids, fields2))

    # Post a "<description> '<name>' created." log message when enabled.
    if self._log_create and not (context and context.get('no_store_function', False)):
        message = self._description + \
            self.name_get(cr, user, [id_new], context=context)[0][1] + \
            "' " + _("created.")
        self.log(cr, user, id_new, message, True, context=context)

    # Fire the workflow create trigger for the new record.
    wf_service = netsvc.LocalService("workflow")
    wf_service.trg_create(user, self._name, id_new, cr)
def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
    """Fetch records as objects allowing to use dot notation to browse fields and relations

    :param cr: database cursor
    :param uid: current user id
    :param select: id or list of ids.
    :param context: context arguments, like lang, time zone
    :param list_class: optional class used for the returned record list
    :param fields_process: optional per-type post-processing callbacks
    :rtype: object or list of objects requested
    """
    self._list_class = list_class or browse_record_list
    # NOTE(review): the initialization of ``cache`` is not visible in this
    # excerpt; it is the shared prefetch cache handed to every
    # browse_record constructed below.
    # need to accepts ints and longs because ids coming from a method
    # launched by button in the interface have a type long...
    if isinstance(select, (int, long)):
        return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
    elif isinstance(select, list):
        return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
    # anything else (e.g. False) yields a null placeholder object
    return browse_null()
def _store_get_values(self, cr, uid, ids, fields, context):
    """Returns an ordered list of fields.functions to call due to
    an update operation on ``fields`` of records with ``ids``,
    obtained by calling the 'store' functions of these fields,
    as setup by their 'store' attribute.

    :return: [(priority, model_name, [record_ids,], [function_fields,])]
    """
    # NOTE(review): the initializations of ``mapping``/``call_map``, the
    # final sort and the return are not visible in this excerpt.
    if fields is None: fields = []
    stored_functions = self.pool._store_function.get(self._name, [])

    # use indexed names for the details of the stored_functions:
    model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)

    # only keep functions that should be triggered for the ``fields``
    # being written (an empty trigger set means "always trigger")
    to_compute = [f for f in stored_functions \
            if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]

    # invert the relation: for each target record, the set of functions
    # that must recompute it
    for function in to_compute:
        # use admin user for accessing objects having rules defined on store fields
        target_ids = [id for id in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id]

        # the compound key must consider the priority and model name
        key = (function[priority_], function[model_name_])
        for target_id in target_ids:
            mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))

    # Here mapping looks like:
    # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
    #   (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
    #   (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... } }

    # Now we need to generate the batch function calls list
    # call_map =
    #   { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
    for ((priority,model), id_map) in mapping.iteritems():
        functions_ids_maps = {}
        # function_ids_maps =
        # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
        for id, functions in id_map.iteritems():
            functions_ids_maps.setdefault(tuple(functions), []).append(id)
        for functions, ids in functions_ids_maps.iteritems():
            call_map.setdefault((priority,model),[]).append((priority, model, ids,
                                                            [f[func_field_to_compute_] for f in functions]))
    # flatten the per-(priority, model) batches into one ordered list
    ordered_keys = call_map.keys()
    result = reduce(operator.add, (call_map[k] for k in ordered_keys))
def _store_set_values(self, cr, uid, ids, fields, context):
    """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
    respecting ``multi`` attributes), and stores the resulting values in the database directly."""
    # NOTE(review): loop headers and accumulator initializations are
    # missing from this excerpt; comments cover visible code only.
    if self._log_access:
        cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
        # For each row, record which function fields were written too
        # recently (inside the field's declared hour window) so their
        # recomputation can be skipped below.
        field_dict.setdefault(r[0], [])
        res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
        write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
        for i in self.pool._store_function.get(self._name, []):
            up_write_date = write_date + datetime.timedelta(hours=i[5])
            if datetime.datetime.now() < up_write_date:
                field_dict[r[0]].append(i[1])

    # Group the requested fields by their ``multi`` attribute so that
    # multi-value function fields are computed with a single call.
    if self._columns[f]._multi not in keys:
        keys.append(self._columns[f]._multi)
    todo.setdefault(self._columns[f]._multi, [])
    todo[self._columns[f]._multi].append(f)

    # Multi-field branch: one get() returns a dict of values per id.
    # use admin user for accessing objects having rules defined on store fields
    result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
    for id, value in result.items():
        for f in value.keys():
            if f in field_dict[id]:
        # many2one values arrive as (id, name) pairs; store only the id
        if self._columns[v]._type in ('many2one', 'one2one'):
            value[v] = value[v][0]
        upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
        upd1.append(self._columns[v]._symbol_set[1](value[v]))
        cr.execute('update "' + self._table + '" set ' + \
                   ','.join(upd0) + ' where id = %s', upd1)

    # Single-field branch: get() returns one value per id.
    # use admin user for accessing objects having rules defined on store fields
    result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
    for r in result.keys():
        if r in field_dict.keys():
            if f in field_dict[r]:
    for id, value in result.items():
        if self._columns[f]._type in ('many2one', 'one2one'):
        cr.execute('update "' + self._table + '" set ' + \
                   '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
def perm_write(self, cr, user, ids, fields, context=None):
    """Obsolete API: always raises, so stale callers fail loudly."""
    # Keep the translated message identical for any caller displaying it.
    message = _('This method does not exist anymore')
    raise NotImplementedError(message)
# TODO: improve handling of NULL values
def _where_calc(self, cr, user, domain, active_test=True, context=None):
    """Computes the WHERE clause needed to implement an OpenERP domain.

    :param domain: the domain to compute
    :param active_test: whether the default filtering of records with ``active``
                        field set to ``False`` should be applied.
    :return: the query expressing the given domain as provided in domain
    :rtype: osv.query.Query
    """
    # NOTE(review): some guard lines and branch headers of this method are
    # missing from this excerpt.
    # if the object has a field named 'active', filter out all inactive
    # records unless they were explicitely asked for
    if 'active' in self._columns and (active_test and context.get('active_test', True)):
        # only prepend the implicit active clause when the caller's domain
        # does not already mention 'active' itself
        active_in_args = False
            if a[0] == 'active':
                active_in_args = True
        if not active_in_args:
            domain.insert(0, ('active', '=', 1))
            domain = [('active', '=', 1)]

    # non-empty domain: translate it to SQL via the expression parser
        e = expression.expression(cr, user, domain, self, context)
        tables = e.get_tables()
        where_clause, where_params = e.to_sql()
        where_clause = where_clause and [where_clause] or []
    # empty domain: select everything from the model's own table
        where_clause, where_params, tables = [], [], ['"%s"' % self._table]

    return Query(tables, where_clause, where_params)
def _check_qorder(self, word):
    """Validate an ORDER BY specification against ``regex_order``
    (comma-separated field names, each optionally followed by asc/desc);
    raise an AccessError-style except_orm when malformed."""
    if not regex_order.match(word):
        raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
    """Add what's missing in ``query`` to implement all appropriate ir.rules
    (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

    :param query: the current query object (modified in place)
    :param mode: operation being checked ('read', 'write', ...)
    """
    def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
        # Merge one rule's SQL fragments into ``query`` in place.
        if parent_model and child_object:
            # as inherited rules are being applied, we need to add the missing JOIN
            # to reach the parent table (if it was not JOINed yet in the query)
            child_object._inherits_join_add(child_object, parent_model, query)
        query.where_clause += added_clause
        query.where_clause_params += added_params
        for table in added_tables:
            if table not in query.tables:
                query.tables.append(table)

    # apply main rules on the object
    rule_obj = self.pool.get('ir.rule')
    apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))

    # apply ir.rules from the parents (through _inherits)
    for inherited_model in self._inherits:
        kwargs = dict(parent_model=inherited_model, child_object=self) #workaround for python2.5
        apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
def _generate_m2o_order_by(self, order_field, query):
    """
    Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
    either native m2o fields or function/related fields that are stored, including
    intermediate JOINs for inheritance if required.

    :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
    """
    if order_field not in self._columns and order_field in self._inherit_fields:
        # also add missing joins for reaching the table containing the m2o field
        qualified_field = self._inherits_join_calc(order_field, query)
        order_field_column = self._inherit_fields[order_field][2]
        # field lives on this model's own table
        qualified_field = '"%s"."%s"' % (self._table, order_field)
        order_field_column = self._columns[order_field]

    assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
    # non-stored function/related m2o fields cannot be sorted in SQL
    if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
        logging.getLogger('orm.search').debug("Many2one function/related fields must be stored " \
            "to be used as ordering fields! Ignoring sorting for %s.%s",
            self._name, order_field)

    # figure out the applicable order_by for the m2o
    dest_model = self.pool.get(order_field_column._obj)
    m2o_order = dest_model._order
    if not regex_order.match(m2o_order):
        # _order is complex, can't use it here, so we default to _rec_name
        m2o_order = dest_model._rec_name
        # extract the field names, to be able to qualify them and add desc/asc
        for order_part in m2o_order.split(","):
            m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
        m2o_order = m2o_order_list

    # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
    # as we don't want to exclude results that have NULL values for the m2o
    src_table, src_field = qualified_field.replace('"','').split('.', 1)
    query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
    qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
    return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
def _generate_order_by(self, order_spec, query):
    """
    Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
    a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

    :raise except_orm: in case order_spec is malformed
    """
    # fall back to the model's default _order when no spec is given
    order_by_clause = self._order
    # NOTE(review): the enclosing 'if order_spec:' style guard around the
    # indented section below is not visible in this excerpt.
        order_by_elements = []
        self._check_qorder(order_spec)
        for order_part in order_spec.split(','):
            order_split = order_part.strip().split(' ')
            order_field = order_split[0].strip()
            order_direction = order_split[1].strip() if len(order_split) == 2 else ''
            if order_field == 'id':
                order_by_clause = '"%s"."%s"' % (self._table, order_field)
            elif order_field in self._columns:
                order_column = self._columns[order_field]
                if order_column._classic_read:
                    inner_clause = '"%s"."%s"' % (self._table, order_field)
                elif order_column._type == 'many2one':
                    inner_clause = self._generate_m2o_order_by(order_field, query)
                    continue # ignore non-readable or "non-joinable" fields
            elif order_field in self._inherit_fields:
                # inherited field: sort through the parent model's column
                parent_obj = self.pool.get(self._inherit_fields[order_field][3])
                order_column = parent_obj._columns[order_field]
                if order_column._classic_read:
                    inner_clause = self._inherits_join_calc(order_field, query)
                elif order_column._type == 'many2one':
                    inner_clause = self._generate_m2o_order_by(order_field, query)
                    continue # ignore non-readable or "non-joinable" fields
            # m2o ordering may return several qualified columns
            if isinstance(inner_clause, list):
                for clause in inner_clause:
                    order_by_elements.append("%s %s" % (clause, order_direction))
                order_by_elements.append("%s %s" % (inner_clause, order_direction))
        if order_by_elements:
            order_by_clause = ",".join(order_by_elements)

    return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
    """
    Private implementation of search() method, allowing specifying the uid to use for the access right check.
    This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
    by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
    This is ok at the security level because this method is private and not callable through XML-RPC.

    :param access_rights_uid: optional user ID to use when checking access rights
                              (not for ir.rules, this is only for ir.model.access)
    """
    # model-level read access (possibly on behalf of access_rights_uid)
    self.check_read(cr, access_rights_uid or user)

    # For transient models, restrict acces to the current user, except for the super-user
    if self.is_transient() and self._log_access and user != SUPERUSER_ID:
        args = expression.AND(([('create_uid', '=', user)], args or []))

    # translate the domain into SQL, graft record rules and the ORDER BY
    query = self._where_calc(cr, user, args, context=context)
    self._apply_ir_rules(cr, user, query, 'read', context=context)
    order_by = self._generate_order_by(order, query)
    from_clause, where_clause, where_clause_params = query.get_sql()

    limit_str = limit and ' limit %d' % limit or ''
    offset_str = offset and ' offset %d' % offset or ''
    where_str = where_clause and (" WHERE %s" % where_clause) or ''

    # count=True branch: only the number of matching records is needed
        cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)

    cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
    return [x[0] for x in res]
4437 # returns the different values ever entered for one field
4438 # this is used, for example, in the client when the user hits enter on
4440 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
4443 if field in self._inherit_fields:
4444 return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
4446 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
4448 def copy_data(self, cr, uid, id, default=None, context=None):
4450 Copy given record's data with all its fields values
4452 :param cr: database cursor
4453 :param user: current user id
4454 :param id: id of the record to copy
4455 :param default: field values to override in the original values of the copied record
4456 :type default: dictionary
4457 :param context: context arguments, like lang, time zone
4458 :type context: dictionary
4459 :return: dictionary containing all the field values
4465 # avoid recursion through already copied records in case of circular relationship
4466 seen_map = context.setdefault('__copy_data_seen',{})
4467 if id in seen_map.setdefault(self._name,[]):
4469 seen_map[self._name].append(id)
4473 if 'state' not in default:
4474 if 'state' in self._defaults:
4475 if callable(self._defaults['state']):
4476 default['state'] = self._defaults['state'](self, cr, uid, context)
4478 default['state'] = self._defaults['state']
4480 context_wo_lang = context.copy()
4481 if 'lang' in context:
4482 del context_wo_lang['lang']
4483 data = self.read(cr, uid, [id,], context=context_wo_lang)
4487 raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
4489 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
4490 fields = self.fields_get(cr, uid, context=context)
4492 ftype = fields[f]['type']
4494 if self._log_access and f in LOG_ACCESS_COLUMNS:
4498 data[f] = default[f]
4499 elif 'function' in fields[f]:
4501 elif ftype == 'many2one':
4503 data[f] = data[f] and data[f][0]
4506 elif ftype in ('one2many', 'one2one'):
4508 rel = self.pool.get(fields[f]['relation'])
4510 # duplicate following the order of the ids
4511 # because we'll rely on it later for copying
4512 # translations in copy_translation()!
4514 for rel_id in data[f]:
4515 # the lines are first duplicated using the wrong (old)
4516 # parent but then are reassigned to the correct one thanks
4517 # to the (0, 0, ...)
4518 d = rel.copy_data(cr, uid, rel_id, context=context)
4520 res.append((0, 0, d))
4522 elif ftype == 'many2many':
4523 data[f] = [(6, 0, data[f])]
4527 # make sure we don't break the current parent_store structure and
4528 # force a clean recompute!
4529 for parent_column in ['parent_left', 'parent_right']:
4530 data.pop(parent_column, None)
4531 # Remove _inherits field's from data recursively, missing parents will
4532 # be created by create() (so that copy() copy everything).
4533 def remove_ids(inherits_dict):
4534 for parent_table in inherits_dict:
4535 del data[inherits_dict[parent_table]]
4536 remove_ids(self.pool.get(parent_table)._inherits)
4537 remove_ids(self._inherits)
4540 def copy_translations(self, cr, uid, old_id, new_id, context=None):
4544 # avoid recursion through already copied records in case of circular relationship
4545 seen_map = context.setdefault('__copy_translations_seen',{})
4546 if old_id in seen_map.setdefault(self._name,[]):
4548 seen_map[self._name].append(old_id)
4550 trans_obj = self.pool.get('ir.translation')
4551 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
4552 fields = self.fields_get(cr, uid, context=context)
4554 translation_records = []
4555 for field_name, field_def in fields.items():
4556 # we must recursively copy the translations for o2o and o2m
4557 if field_def['type'] in ('one2one', 'one2many'):
4558 target_obj = self.pool.get(field_def['relation'])
4559 old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
4560 # here we rely on the order of the ids to match the translations
4561 # as foreseen in copy_data()
4562 old_children = sorted(old_record[field_name])
4563 new_children = sorted(new_record[field_name])
4564 for (old_child, new_child) in zip(old_children, new_children):
4565 target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
4566 # and for translatable fields we keep them for copy
4567 elif field_def.get('translate'):
4569 if field_name in self._columns:
4570 trans_name = self._name + "," + field_name
4571 elif field_name in self._inherit_fields:
4572 trans_name = self._inherit_fields[field_name][0] + "," + field_name
4574 trans_ids = trans_obj.search(cr, uid, [
4575 ('name', '=', trans_name),
4576 ('res_id', '=', old_id)
4578 translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))
4580 for record in translation_records:
4582 record['res_id'] = new_id
4583 trans_obj.create(cr, uid, record, context=context)
4586 def copy(self, cr, uid, id, default=None, context=None):
4588 Duplicate record with given id updating it with default values
4590 :param cr: database cursor
4591 :param uid: current user id
4592 :param id: id of the record to copy
4593 :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
4594 :type default: dictionary
4595 :param context: context arguments, like lang, time zone
4596 :type context: dictionary
4602 context = context.copy()
4603 data = self.copy_data(cr, uid, id, default, context)
4604 new_id = self.create(cr, uid, data, context)
4605 self.copy_translations(cr, uid, id, new_id, context)
4608 def exists(self, cr, uid, ids, context=None):
4609 """Checks whether the given id or ids exist in this model,
4610 and return the list of ids that do. This is simple to use for
4611 a truth test on a browse_record::
4616 :param ids: id or list of ids to check for existence
4617 :type ids: int or [int]
4618 :return: the list of ids that currently exist, out of
4621 if type(ids) in (int, long):
4623 query = 'SELECT id FROM "%s"' % (self._table)
4624 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
4625 return [x[0] for x in cr.fetchall()]
4627 def check_recursion(self, cr, uid, ids, context=None, parent=None):
4628 warnings.warn("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
4629 self._name, DeprecationWarning, stacklevel=3)
4630 assert parent is None or parent in self._columns or parent in self._inherit_fields,\
4631 "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
4632 return self._check_recursion(cr, uid, ids, context, parent)
4634 def _check_recursion(self, cr, uid, ids, context=None, parent=None):
4636 Verifies that there is no loop in a hierarchical structure of records,
4637 by following the parent relationship using the **parent** field until a loop
4638 is detected or until a top-level record is found.
4640 :param cr: database cursor
4641 :param uid: current user id
4642 :param ids: list of ids of records to check
4643 :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
4644 :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
4648 parent = self._parent_name
4650 query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
4653 for i in range(0, len(ids), cr.IN_MAX):
4654 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
4655 cr.execute(query, (tuple(sub_ids_parent),))
4656 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
4657 ids_parent = ids_parent2
4658 for i in ids_parent:
4663 def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
4664 """Retrieve the External ID(s) of any database record.
4666 **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
4668 :return: map of ids to the list of their fully qualified External IDs
4669 in the form ``module.key``, or an empty list when there's no External
4670 ID for a record, e.g.::
4672 { 'id': ['module.ext_id', 'module.ext_id_bis'],
4675 ir_model_data = self.pool.get('ir.model.data')
4676 data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
4677 data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
4680 # can't use dict.fromkeys() as the list would be shared!
4682 for record in data_results:
4683 result[record['res_id']].append('%(module)s.%(name)s' % record)
4686 def get_external_id(self, cr, uid, ids, *args, **kwargs):
4687 """Retrieve the External ID of any database record, if there
4688 is one. This method works as a possible implementation
4689 for a function field, to be able to add it to any
4690 model object easily, referencing it as ``Model.get_external_id``.
4692 When multiple External IDs exist for a record, only one
4693 of them is returned (randomly).
4695 :return: map of ids to their fully qualified XML ID,
4696 defaulting to an empty string when there's none
4697 (to be usable as a function field),
4700 { 'id': 'module.ext_id',
4703 results = self._get_xml_ids(cr, uid, ids)
4704 for k, v in results.iteritems():
    # backwards compatibility: pre-renaming aliases for the External ID accessors
    get_xml_id = get_external_id
    _get_xml_ids = _get_external_ids
4716 def is_transient(self):
4717 """ Return whether the model is transient.
4722 return self._transient
4724 def _transient_clean_rows_older_than(self, cr, seconds):
4725 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
4726 cr.execute("SELECT id FROM " + self._table + " WHERE"
4727 " COALESCE(write_date, create_date, now())::timestamp <"
4728 " (now() - interval %s)", ("%s seconds" % seconds,))
4729 ids = [x[0] for x in cr.fetchall()]
4730 self.unlink(cr, SUPERUSER_ID, ids)
4732 def _transient_clean_old_rows(self, cr, count):
4733 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
4735 "SELECT id, COALESCE(write_date, create_date, now())::timestamp"
4736 " AS t FROM " + self._table +
4737 " ORDER BY t LIMIT %s", (count,))
4738 ids = [x[0] for x in cr.fetchall()]
4739 self.unlink(cr, SUPERUSER_ID, ids)
4741 def _transient_vacuum(self, cr, uid, force=False):
4742 """Clean the transient records.
4744 This unlinks old records from the transient model tables whenever the
4745 "_transient_max_count" or "_max_age" conditions (if any) are reached.
4746 Actual cleaning will happen only once every "_transient_check_time" calls.
4747 This means this method can be called frequently called (e.g. whenever
4748 a new record is created).
4750 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
4751 self._transient_check_count += 1
4752 if (not force) and (self._transient_check_count % self._transient_check_time):
4753 self._transient_check_count = 0
4756 # Age-based expiration
4757 if self._transient_max_hours:
4758 self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)
4760 # Count-based expiration
4761 if self._transient_max_count:
4762 self._transient_clean_old_rows(cr, self._transient_max_count)
4766 def resolve_o2m_commands_to_record_dicts(self, cr, uid, field_name, o2m_commands, fields=None, context=None):
4767 """ Serializes o2m commands into record dictionaries (as if
4768 all the o2m records came from the database via a read()), and
4769 returns an iterable over these dictionaries.
4771 Because o2m commands might be creation commands, not all
4772 record ids will contain an ``id`` field. Commands matching an
4773 existing record (``UPDATE`` and ``LINK_TO``) will have an id.
4775 .. note:: ``CREATE``, ``UPDATE`` and ``LINK_TO`` stand for the
4776 o2m command codes ``0``, ``1`` and ``4``
4779 :param field_name: name of the o2m field matching the commands
4780 :type field_name: str
4781 :param o2m_commands: one2many commands to execute on ``field_name``
4782 :type o2m_commands: list((int|False, int|False, dict|False))
4783 :param fields: list of fields to read from the database, when applicable
4784 :type fields: list(str)
4785 :raises AssertionError: if a command is not ``CREATE``, ``UPDATE`` or ``LINK_TO``
4786 :returns: o2m records in a shape similar to that returned by
4787 ``read()`` (except records may be missing the ``id``
4788 field if they don't exist in db)
4789 :rtype: ``list(dict)``
4791 o2m_model = self._all_columns[field_name].column._obj
4793 # convert single ids and pairs to tripled commands
4795 for o2m_command in o2m_commands:
4796 if not isinstance(o2m_command, (list, tuple)):
4798 commands.append((command, o2m_command, False))
4799 elif len(o2m_command) == 1:
4800 (command,) = o2m_command
4801 commands.append((command, False, False))
4802 elif len(o2m_command) == 2:
4803 command, id = o2m_command
4804 commands.append((command, id, False))
4806 command = o2m_command[0]
4807 commands.append(o2m_command)
4808 assert command in (0, 1, 4), \
4809 "Only CREATE, UPDATE and LINK_TO commands are supported in resolver"
4811 # extract records to read, by id, in a mapping dict
4812 ids_to_read = [id for (command, id, _) in commands if command in (1, 4)]
4813 records_by_id = dict(
4814 (record['id'], record)
4815 for record in self.pool.get(o2m_model).read(
4816 cr, uid, ids_to_read, fields=fields, context=context))
4819 # merge record from db with record provided by command
4820 for command, id, record in commands:
4822 if command in (1, 4): item.update(records_by_id[id])
4823 if command in (0, 1): item.update(record)
4824 record_dicts.append(item)
# keep this import here, at top it will cause dependency cycle errors
# (restored: _search() above relies on expression.AND)
import expression
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class::

        class user(Model):
            ...

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = False # True in a TransientModel
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vacuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = True # restored: without it TransientModel inherits False and is never vacuumed
class AbstractModel(BaseModel):
    """Abstract Model super-class for creating an abstract class meant to be
    inherited by regular models (Models or TransientModels) but not meant to
    be usable on its own, or persisted.

    Technical note: we don't want to make AbstractModel the super-class of
    Model or BaseModel because it would not make sense to put the main
    definition of persistence methods such as create() in it, and still we
    should be able to override them within an AbstractModel.
    """
    _auto = False # don't create any database backend for AbstractModels
    _register = False # not visible in ORM registry, meant to be python-inherited only
4870 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: