1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
22 #.apidoc title: Object Relational Mapping
23 #.apidoc module-mods: member-order: bysource
26 Object relational mapping to database (postgresql) module
27 * Hierarchical structure
28 * Constraints consistency, validations
29 * Object meta Data depends on its status
30 * Optimised processing by complex query (multiple actions at once)
31 * Default fields value
32 * Permissions optimisation
33 * Persistant object: DB postgresql
35 * Multi-level caching system
36 * 2 different inheritancies
38 - classicals (varchar, integer, boolean, ...)
39 - relations (one2many, many2one, many2many)
57 from lxml import etree
61 import openerp.netsvc as netsvc
62 import openerp.tools as tools
63 from openerp.tools.config import config
64 from openerp.tools.safe_eval import safe_eval as eval
65 from openerp.tools.translate import _
66 from openerp import SUPERUSER_ID
67 from query import Query
69 # List of etree._Element subclasses that we choose to ignore when parsing XML.
70 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Pattern validating an SQL ORDER BY clause: comma-separated identifiers
# (optionally double-quoted), each with an optional ASC/DESC keyword
# (case-insensitive).
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Pattern validating an OpenERP model (_name) identifier: lowercase letters,
# digits, underscores and dots only.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """ Derive the 'modifiers' entries (invisible/readonly/required) from a
    field descriptor (as returned by fields_get()) and merge them into
    ``modifiers`` in place.

    Static attribute values become booleans; per-state exceptions declared in
    the field's "states" dict become [('state', 'in'/'not in', [...])] domains.
    """
    # Missing initializations restored: both dicts are filled in the loop below.
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    # Collect the states whose declared value differs from the default.
    for state, modifs in (field.get("states",{})).items():
        for modif in modifs:
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)

    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            # Express the exceptions as a domain on the 'state' field: the
            # operator is inverted depending on the default value.
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            modifiers[attr] = default_value
93 # Don't deal with groups, it is done by check_group().
94 # Need the context to evaluate the invisible attribute on tree views.
95 # For non-tree views, the context shouldn't be given.
96 def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
98 modifiers.update(eval(node.get('attrs')))
100 if node.get('states'):
101 if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
102 # TODO combine with AND or OR, use implicit AND for now.
103 modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
105 modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]
107 for a in ('invisible', 'readonly', 'required'):
109 v = bool(eval(node.get(a), {'context': context or {}}))
110 if in_tree_view and a == 'invisible':
111 # Invisible in a tree view has a specific meaning, make it a
112 # new key in the modifiers attribute.
113 modifiers['tree_invisible'] = v
114 elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
115 # Don't set the attribute to False if a dynamic value was
116 # provided (i.e. a domain from attrs or states).
def simplify_modifiers(modifiers):
    """ Remove falsy invisible/readonly/required entries from ``modifiers``
    in place, keeping the serialized form minimal (False is the implicit
    default on the client side). """
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            # Missing statement restored: drop the redundant falsy entry.
            del modifiers[a]
def transfer_modifiers_to_node(modifiers, node):
    """ Serialize ``modifiers`` (after simplification) as JSON into the
    node's 'modifiers' attribute. Leaves the node untouched when there is
    nothing to write. """
    # Missing guard restored: do not emit an empty 'modifiers' attribute.
    if modifiers:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    :returns: nothing
    """
    # Missing initialization restored: accumulate modifiers here before
    # serializing them onto the node.
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """ Assert that ``what`` -- either an XML node string or a
    fields_get()-style dict -- produces exactly the JSON string ``expected``
    once run through the modifiers helpers above. """
    # Missing initialization restored: build the modifiers dict from scratch.
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
175 # openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """ Sanity checks for the modifiers helpers: each call asserts that the
    given view node (XML string) or field descriptor (dict) produces exactly
    the expected JSON 'modifiers' value. Run manually, e.g.
    openerp.osv.orm.modifiers_tests(). """
    test_modifiers('<field name="a"/>', '{}')
    test_modifiers('<field name="a" invisible="1"/>', '{"invisible": true}')
    test_modifiers('<field name="a" readonly="1"/>', '{"readonly": true}')
    test_modifiers('<field name="a" required="1"/>', '{"required": true}')
    test_modifiers('<field name="a" invisible="0"/>', '{}')
    test_modifiers('<field name="a" readonly="0"/>', '{}')
    test_modifiers('<field name="a" required="0"/>', '{}')
    test_modifiers('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}') # TODO order is not guaranteed
    test_modifiers('<field name="a" invisible="1" required="0"/>', '{"invisible": true}')
    test_modifiers('<field name="a" invisible="0" required="1"/>', '{"required": true}')
    test_modifiers("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}')

    # The dictionary is supposed to be the result of fields_get().
    test_modifiers({}, '{}')
    test_modifiers({"invisible": True}, '{"invisible": true}')
    test_modifiers({"invisible": False}, '{}')
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

        The _name attribute in osv and osv_memory object is subject to
        some restrictions. This function returns True or False whether
        the given name is allowed or not.

        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in other not, which leads to this kind
        of errors:

            psycopg2.ProgrammingError: relation "xxx" does not exist).

        The same restriction should apply to both osv and osv_memory
        objects for consistency.
    """
    # Missing return statements restored.
    if regex_object_name.match(name) is None:
        return False
    return True
def raise_on_invalid_object_name(name):
    """ Validate a model name; log the problem and raise an except_orm when
    ``name`` does not satisfy check_object_name(). """
    if not check_object_name(name):
        msg = "The _name attribute %s is not valid." % name
        logger = netsvc.Logger()
        logger.notifyChannel('orm', netsvc.LOG_ERROR, msg)
        raise except_orm('ValueError', msg)
# Mapping from the SQL ON DELETE action keywords to the single-letter codes
# Postgres stores in pg_constraint.confdeltype.
# Body restored: the dict entries and closing brace were lost in this chunk.
POSTGRES_CONFDELTYPES = {
    'RESTRICT': 'r',
    'NO ACTION': 'a',
    'CASCADE': 'c',
    'SET NULL': 'n',
    'SET DEFAULT': 'd',
}
def intersect(la, lb):
    """ Return the elements of ``la`` that also appear in ``lb``, preserving
    the order (and duplicates) of ``la``. """
    # Note: O(len(la)*len(lb)); fine for the small lists this is used on.
    return filter(lambda x: x in lb, la)
def fix_import_export_id_paths(fieldname):
    """ Fixes the id fields in import and exports, and splits field paths
    on '/'.

    Rewrites the legacy suffixes ``.id`` (database id) and ``:id`` (external
    id) into path components ``/.id`` and ``/id`` before splitting.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
    return fixed_external_id.split('/')
class except_orm(Exception):
    """ Generic ORM exception carrying a (name, value) pair, e.g.
    ('ValueError', 'human readable message'). """
    def __init__(self, name, value):
        # Missing attribute assignments restored: callers read .name/.value.
        self.name = name
        self.value = value
        self.args = (name, value)
class BrowseRecordError(Exception):
    """ Raised when a browse_record is built with an invalid id. """
    pass
class browse_null(object):
    """ Readonly python database object browser

    Null object pattern: stands in for an absent record; any item or
    attribute access yields None, and its id is False.
    """

    def __init__(self):
        self.id = False

    def __getitem__(self, name):
        return None

    def __getattr__(self, name):
        return None # XXX: return self ?

    def __nonzero__(self):
        # falsy in boolean context (Python 2 truth protocol)
        return False

    def __unicode__(self):
        return u''
284 # TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """ Collection of browse objects

    Such an instance will be returned when doing a ``browse([ids..])``
    and will be iterable, yielding browse() objects
    """

    def __init__(self, lst, context=None):
        # Missing defaulting restored: guarantee self.context is a dict.
        if not context:
            context = {}
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """ An object that behaves like a row of an object's table.
        It has attributes after the columns of the corresponding object.

        Examples::

            uobj = pool.get('res.users')
            user_rec = uobj.browse(cr, uid, 104)
    """
    logger = netsvc.Logger()

    def __init__(self, cr, uid, id, table, cache, context=None, list_class=None, fields_process=None):
        """
        @param cache a dictionary of model->field->data to be shared accross browse
            objects, thus reducing the SQL read()s . It can speed up things a lot,
            but also be disastrous if not discarded after write()/unlink() operations
        @param table the object (inherited from orm)
        @param context dictionary with an optional context
        """
        if fields_process is None:
            fields_process = {}
        if context is None:
            context = {}
        self._list_class = list_class or browse_record_list
        # Missing assignments restored: cursor, user, record id and model.
        self._cr = cr
        self._uid = uid
        self._id = id
        self._table = table # deprecated, use _model!
        self._model = table
        self._table_name = self._table._name
        self.__logger = logging.getLogger(
            'osv.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        cache.setdefault(table._name, {})
        self._data = cache[table._name]
        # keep a reference to the whole shared cache (used by refresh())
        self._cache = cache

        if not (id and isinstance(id, (int, long,))):
            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}
    def __getitem__(self, name):
        """ Return the value of field ``name`` for this record, fetching it
        (together with other prefetchable fields) through read() on first
        access and caching the result in the shared cache.

        NOTE(review): several original lines appear to be missing from this
        chunk (fast paths, else-branches, guard conditions and some local
        initializations such as 'new_data = {}'); the code below is
        transcribed as found -- confirm against upstream before relying on it.
        """
        if name not in self._data[self._id]:
            # build the list of fields we will fetch

            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                # inherited field: the _column object is the third item
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                attr = getattr(self._table, name)
                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    # model method: return a proxy bound to this record's id,
                    # propagating the record's context when none is given
                    def function_proxy(*args, **kwargs):
                        if 'context' not in kwargs and self._context:
                            kwargs.update(context=self._context)
                        return attr(self._cr, self._uid, [self._id], *args, **kwargs)
                    return function_proxy
            error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
            self.logger.notifyChannel("browse_record", netsvc.LOG_WARNING, error_msg)
            raise KeyError(error_msg)

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            # gen the list of "local" (ie not inherited) fields which are classic or many2one
            fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
            # gen the list of inherited fields
            inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
            # complete the field list with the inherited fields which are classic or many2one
            fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
            fields_to_fetch = [(name, col)]
            # only fetch for the cached ids that miss this field
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            field_names = map(lambda x: x[0], fields_to_fetch)
            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

            # Where did those ids come from? Perhaps old entries in ir_model_dat?
            self.__logger.warn("No field_values found for ids %s in %s", ids, self)
            raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                for field_name, field_column in fields_to_fetch:
                    if field_column._type in ('many2one', 'one2one'):
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                            value = result_line[field_name]
                            # FIXME: this happen when a _inherits object
                            #        overwrite a field of it parent. Need
                            #        testing to be sure we got the right
                            #        object and not the parent one.
                            if not isinstance(value, browse_record):
                                # In some cases the target model is not available yet, so we must ignore it,
                                # which is safe in most cases, this value will just be loaded later when needed.
                                # This situation can be caused by custom fields that connect objects with m2o without
                                # respecting module dependencies, causing relationships to be connected to soon when
                                # the target is not loaded yet.
                                new_data[field_name] = browse_record(self._cr,
                                    self._uid, value, obj, self._cache,
                                    context=self._context,
                                    list_class=self._list_class,
                                    fields_process=self._fields_process)
                            new_data[field_name] = value
                            new_data[field_name] = browse_null()
                            new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type in ('reference'):
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                            ref_obj, ref_id = result_line[field_name].split(',')
                            ref_id = long(ref_id)
                            obj = self._table.pool.get(ref_obj)
                            new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                            new_data[field_name] = browse_null()
                        new_data[field_name] = browse_null()
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

        if not name in self._data[self._id]:
            # How did this happen? Could be a missing model due to custom fields used too soon, see above.
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                    "Fields to fetch: %s, Field values: %s"%(field_names, field_values))
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                    "Cached: %s, Table: %s"%(self._data[self._id], self._table))
            raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]
475 def __getattr__(self, name):
479 raise AttributeError(e)
481 def __contains__(self, name):
482 return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)
    # NOTE(review): lines appear to be missing around here -- the 'def' lines
    # for __iter__ (whose body is the raise below) and __str__/__repr__ (whose
    # body is the return below), and the body of __hasattr__, are not present
    # in this chunk; transcribed as found, confirm against upstream.
        raise NotImplementedError("Iteration is not allowed on %s" % self)

    def __hasattr__(self, name):

        return "browse_record(%s, %d)" % (self._table_name, self._id)
496 def __eq__(self, other):
497 if not isinstance(other, browse_record):
499 return (self._table_name, self._id) == (other._table_name, other._id)
501 def __ne__(self, other):
502 if not isinstance(other, browse_record):
504 return (self._table_name, self._id) != (other._table_name, other._id)
506 # we need to define __unicode__ even though we've already defined __str__
507 # because we have overridden __getattr__
508 def __unicode__(self):
509 return unicode(str(self))
512 return hash((self._table_name, self._id))
517 """Force refreshing this browse_record's data and all the data of the
518 records that belong to the same cache, by emptying the cache completely,
519 preserving only the record identifiers (for prefetching optimizations).
521 for model, model_cache in self._cache.iteritems():
522 # only preserve the ids of the records that were in the cache
523 cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
524 self._cache[model].clear()
525 self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * If no size (or an empty or negative size is provided) return an
      'infinite' VARCHAR
    * Otherwise return a VARCHAR(n)

    :type int size: varchar size, optional
    :rtype: str
    """
    # Missing guard and fallback restored: only sized, positive ints yield
    # VARCHAR(n); everything else falls through to the unlimited VARCHAR.
    if size:
        if not isinstance(size, int):
            raise TypeError("VARCHAR parameter should be an int, got %s"
                            % type(size))
        if size > 0:
            return 'VARCHAR(%d)' % size
    return 'VARCHAR'
# Mapping from concrete field classes to their Postgres column type (used by
# get_pg_type() both as identification type and as column specification).
# Missing closing brace restored.
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.integer_big: 'int8',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    # NOTE(review): entries for other basic types (e.g. text/date/time) appear
    # to have been lost from this chunk -- confirm against upstream.
}
def get_pg_type(f, type_override=None):
    """ Compute the Postgres column type for a field.

    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification),
              or None when the field type is not supported
    """
    field_type = type_override or type(f)

    # Missing else-branches and final return restored.
    if field_type in FIELDS_TO_PGTYPES:
        pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
    elif issubclass(field_type, fields.float):
        # fixed-precision floats map to NUMERIC, free floats to float8
        if f.digits:
            pg_type = ('numeric', 'NUMERIC')
        else:
            pg_type = ('float8', 'DOUBLE PRECISION')
    elif issubclass(field_type, (fields.char, fields.reference)):
        pg_type = ('varchar', pg_varchar(f.size))
    elif issubclass(field_type, fields.selection):
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
                or getattr(f, 'size', None) == -1:
            pg_type = ('int4', 'INTEGER')
        else:
            pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
    elif issubclass(field_type, fields.function):
        # functional fields are stored with the pg type of their underlying _type
        if f._type == 'selection':
            pg_type = ('varchar', pg_varchar())
        else:
            pg_type = get_pg_type(f, getattr(fields, f._type))
    else:
        logging.getLogger('orm').warn('%s type not supported!', field_type)
        pg_type = None

    return pg_type
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.
    """

    # shared registry: module name -> list of model classes defined in it
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            self._register = True
            # Missing 'return' restored: classes with _register=False must
            # not be recorded in module_to_models below.
            return super(MetaModel, self).__init__(name, bases, attrs)

        # the (addon) module name is the first component of the dotted path
        module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
# Missing closing brace restored.
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (list() around keys() keeps the concatenation valid on both py2 and py3,
# with identical behaviour on py2)
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys())
630 class BaseModel(object):
631 """ Base class for OpenERP models.
633 OpenERP models are created by inheriting from this class' subclasses:
635 * Model: for regular database-persisted models
636 * TransientModel: for temporary data, stored in the database but automatically
637 vaccuumed every so often
638 * AbstractModel: for abstract super classes meant to be shared by multiple
639 _inheriting classes (usually Models or TransientModels)
641 The system will later instantiate the class once per database (on
642 which the class' module is installed).
644 To create a class that should not be instantiated, the _register class attribute
    # NOTE(review): many class attributes appear to be missing from this
    # chunk (_name, _columns, _defaults, _description, _order, _inherits,
    # _inherit_fields, _all_columns, ...); only the visible ones remain.
    __metaclass__ = MetaModel
    _register = False # Set to false if the model shouldn't be automatically discovered.

    # parent/child hierarchy support
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # dict of {field:method}, with method returning the name_get of records
    # to include in the _read_group, if grouped on this field

    _transient = False # True in a TransientModel
    _transient_max_count = None
    _transient_max_hours = None
    _transient_check_time = 20

    # { 'parent_model': 'm2o_field', ... }

    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #   field_column_obj, origina_parent_model), ... }

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.

    _sql_constraints = []
    # method names that must never be exposed/overridden via ir.model.data
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
    __logger = logging.getLogger('orm')
    __schema = logging.getLogger('orm.schema')

    # pseudo-field used for optimistic-concurrency checks in read()/write()
    CONCURRENCY_CHECK_FIELD = '__last_update'
701 def log(self, cr, uid, id, message, secondary=False, context=None):
702 if context and context.get('disable_log'):
704 return self.pool.get('res.log').create(cr, uid,
707 'res_model': self._name,
708 'secondary': secondary,
714 def view_init(self, cr, uid, fields_list, context=None):
715 """Override this method to do specific things when a view on the object is opened."""
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).
        """
        # NOTE(review): several original lines appear to be missing from this
        # chunk (context defaulting, cr.rowcount guards, the 'cols = {}' and
        # 'vals = {' openers, 'model'/'name'/'ttype' entries, SQL fragments
        # and closing parentheses); transcribed as found -- confirm against
        # upstream before relying on this method.
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
        # no ir_model row yet: allocate an id and insert one
        cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
        model_id = cr.fetchone()[0]
        cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
        model_id = cr.fetchone()[0]
        if 'module' in context:
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
            cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                (name_id, context['module'], 'ir.model', model_id)

        # snapshot the existing ir_model_fields rows for this model
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        for (k, f) in self._columns.items():
                'model_id': model_id,
                'field_description': f.string.replace("'", " "),
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': (f._type=='one2many' and isinstance(f, fields.one2many)) and f._fields_id or '',
            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                    vals['select_level'] = cols[k]['select_level']

            # new field: allocate an id and insert the ir_model_fields row
            cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
            id = cr.fetchone()[0]
            cr.execute("""INSERT INTO ir_model_fields (
                id, model_id, model, name, field_description, ttype,
                relation,view_load,state,select_level,relation_field, translate
                %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                vals['relation'], bool(vals['view_load']), 'base',
                vals['select_level'], vals['relation_field'], bool(vals['translate'])
            if 'module' in context:
                name1 = 'field_' + self._table + '_' + k
                cr.execute("select name from ir_model_data where name=%s", (name1,))
                name1 = name1 + "_" + str(id)
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                    (name1, context['module'], 'ir.model.fields', id)
            # existing field: update the row when any value changed
            for key, val in vals.items():
                if cols[k][key] != vals[key]:
                    cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                    cr.execute("""UPDATE ir_model_fields SET
                        model_id=%s, field_description=%s, ttype=%s, relation=%s,
                        view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s
                        model=%s AND name=%s""", (
                        vals['model_id'], vals['field_description'], vals['ttype'],
                        vals['relation'], bool(vals['view_load']),
                        vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['model'], vals['name']
    # Goal: try to apply inheritance at the instanciation level and
    # put objects in the pool var
    # NOTE(review): the @classmethod decorator and several lines (the end of
    # the '_sql_constraints' attribute list, 'for s in attributes:' loop
    # header, else-branches and the final 'return obj') appear to be missing
    # from this chunk; transcribed as found -- confirm against upstream.
    def create_instance(cls, pool, cr):
        """ Instanciate a given model.

        This class method instanciates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        The ``attributes`` argument specifies which parent class attributes

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.
        """
        attributes = ['_columns', '_defaults', '_inherits', '_constraints',
        parent_names = getattr(cls, '_inherit', None)
        if isinstance(parent_names, (str, unicode)):
            name = cls._name or parent_names
            parent_names = [parent_names]
            raise TypeError('_name is mandatory in case of multiple inheritance')

        for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
            parent_model = pool.get(parent_name)
            if not getattr(cls, '_original_module', None) and name == parent_model._name:
                cls._original_module = parent_model._original_module
                raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                    'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
            parent_class = parent_model.__class__
                new = copy.copy(getattr(parent_model, s, {}))
                # Don't _inherit custom fields.
                if hasattr(new, 'update'):
                    new.update(cls.__dict__.get(s, {}))
                elif s=='_constraints':
                    for c in cls.__dict__.get(s, []):
                        for c2 in range(len(new)):
                            #For _constraints, we should check field and methods as well
                            if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                    or getattr(new[c2][0],'__name__', True) == \
                                    getattr(c[0],'__name__', False)):
                                # If new class defines a constraint with
                                # same function name, we let it override
                    new.extend(cls.__dict__.get(s, []))

        # build the combined class and instanciate it on this registry
        cls = type(name, (cls, parent_class), dict(nattr, _register=False))
        if not getattr(cls, '_original_module', None):
            cls._original_module = cls._module
        obj = object.__new__(cls)
        obj.__init__(pool, cr)
899 """Register this model.
901 This doesn't create an instance but simply register the model
902 as being part of the module where it is defined.
907 # Set the module name (e.g. base, sale, accounting, ...) on the class.
908 module = cls.__module__.split('.')[0]
909 if not hasattr(cls, '_module'):
912 # Record this class in the list of models to instantiate for this module,
913 # managed by the metaclass.
914 module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
915 if cls not in module_model_list:
916 module_model_list.append(cls)
918 # Since we don't return an instance here, the __init__
919 # method won't be called.
922 def __init__(self, pool, cr):
923 """ Initialize a model and make it part of the given registry.
925 - copy the stored fields' functions in the osv_pool,
926 - update the _columns with the fields found in ir_model_fields,
927 - ensure there is a many2one for each _inherits'd parent,
928 - update the children's _columns,
929 - give a chance to each field to initialize itself.
932 pool.add(self._name, self)
935 if not self._name and not hasattr(self, '_inherit'):
936 name = type(self).__name__.split('.')[0]
937 msg = "The class %s has to have a _name attribute" % name
939 logger = netsvc.Logger()
940 logger.notifyChannel('orm', netsvc.LOG_ERROR, msg)
941 raise except_orm('ValueError', msg)
943 if not self._description:
944 self._description = self._name
946 self._table = self._name.replace('.', '_')
948 if not hasattr(self, '_log_access'):
949 # If _log_access is not specified, it is the same value as _auto.
950 self._log_access = getattr(self, "_auto", True)
952 self._columns = self._columns.copy()
953 for store_field in self._columns:
954 f = self._columns[store_field]
955 if hasattr(f, 'digits_change'):
957 def not_this_field(stored_func):
958 x, y, z, e, f, l = stored_func
959 return x != self._name or y != store_field
960 self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
961 if not isinstance(f, fields.function):
967 sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
968 for object, aa in sm.items():
970 (fnct, fields2, order, length) = aa
972 (fnct, fields2, order) = aa
975 raise except_orm('Error',
976 ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
977 self.pool._store_function.setdefault(object, [])
978 self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
979 self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
981 for (key, _, msg) in self._sql_constraints:
982 self.pool._sql_error[self._table+'_'+key] = msg
986 cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
988 cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
989 for field in cr.dictfetchall():
990 if field['name'] in self._columns:
993 'string': field['field_description'],
994 'required': bool(field['required']),
995 'readonly': bool(field['readonly']),
996 'domain': eval(field['domain']) if field['domain'] else None,
997 'size': field['size'],
998 'ondelete': field['on_delete'],
999 'translate': (field['translate']),
1001 #'select': int(field['select_level'])
1004 if field['ttype'] == 'selection':
1005 self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
1006 elif field['ttype'] == 'reference':
1007 self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
1008 elif field['ttype'] == 'many2one':
1009 self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
1010 elif field['ttype'] == 'one2many':
1011 self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
1012 elif field['ttype'] == 'many2many':
1013 _rel1 = field['relation'].replace('.', '_')
1014 _rel2 = field['model'].replace('.', '_')
1015 _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
1016 self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
1018 self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
1019 self._inherits_check()
1020 self._inherits_reload()
1021 if not self._sequence:
1022 self._sequence = self._table + '_id_seq'
1023 for k in self._defaults:
1024 assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
1025 for f in self._columns:
1026 self._columns[f].restart()
1029 if self.is_transient():
1030 self._transient_check_count = 0
1031 self._transient_max_count = config.get('osv_memory_count_limit')
1032 self._transient_max_hours = config.get('osv_memory_age_limit')
1033 assert self._log_access, "TransientModels must have log_access turned on, "\
1034 "in order to implement their access rights policy"
    def __export_row(self, cr, uid, row, fields, context=None):
        """Export one browse_record `row` as a list of rows of values.

        `fields` is a list of field paths, each already split into components
        (e.g. ``['order_line', 'product_id', 'id']``). Returns ``[data] + lines``
        where `data` is the flat row for `row` and `lines` holds extra rows
        produced by one2many sub-records.

        NOTE(review): several lines of this method are not visible in this
        chunk; comments describe only the visible code.
        """

        def check_type(field_type):
            # Default "empty" export value per field type.
            if field_type == 'float':
            elif field_type == 'integer':
            elif field_type == 'boolean':

        def selection_field(in_field):
            # Resolve the column object for the current path component by
            # walking an _inherits mapping. NOTE(review): reads `f` and `i`
            # from the enclosing loop's scope.
            col_obj = self.pool.get(in_field.keys()[0])
            if f[i] in col_obj._columns.keys():
                return col_obj._columns[f[i]]
            elif f[i] in col_obj._inherits.keys():
                selection_field(col_obj._inherits)

        # One output cell per requested field path.
        data = map(lambda x: '', range(len(fields)))

        for fpos in range(len(fields)):
                        # 'id' component: export the record's XML ID, creating
                        # a '__export__' ir.model.data entry when none exists.
                        model_data = self.pool.get('ir.model.data')
                        data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
                            d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
                                r = '%s.%s' % (d['module'], d['name'])
                                # Generate a unique fallback name: table_id[_postfix]
                                n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
                                if not model_data.search(cr, uid, [('name', '=', n)]):
                            model_data.create(cr, uid, {
                                'model': self._name,
                                'module': '__export__',
                        # Display the external (human readable) label of a
                        # selection field when it is exported.
                        if f[i] in self._columns.keys():
                            cols = self._columns[f[i]]
                        elif f[i] in self._inherit_fields.keys():
                            cols = selection_field(self._inherits)
                        if cols and cols._type == 'selection':
                            sel_list = cols.selection
                            if r and type(sel_list) == type([]):
                                r = [x[1] for x in sel_list if r==x[0]]
                                r = r and r[0] or False
                    # Falsy value: substitute the type-specific empty value.
                    if f[i] in self._columns:
                        r = check_type(self._columns[f[i]]._type)
                    elif f[i] in self._inherit_fields:
                        r = check_type(self._inherit_fields[f[i]][2]._type)
                    data[fpos] = r or False
                if isinstance(r, (browse_record_list, list)):
                    # one2many value: recurse on the sub-records with the
                    # matching sub-paths and merge the first sub-row in place.
                    fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                    if [x for x in fields2 if x]:
                    done.append(fields2)
                    lines2 = self.__export_row(cr, uid, row2, fields2,
                    for fpos2 in range(len(fields)):
                        if lines2 and lines2[0][fpos2]:
                            data[fpos2] = lines2[0][fpos2]
                    # Build a comma-separated list of name_get() labels.
                    name_relation = self.pool.get(rr._table_name)._rec_name
                    if isinstance(rr[name_relation], browse_record):
                        rr = rr[name_relation]
                    rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
                    rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                    dt += tools.ustr(rr_name or '') + ','
                    # Drop the trailing comma added by the loop above.
                    data[fpos] = dt[:-1]
                if isinstance(r, browse_record):
                    # many2one value: export its name_get() label.
                    r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
                    r = r and r[0] and r[0][1] or ''
                data[fpos] = tools.ustr(r or '')
        return [data] + lines
    def export_data(self, cr, uid, ids, fields_to_export, context=None):
        """
        Export fields for selected objects

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids
        :param fields_to_export: list of fields
        :param context: context arguments, like lang, time zone
        :rtype: dictionary with a *datas* matrix

        This method is used when exporting data via client menu
        """
        # Merge own columns with inherited (_inherits) columns.
        cols = self._columns.copy()
        for f in self._inherit_fields:
            cols.update({f: self._inherit_fields[f][2]})
        # Split 'a/b/c'-style field paths into component lists.
        fields_to_export = map(fix_import_export_id_paths, fields_to_export)
        for row in self.browse(cr, uid, ids, context):
            datas += self.__export_row(cr, uid, row, fields_to_export, context)
        return {'datas': datas}
    def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
        """Import given data in given module

        This method is used when importing data via client menu.

        Example of fields to import for a sale.order::

            partner_id,                 (=name_search)
            order_line/.id,             (=database_id)
            order_line/product_id/id,   (=xml id)
            order_line/price_unit,
            order_line/product_uom_qty,
            order_line/product_uom/id   (=xml_id)

        This method returns a 4-tuple with the following structure::

            (return_code, errored_resource, error_message, unused)

        * The first item is a return code, it is ``-1`` in case of
          import error, or the last imported row number in case of success
        * The second item contains the record data dict that failed to import
          in case of error, otherwise it's 0
        * The third item contains an error message string in case of error,
          otherwise it's 0
        * The last item is currently unused, with no specific semantics

        :param fields: list of fields to import
        :param datas: data to import, a list of rows (each row a list of values)
        :param mode: 'init' or 'update' for record creation
        :param current_module: module name
        :param noupdate: flag for record creation
        :param filename: optional file to store partial import state for recovery
        :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
        :rtype: (int, dict or 0, str or 0, str or 0)
        """
        # Split 'a/b/c'-style paths into component lists.
        fields = map(fix_import_export_id_paths, fields)
        logger = netsvc.Logger()
        ir_model_data_obj = self.pool.get('ir.model.data')

        # mode: id (XML id) or .id (database id) or False for name_get
        def _get_id(model_name, id, current_module=False, mode='id'):
                # '.id' mode: the raw database id, checked for existence.
                obj_model = self.pool.get(model_name)
                ids = obj_model.search(cr, uid, [('id', '=', int(id))])
                    raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, id))
                # 'id' mode: resolve a (possibly module-qualified) XML id.
                    module, xml_id = id.rsplit('.', 1)
                    # unqualified XML id: assume the importing module
                    module, xml_id = current_module, id
                record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
                ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id'])
                if not ir_model_data:
                    raise ValueError('No references to %s.%s' % (module, xml_id))
                id = ir_model_data[0]['res_id']
                # name_get mode: resolve the value by exact name search.
                obj_model = self.pool.get(model_name)
                ids = obj_model.name_search(cr, uid, id, operator='=', context=context)
                    raise ValueError('No record found for %s' % (id,))

        # datas: a list of records, each record is defined by a list of values
        # prefix: a list of prefix fields ['line_ids']
        # position: the line to process, skip is False if it's the first line of the current record
        # Returns a tuple
        # (res, position, warning, res_id) with
        # res: the record for the next line to process (including its one2many)
        # position: the new position for the next line
        # res_id: the ID of the record if it's a modification
        def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0, skip=0):
            line = datas[position]
            for i, field in enumerate(fields):
                    raise Exception(_('Please check that all your lines have %d columns.'
                        'Stopped around line %d having %d columns.') % \
                        (len(fields), position+2, len(line)))

                # Skip fields that do not belong to the current sub-record.
                if field[:len(prefix)] <> prefix:
                    if line[i] and skip:
                field_name = field[len(prefix)]

                #set the mode for m2o, o2m, m2m : xml_id/id/name
                if len(field) == len(prefix)+1:
                    mode = field[len(prefix)+1]

                # TODO: improve this by using csv.csv_reader
                def many_ids(line, relation, current_module, mode):
                    # Resolve each separator-delimited id in the cell.
                    for db_id in line.split(config.get('csv_internal_sep')):
                        res.append(_get_id(relation, db_id, current_module, mode))

                # ID of the record using a XML ID
                if field_name == 'id':
                        data_res_id = _get_id(model_name, line[i], current_module, 'id')

                # ID of the record using a database ID
                elif field_name == '.id':
                    data_res_id = _get_id(model_name, line[i], current_module, '.id')

                field_type = fields_def[field_name]['type']
                # recursive call for getting children and returning [(0,0,{})] or [(1,ID,{})]
                if field_type == 'one2many':
                    if field_name in done:
                    done[field_name] = True
                    relation = fields_def[field_name]['relation']
                    relation_obj = self.pool.get(relation)
                    newfd = relation_obj.fields_get( cr, uid, context=context )
                    res = many_ids(line[i], relation, current_module, mode)
                    # Consume following lines belonging to this one2many.
                    while pos < len(datas):
                        res2 = process_liness(self, datas, prefix + [field_name], current_module, relation_obj._name, newfd, pos, first)
                        (newrow, pos, w2, data_res_id2, xml_id2) = res2
                        nbrmax = max(nbrmax, pos)
                            # existing sub-record: just link it
                            res.append((4, data_res_id2))
                        # stop when the sub-row carried no values at all
                        if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
                        # (1, id, vals) updates, (0, 0, vals) creates
                        res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )

                elif field_type == 'many2one':
                    relation = fields_def[field_name]['relation']
                    res = _get_id(relation, line[i], current_module, mode)

                elif field_type == 'many2many':
                    relation = fields_def[field_name]['relation']
                    res = many_ids(line[i], relation, current_module, mode)

                elif field_type == 'integer':
                    res = line[i] and int(line[i]) or 0
                elif field_type == 'boolean':
                    res = line[i].lower() not in ('0', 'false', 'off')
                elif field_type == 'float':
                    res = line[i] and float(line[i]) or 0.0
                elif field_type == 'selection':
                    # Accept either the internal key or the displayed label.
                    for key, val in fields_def[field_name]['selection']:
                        if tools.ustr(line[i]) in [tools.ustr(key), tools.ustr(val)]:
                    if line[i] and not res:
                        logging.getLogger('orm.import').warn(
                            _("key '%s' not found in selection field '%s'"),
                            tools.ustr(line[i]), tools.ustr(field_name))
                        warning.append(_("Key/value '%s' not found in selection field '%s'") % (
                            tools.ustr(line[i]), tools.ustr(field_name)))

                row[field_name] = res or False

            result = (row, nbrmax, warning, data_res_id, xml_id)

        fields_def = self.fields_get(cr, uid, context=context)

        # Resume an interrupted import when partial-import mode is on.
        if config.get('import_partial', False) and filename:
            data = pickle.load(file(config.get('import_partial')))

        while position<len(datas):
            (res, position, warning, res_id, xml_id) = \
                process_liness(self, datas, [], current_module, self._name, fields_def, position=position)
                return (-1, res, 'Line ' + str(position) +' : ' + '!\n'.join(warning), '')

                # Create/update the record via ir.model.data bookkeeping.
                ir_model_data_obj._update(cr, uid, self._name,
                     current_module, res, mode=mode, xml_id=xml_id,
                     noupdate=noupdate, res_id=res_id, context=context)
            except Exception, e:
                return (-1, res, 'Line ' + str(position) + ' : ' + tools.ustr(e), '')

            # Every 100 rows, checkpoint progress to the partial-import file.
            if config.get('import_partial', False) and filename and (not (position%100)):
                data = pickle.load(file(config.get('import_partial')))
                data[filename] = position
                pickle.dump(data, file(config.get('import_partial'), 'wb'))
                if context.get('defer_parent_store_computation'):
                    self._parent_store_compute(cr)

        if context.get('defer_parent_store_computation'):
            self._parent_store_compute(cr)
        return (position, 0, 0, 0)
1404 def get_invalid_fields(self, cr, uid):
1405 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run each entry of self._constraints against `ids`.

        For every failing constraint, the (possibly translated) message is
        collected and the offending field names accumulated in
        ``self._invalids``; a ValidateError is raised if anything failed,
        otherwise ``self._invalids`` is cleared.
        """
        context = context or {}
        lng = context.get('lang', False) or 'en_US'
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # Check presence of __call__ directly instead of using
                # callable() because it will be deprecated as of Python 3.0
                if hasattr(msg, '__call__'):
                    tmp_msg = msg(self, cr, uid, ids, context=context)
                    if isinstance(tmp_msg, tuple):
                        # message callables may return (template, params)
                        tmp_msg, params = tmp_msg
                        translated_msg = tmp_msg % params
                        translated_msg = tmp_msg
                else:
                    # plain string message: look up its translation first
                    translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg) or msg
                    _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                self._invalids.update(fields)
        raise except_orm('ValidateError', '\n'.join(error_msgs))
        self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """
        Returns default values for the fields in fields_list.

        :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
        :type fields_list: list
        :param context: optional context dictionary - it may contain keys for specifying certain options
                        like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                        It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                        or override a default value for a field.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
        """
        # trigger view init hook
        self.view_init(cr, uid, fields_list, context)

        # get the default values for the inherited fields
        for t in self._inherits.keys():
            defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,

        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                if callable(self._defaults[f]):
                    defaults[f] = self._defaults[f](self, cr, uid, context)
                    defaults[f] = self._defaults[f]

            # column object for f, looked up in own then inherited fields
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \

            if isinstance(fld_def, fields.property):
                # property fields take their default from ir.property
                property_obj = self.pool.get('ir.property')
                prop_value = property_obj.get(cr, uid, f, self._name, context=context)
                    if isinstance(prop_value, (browse_record, browse_null)):
                        defaults[f] = prop_value.id
                        defaults[f] = prop_value

            if f not in defaults:

        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type in ('many2one', 'one2one'):
                    # drop a stored default that points to a deleted record
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                if fld_def._type in ('many2many'):
                    # keep only ids of records that still exist
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                        field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type in ('one2many'):
                    # rebuild each sub-record dict, dropping dangling m2o refs
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                defaults[field] = field_value

        # get the default values from the context
        for key in context or {}:
            if key.startswith('default_') and (key[8:] in fields_list):
                defaults[key[8:]] = context[key]
    def fields_get_keys(self, cr, user, context=None):
        """Return the names of all fields: own columns plus those of every
        _inherits parent (recursively)."""
        res = self._columns.keys()
        # TODO I believe this loop can be replaced by
        # res.extend(self._inherit_fields.keys())
        for parent in self._inherits:
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Hook returning a context-dependent window title for the requested
        view; models override it when they need one. NOTE(review): the default
        body is not visible in this chunk — presumably returns a falsy value.
        """
    def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
        """ Return the description of the fields in the node.

        In a normal call to this method, node is a complete view architecture
        but it is actually possible to give some sub-node (this is used so
        that the method can call itself recursively).

        Originally, the field descriptions are drawn from the node itself.
        But there is now some code calling fields_get() in order to merge some
        of those information in the architecture.

        NOTE(review): several lines of this method are not visible in this
        chunk; comments describe only the visible code.
        """
            # helper: encode unicode to utf-8 bytes for lxml
            if isinstance(s, unicode):
                return s.encode('utf8')

        def check_group(node):
            """ Set invisible to true if the user is not in the specified groups. """
            if node.get('groups'):
                groups = node.get('groups').split(',')
                ir_model_access = self.pool.get('ir.model.access')
                can_see = any(ir_model_access.check_groups(cr, user, group) for group in groups)
                    node.set('invisible', '1')
                    modifiers['invisible'] = True
                    if 'attrs' in node.attrib:
                        del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # embedded view on another model: collect its <field> children
                # into a synthetic form and parse that architecture
                    if f.tag in ('field'):
                        xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}
            if node.get('name'):
                # resolve the column object for the named field
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                    column = self._inherit_fields[node.get('name')][2]
                    # relational field: parse any inline sub-views
                    relation = self.pool.get(column._obj)
                    if f.tag in ('form', 'tree', 'graph'):
                        ctx = context.copy()
                        ctx['base_model_name'] = self._name
                        xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                        views[str(f.tag)] = {
                    attrs = {'views': views}
                    if node.get('widget') and node.get('widget') == 'selection':
                        # Prepare the cached selection list for the client. This needs to be
                        # done even when the field is invisible to the current user, because
                        # other events could need to change its value to any of the selectable ones
                        # (such as on_change events, refreshes, etc.)

                        # If domain and context are strings, we keep them for client-side, otherwise
                        # we evaluate them server-side to consider them when generating the list of
                        # selectable records
                        # TODO: find a way to remove this hack, by allow dynamic domains
                        if column._domain and not isinstance(column._domain, basestring):
                            dom = column._domain
                        dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                        search_context = dict(context)
                        if column._context and not isinstance(column._context, basestring):
                            search_context.update(column._context)
                        attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                        if (node.get('required') and not int(node.get('required'))) or not column.required:
                            attrs['selection'].append((False, ''))
            fields[node.get('name')] = attrs

            field = model_fields.get(node.get('name'))
                transfer_field_to_modifiers(field, modifiers)

        elif node.tag in ('form', 'tree'):
            result = self.view_header_get(cr, user, False, node.tag, context)
                node.set('string', result)
            in_tree_view = node.tag == 'tree'

        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        # The view architecture overrides the python model.
        # Get the attrs before they are (possibly) deleted by check_group below
        transfer_node_to_modifiers(node, modifiers, context, in_tree_view)

        # TODO remove attrs counterpart in modifiers when invisible is true ?

        # translate view strings (labels, confirm, sum, help) if a lang is set
        if 'lang' in context:
            if node.get('string') and not result:
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                    node.set('string', trans)
            if node.get('confirm'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm'))
                    node.set('confirm', trans)
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
                    node.set('sum', trans)
            if node.get('help'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('help'))
                    node.set('help', trans)

        # recurse into child nodes when allowed
        if children or (node.tag == 'field' and f.tag in ('filter','separator')):
            fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))

        transfer_modifiers_to_node(modifiers, node)
    def _disable_workflow_buttons(self, cr, user, node):
        """ Set the buttons in node to readonly if the user can't activate them. """
            # admin user can always activate workflow buttons

        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        # only workflow buttons (non-'object' type) are access-checked here
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            # groups allowed to fire this signal on any transition of the workflow
            cr.execute("""SELECT DISTINCT t.group_id
                  INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                  INNER JOIN wkf_transition t ON (t.act_to = a.id)
                         AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # clickable when no group restriction exists or the user is a member
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            button.set('readonly', str(int(not can_click)))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        """ Return an architecture and a description of all the fields.

        The field description combines the result of fields_get() and
        __view_look_dom().

        :param node: the architecture as as an etree
        :return: a tuple (arch, fields) where arch is the given node as a
            string and fields is the description of all the fields.
        """
        if node.tag == 'diagram':
            # diagram views draw their fields from the node/arrow sub-models
            if node.getchildren()[0].tag == 'node':
                node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, None, context)
                fields.update(node_fields)
            if node.getchildren()[1].tag == 'arrow':
                arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
                fields.update(arrow_fields)
            fields = self.fields_get(cr, user, None, context)
        fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
        node = self._disable_workflow_buttons(cr, user, node)
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        # keep only the fields actually referenced by the architecture
        for k in fields.keys():
            if k not in fields_def:
        for field in fields_def:
                # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
                fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
                # unknown field: build a helpful error listing the views involved
                cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
                res = cr.fetchall()[:]
                res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
                msg = "\n * ".join([r[0] for r in res])
                msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
                netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
                raise except_orm('View error', msg)
    def _get_default_form_view(self, cr, user, context=None):
        """ Generates a default single-line form view using all fields
        of the current model except the m2m and o2m ones.

        :param cr: database cursor
        :param int user: user id
        :param dict context: connection context
        :returns: a form view as an lxml document
        :rtype: etree._Element
        """
        view = etree.Element('form', string=self._description)
        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
            if descriptor['type'] in ('one2many', 'many2many'):
                # x2many fields are excluded from the default form
            etree.SubElement(view, 'field', name=field)
            if descriptor['type'] == 'text':
                # text fields get a full line to themselves
                etree.SubElement(view, 'newline')
    def _get_default_tree_view(self, cr, user, context=None):
        """ Generates a single-field tree view, using _rec_name if
        it's one of the columns or the first column it finds otherwise

        :param cr: database cursor
        :param int user: user id
        :param dict context: connection context
        :returns: a tree view as an lxml document
        :rtype: etree._Element
        """
        _rec_name = self._rec_name
        if _rec_name not in self._columns:
            # fall back on the first declared column, or 'id' if there is none
            _rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"

        view = etree.Element('tree', string=self._description)
        etree.SubElement(view, 'field', name=_rec_name)
    def _get_default_calendar_view(self, cr, user, context=None):
        """ Generates a default calendar view by trying to infer
        calendar fields from a number of pre-set attribute names

        :param cr: database cursor
        :param int user: user id
        :param dict context: connection context
        :returns: a calendar view
        :rtype: etree._Element
        """
        def set_first_of(seq, in_, to):
            """Sets the first value of ``seq`` also found in ``in_`` to
            the ``to`` attribute of the view being closed over.

            Returns whether it's found a suitable value (and set it on
            the attribute) or not
            """

        view = etree.Element('calendar', string=self._description)
        etree.SubElement(view, 'field', name=self._rec_name)

        if (self._date_name not in self._columns):
            # _date_name is not a real column: probe common date field names
            for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                if dt in self._columns:
                    self._date_name = dt

            # no usable start-date field could be found
            raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
        view.set('date_start', self._date_name)

        set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
            self._columns, 'color')

        if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
                            self._columns, 'date_stop'):
            if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
                                self._columns, 'date_delay'):
                # NOTE(review): the %-substitution happens before _() looks up
                # the translation, so this exact message may never match a
                # translation template — worth confirming/fixing upstream.
                    _('Invalid Object Architecture!'),
                    _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % (self._name)))
    def _get_default_search_view(self, cr, uid, context=None):
        """Generates a default search view from the model's searchable fields
        plus any field marked select=1 in the form and tree views.

        :param cr: database cursor
        :param int uid: user id
        :param dict context: connection context
        :returns: an lxml document of the view
        :rtype: etree._Element
        """
        form_view = self.fields_view_get(cr, uid, False, 'form', context=context)
        tree_view = self.fields_view_get(cr, uid, False, 'tree', context=context)

        # TODO it seems _all_columns could be used instead of fields_get (no need for translated fields info)
        fields = self.fields_get(cr, uid, context=context)
        fields_to_search = set(
            field for field, descriptor in fields.iteritems()
            if descriptor.get('select'))

        for view in (form_view, tree_view):
            view_root = etree.fromstring(view['arch'])
            # Only care about select=1 in xpath below, because select=2 is covered
            # by the custom advanced search in clients
            fields_to_search.update(view_root.xpath("//field[@select=1]/@name"))

        tree_view_root = view_root # as provided by loop above
        search_view = etree.Element("search", string=tree_view_root.get("string", ""))

        field_group = etree.SubElement(search_view, "group")
        for field_name in fields_to_search:
            etree.SubElement(field_group, "field", name=field_name)
1900 # if view_id, view_type is not required
# Resolve and return the full definition of a view (arch + fields) for this
# model, applying the whole inheritance chain of ir.ui.view records.
# NOTE(review): this excerpt is missing interior source lines (gaps in the
# embedded numbering); code below is kept byte-identical to what is visible.
1902 def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
1904 Get the detailed composition of the requested view like fields, model, view architecture
1906 :param cr: database cursor
1907 :param user: current user id
1908 :param view_id: id of the view or None
1909 :param view_type: type of the view to return if view_id is None ('form', tree', ...)
1910 :param context: context arguments, like lang, time zone
1911 :param toolbar: true to include contextual actions
1912 :param submenu: deprecated
1913 :return: dictionary describing the composition of the requested view (including inherited views and extensions)
1914 :raise AttributeError:
1915 * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
1916 * if some tag other than 'position' is found in parent view
1917 :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
# Local helper: coerce unicode arch strings to utf-8 bytes before lxml parsing.
1924 if isinstance(s, unicode):
1925 return s.encode('utf8')
# Local helper: raise a descriptive AttributeError naming the faulty
# inheriting view (looked up via ir.ui.view) and this model.
1928 def raise_view_error(error_msg, child_view_id):
1929 view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
1930 raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
1931 % (child_view.xml_id, self._name, error_msg))
1933 def locate(source, spec):
1934 """ Locate a node in a source (parent) architecture.
1936 Given a complete source (parent) architecture (i.e. the field
1937 `arch` in a view), and a 'spec' node (a node in an inheriting
1938 view that specifies the location in the source view of what
1939 should be changed), return (if it exists) the node in the
1940 source view matching the specification.
1942 :param source: a parent architecture to modify
1943 :param spec: a modifying node in an inheriting view
1944 :return: a node in the source matching the spec
1947 if spec.tag == 'xpath':
1948 nodes = source.xpath(spec.get('expr'))
1949 return nodes[0] if nodes else None
1950 elif spec.tag == 'field':
1951 # Only compare the field name: a field can be only once in a given view
1952 # at a given level (and for multilevel expressions, we should use xpath
1953 # inheritance spec anyway).
1954 for node in source.getiterator('field'):
1955 if node.get('name') == spec.get('name'):
# Generic tag match: every attribute of the spec (except 'position')
# must match the candidate node's attributes.
1959 for node in source.getiterator(spec.tag):
1961 for attr in spec.attrib:
1962 if attr != 'position' and (not node.get(attr) or node.get(attr) != spec.get(attr)):
1969 def apply_inheritance_specs(source, specs_arch, inherit_id=None):
1970 """ Apply an inheriting view.
1972 Apply to a source architecture all the spec nodes (i.e. nodes
1973 describing where and what changes to apply to some parent
1974 architecture) given by an inheriting view.
1976 :param source: a parent architecture to modify
1977 :param specs_arch: a modifying architecture in an inheriting view
1978 :param inherit_id: the database id of the inheriting view
1979 :return: a modified source where the specs are applied
1982 specs_tree = etree.fromstring(encode(specs_arch))
1983 # Queue of specification nodes (i.e. nodes describing where and
1984 # changes to apply to some parent architecture).
1985 specs = [specs_tree]
1989 if isinstance(spec, SKIPPED_ELEMENT_TYPES):
1991 if spec.tag == 'data':
1992 specs += [ c for c in specs_tree ]
1994 node = locate(source, spec)
1995 if node is not None:
1996 pos = spec.get('position', 'inside')
1997 if pos == 'replace':
# Replacing the root node: the spec's first child becomes the
# new architecture root.
1998 if node.getparent() is None:
1999 source = copy.deepcopy(spec[0])
2002 node.addprevious(child)
2003 node.getparent().remove(node)
2004 elif pos == 'attributes':
2005 for child in spec.getiterator('attribute'):
2006 attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
2008 node.set(attribute[0], attribute[1])
# An <attribute> with empty text removes the attribute.
2010 del(node.attrib[attribute[0]])
2012 sib = node.getnext()
2016 elif pos == 'after':
2021 sib.addprevious(child)
2022 elif pos == 'before':
2023 node.addprevious(child)
2025 raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
# Spec node not found in the parent arch: rebuild its textual form
# for the error message.
2028 ' %s="%s"' % (attr, spec.get(attr))
2029 for attr in spec.attrib
2030 if attr != 'position'
2032 tag = "<%s%s>" % (spec.tag, attrs)
2033 raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
2036 def apply_view_inheritance(cr, user, source, inherit_id):
2037 """ Apply all the (directly and indirectly) inheriting views.
2039 :param source: a parent architecture to modify (with parent
2040 modifications already applied)
2041 :param inherit_id: the database view_id of the parent view
2042 :return: a modified source where all the modifying architecture
# Recursively apply each direct child view, then its own children.
2046 sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
2047 for (view_arch, view_id) in sql_inherit:
2048 source = apply_inheritance_specs(source, view_arch, view_id)
2049 source = apply_view_inheritance(cr, user, source, view_id)
2052 result = {'type': view_type, 'model': self._name}
2055 parent_view_model = None
2056 view_ref = context.get(view_type + '_view_ref')
2057 # Search for a root (i.e. without any parent) view.
# A '<type>_view_ref' context key ("module.xml_id") overrides the view
# lookup when no explicit view_id was given.
2059 if view_ref and not view_id:
2061 module, view_ref = view_ref.split('.', 1)
2062 cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
2063 view_ref_res = cr.fetchone()
2065 view_id = view_ref_res[0]
2068 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2070 WHERE id=%s""", (view_id,))
# No view_id: pick the highest-priority root view of the requested type.
2072 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2074 WHERE model=%s AND type=%s AND inherit_id IS NULL
2075 ORDER BY priority""", (self._name, view_type))
2076 sql_res = cr.dictfetchone()
# Walk up inherit_id links until the root view is reached.
2081 view_id = sql_res['inherit_id'] or sql_res['id']
2082 parent_view_model = sql_res['model']
2083 if not sql_res['inherit_id']:
2086 # if a view was found
2088 source = etree.fromstring(encode(sql_res['arch']))
2090 arch=apply_view_inheritance(cr, user, source, sql_res['id']),
2091 type=sql_res['type'],
2092 view_id=sql_res['id'],
2093 name=sql_res['name'],
2094 field_parent=sql_res['field_parent'] or False)
2096 # otherwise, build some kind of default view
2098 view = getattr(self, '_get_default_%s_view' % view_type)(
2100 except AttributeError:
2101 # what happens here, graph case?
2102 raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
# When the root view belongs to a parent model (_inherits), expose it so
# field post-processing can resolve fields against the right model.
2110 if parent_view_model != self._name:
2111 ctx = context.copy()
2112 ctx['base_model_name'] = parent_view_model
2115 xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
2116 result['arch'] = xarch
2117 result['fields'] = xfields
# Toolbar section: strip heavy report payloads, then collect the print /
# action / related-action bindings from ir.values.
2122 for key in ('report_sxw_content', 'report_rml_content',
2123 'report_sxw', 'report_rml',
2124 'report_sxw_content_data', 'report_rml_content_data'):
2128 ir_values_obj = self.pool.get('ir.values')
2129 resprint = ir_values_obj.get(cr, user, 'action',
2130 'client_print_multi', [(self._name, False)], False,
2132 resaction = ir_values_obj.get(cr, user, 'action',
2133 'client_action_multi', [(self._name, False)], False,
2136 resrelate = ir_values_obj.get(cr, user, 'action',
2137 'client_action_relate', [(self._name, False)], False,
# 'multi' actions only make sense on list (tree) views.
2139 resaction = [clean(action) for action in resaction
2140 if view_type == 'tree' or not action[2].get('multi')]
2141 resprint = [clean(print_) for print_ in resprint
2142 if view_type == 'tree' or not print_[2].get('multi')]
2143 resrelate = map(lambda x: x[2], resrelate)
2145 for x in itertools.chain(resprint, resaction, resrelate):
2146 x['string'] = x['name']
2148 result['toolbar'] = {
2150 'action': resaction,
2155 _view_look_dom_arch = __view_look_dom_arch
# Count the records matching the domain ``args`` by delegating to
# search(count=True). NOTE(review): body is truncated in this excerpt.
2157 def search_count(self, cr, user, args, context=None):
2160 res = self.search(cr, user, args, context=context, count=True)
2161 if isinstance(res, list):
# Public search entry point; thin wrapper around _search() (which also
# handles access rules).
2165 def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
2167 Search for records based on a search domain.
2169 :param cr: database cursor
2170 :param user: current user id
2171 :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
2172 :param offset: optional number of results to skip in the returned values (default: 0)
2173 :param limit: optional max number of records to return (default: **None**)
2174 :param order: optional columns to sort by (default: self._order=id )
2175 :param context: optional context arguments, like lang, time zone
2176 :type context: dictionary
2177 :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
2178 :return: id or list of ids of records matching the criteria
2179 :rtype: integer or list of integers
2180 :raise AccessError: * if user tries to bypass access rules for read on the requested object.
2182 **Expressing a search domain (args)**
2184 Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
2186 * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
2187 * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
2188 The semantics of most of these operators are obvious.
2189 The ``child_of`` operator will look for records who are children or grand-children of a given record,
2190 according to the semantics of this model (i.e following the relationship field named by
2191 ``self._parent_name``, by default ``parent_id``.
2192 * **value** must be a valid value to compare with the values of **field_name**, depending on its type.
2194 Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
2195 These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
2196 Be very careful about this when you combine them the first time.
2198 Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
2200 [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))
2202 The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
2204 (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
2207 return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
2209 def name_get(self, cr, user, ids, context=None):
2210 """Returns the preferred display value (text representation) for the records with the
2211 given ``ids``. By default this will be the value of the ``name`` column, unless
2212 the model implements a custom behavior.
2213 Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
2217 :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
# Accept a single id for convenience; the read() below expects a list.
2221 if isinstance(ids, (int, long)):
# load='_classic_write' keeps relational values as raw ids, so ustr()
# renders them without extra name_get calls.
2223 return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
2224 [self._rec_name], context, load='_classic_write')]
2226 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
2227 """Search for records that have a display name matching the given ``name`` pattern if compared
2228 with the given ``operator``, while also matching the optional search domain (``args``).
2229 This is used for example to provide suggestions based on a partial value for a relational
2231 Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
2234 This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
2235 and then :meth:`~.name_get` on the result of the search.
2237 :param list args: optional search domain (see :meth:`~.search` for syntax),
2238 specifying further restrictions
2239 :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
2241 :param int limit: optional max number of records to return
2243 :return: list of pairs ``(id,text_repr)`` for all matching records.
# Delegate to the private implementation (which supports a distinct uid
# for the name_get step).
2245 return self._name_search(cr, user, name, args, operator, context, limit)
2247 def name_create(self, cr, uid, name, context=None):
2248 """Creates a new record by calling :meth:`~.create` with only one
2249 value provided: the name of the new record (``_rec_name`` field).
2250 The new record will also be initialized with any default values applicable
2251 to this model, or provided through the context. The usual behavior of
2252 :meth:`~.create` applies.
2253 Similarly, this method may raise an exception if the model has multiple
2254 required fields and some do not have default values.
2256 :param name: name of the record to create
2259 :return: the :meth:`~.name_get` pair value for the newly-created record.
2261 rec_id = self.create(cr, uid, {self._rec_name: name}, context);
2262 return self.name_get(cr, uid, [rec_id], context)[0]
2264 # private implementation of name_search, allows passing a dedicated user for the name_get part to
2265 # solve some access rights issues
2266 def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
2272 # optimize out the default criterion of ``ilike ''`` that matches everything
2273 if not (name == '' and operator == 'ilike'):
2274 args += [(self._rec_name, operator, name)]
# The search runs as ``user``; the name_get (and the rules check inside
# _search) may run as ``name_get_uid`` when provided.
2275 access_rights_uid = name_get_uid or user
2276 ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
2277 res = self.name_get(cr, access_rights_uid, ids, context)
# Read the translated field labels (field 'string' attributes) for the given
# languages, including labels inherited via _inherits parents.
# NOTE(review): interior lines are missing from this excerpt.
2280 def read_string(self, cr, uid, id, langs, fields=None, context=None):
2283 self.pool.get('ir.translation').check_read(cr, uid)
# Default to every own + inherited field.
2285 fields = self._columns.keys() + self._inherit_fields.keys()
2286 #FIXME: collect all calls to _get_source into one SQL call.
2288 res[lang] = {'code': lang}
2290 if f in self._columns:
2291 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
2293 res[lang][f] = res_trans
# Fall back to the untranslated label.
2295 res[lang][f] = self._columns[f].string
2296 for table in self._inherits:
2297 cols = intersect(self._inherit_fields.keys(), fields)
2298 res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
2301 res[lang]['code'] = lang
2302 for f in res2[lang]:
2303 res[lang][f] = res2[lang][f]
# Write translated field labels for the given languages, forwarding values
# for inherited fields to the _inherits parent models.
2306 def write_string(self, cr, uid, id, langs, vals, context=None):
2307 self.pool.get('ir.translation').check_write(cr, uid)
2308 #FIXME: try to only call the translation in one SQL
2311 if field in self._columns:
2312 src = self._columns[field].string
# res_id [0] marks a field-label translation (not a record value).
2313 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
2314 for table in self._inherits:
2315 cols = intersect(self._inherit_fields.keys(), vals)
2317 self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
# Fill ``values`` with default values for every field not already supplied,
# normalizing m2m/o2m defaults to the (6,0,ids) / (0,0,vals) command format.
2320 def _add_missing_default_values(self, cr, uid, values, context=None):
2321 missing_defaults = []
2322 avoid_tables = [] # avoid overriding inherited values when parent is set
2323 for tables, parent_field in self._inherits.items():
2324 if parent_field in values:
2325 avoid_tables.append(tables)
2326 for field in self._columns.keys():
2327 if not field in values:
2328 missing_defaults.append(field)
2329 for field in self._inherit_fields.keys():
# Skip inherited fields whose parent record is explicitly provided.
2330 if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
2331 missing_defaults.append(field)
2333 if len(missing_defaults):
2334 # override defaults with the provided values, never allow the other way around
2335 defaults = self.default_get(cr, uid, missing_defaults, context)
# m2m default given as a plain list of ids -> wrap in a (6,0,ids) command.
2337 if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
2338 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
2339 and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
2340 defaults[dv] = [(6, 0, defaults[dv])]
# o2m default given as a list of value dicts -> wrap each in (0,0,vals).
2341 if (dv in self._columns and self._columns[dv]._type == 'one2many' \
2342 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
2343 and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
2344 defaults[dv] = [(0, 0, x) for x in defaults[dv]]
2345 defaults.update(values)
2349 def clear_caches(self):
2350 """ Clear the caches
2352 This clears the caches associated to methods decorated with
2353 ``tools.ormcache`` or ``tools.ormcache_multi``.
# Probe for the cache attribute; models without one simply have nothing
# to clear (AttributeError swallowed below).
2356 getattr(self, '_ormcache')
2358 except AttributeError:
# Grouped read: builds and executes an aggregate SQL query over the records
# matching ``domain``, grouped by the (single, possibly inherited) groupby
# field. NOTE(review): many interior lines are missing from this excerpt;
# code kept byte-identical.
2361 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
2363 Get the list of records in list view grouped by the given ``groupby`` fields
2365 :param cr: database cursor
2366 :param uid: current user id
2367 :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
2368 :param list fields: list of fields present in the list view specified on the object
2369 :param list groupby: fields by which the records will be grouped
2370 :param int offset: optional number of records to skip
2371 :param int limit: optional max number of records to return
2372 :param dict context: context arguments, like lang, time zone
2373 :param list orderby: optional ``order by`` specification, for
2374 overriding the natural sort ordering of the
2375 groups, see also :py:meth:`~osv.osv.osv.search`
2376 (supported only for many2one fields currently)
2377 :return: list of dictionaries(one dictionary for each record) containing:
2379 * the values of fields grouped by the fields in ``groupby`` argument
2380 * __domain: list of tuples specifying the search criteria
2381 * __context: dictionary with argument like ``groupby``
2382 :rtype: [{'field_name_1': value, ...]
2383 :raise AccessError: * if user has no read rights on the requested object
2384 * if user tries to bypass access rules for read on the requested object
2387 context = context or {}
2388 self.check_read(cr, uid)
2390 fields = self._columns.keys()
# Build the WHERE clause from the domain and apply record rules.
2392 query = self._where_calc(cr, uid, domain, context=context)
2393 self._apply_ir_rules(cr, uid, query, 'read', context=context)
2395 # Take care of adding join(s) if groupby is an '_inherits'ed field
2396 groupby_list = groupby
2397 qualified_groupby_field = groupby
# Only the first groupby level is processed here; the remainder is passed
# back to the client via __context['group_by'].
2399 if isinstance(groupby, list):
2400 groupby = groupby[0]
2401 qualified_groupby_field = self._inherits_join_calc(groupby, query)
2404 assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
2405 groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
2406 assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
2408 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
2409 fget = self.fields_get(cr, uid, fields)
2410 float_int_fields = filter(lambda x: fget[x]['type'] in ('float', 'integer'), fields)
2412 group_count = group_by = groupby
2414 if fget.get(groupby):
# Date/datetime groupbys are truncated to year-month buckets in SQL.
2415 if fget[groupby]['type'] in ('date', 'datetime'):
2416 flist = "to_char(%s,'yyyy-mm') as %s " % (qualified_groupby_field, groupby)
2417 groupby = "to_char(%s,'yyyy-mm')" % (qualified_groupby_field)
2418 qualified_groupby_field = groupby
2420 flist = qualified_groupby_field
2422 # Don't allow arbitrary values, as this would be a SQL injection vector!
2423 raise except_orm(_('Invalid group_by'),
2424 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
# Aggregate every numeric stored column with its group_operator (sum by
# default).
2427 fields_pre = [f for f in float_int_fields if
2428 f == self.CONCURRENCY_CHECK_FIELD
2429 or (f in self._columns and getattr(self._columns[f], '_classic_write'))]
2430 for f in fields_pre:
2431 if f not in ['id', 'sequence']:
2432 group_operator = fget[f].get('group_operator', 'sum')
2435 qualified_field = '"%s"."%s"' % (self._table, f)
2436 flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
2438 gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
2440 from_clause, where_clause, where_clause_params = query.get_sql()
2441 where_clause = where_clause and ' WHERE ' + where_clause
2442 limit_str = limit and ' limit %d' % limit or ''
2443 offset_str = offset and ' offset %d' % offset or ''
2444 if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
# Identifiers come from validated model metadata above; only the domain
# parameters are passed to psycopg2 for escaping.
2446 cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
2449 for r in cr.dictfetchall():
2450 for fld, val in r.items():
2451 if val == None: r[fld] = False
2452 alldata[r['id']] = r
# Re-order the representative record ids with the regular search order.
2455 data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=orderby or groupby, context=context)
2456 # the IDS of records that have groupby field value = False or '' should be sorted too
2457 data_ids += filter(lambda x:x not in data_ids, alldata.keys())
2458 data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
2459 # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
2460 data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
# Attach per-group drill-down domain and follow-up group_by context.
2464 d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
2465 if not isinstance(groupby_list, (str, unicode)):
2466 if groupby or not context.get('group_by_no_leaf', False):
2467 d['__context'] = {'group_by': groupby_list[1:]}
2468 if groupby and groupby in fget:
2469 if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
# Expand the 'yyyy-mm' bucket back into a month label and a
# [first day, last day] domain range.
2470 dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
2471 days = calendar.monthrange(dt.year, dt.month)[1]
2473 d[groupby] = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d').strftime('%B %Y')
2474 d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
2475 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
2476 del alldata[d['id']][groupby]
2477 d.update(alldata[d['id']])
# _group_by_full lets a model inject empty groups (e.g. kanban stages)
# that have no matching record.
2480 if groupby and groupby in self._group_by_full:
2481 gids = map(lambda x: x[groupby][0], data)
2482 stages = self._group_by_full[groupby](self, cr, uid, gids, domain, context)
2483 # as both lists are sorted in the same way, we can merge in one pass
2485 while stages and ((pos<len(data)) or (pos<len(stages))):
2486 if (pos<len(data)) and (data[pos][groupby][0] == stages[pos][0]):
2489 val = dict(map(lambda x: (x, False), fields))
2491 groupby: stages[pos],
2492 '__domain': [(groupby, '=', stages[pos][0])]+domain,
2493 groupby+'_count': 0L,
2494 '__context': {'group_by': groupby_list[1:]}
2496 data.insert(pos, val)
2499 def _inherits_join_add(self, current_table, parent_model_name, query):
2501 Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
2502 :param current_table: current model object
2503 :param parent_model_name: name of the parent model for which the clauses should be added
2504 :param query: query object on which the JOIN should be added
2506 inherits_field = current_table._inherits[parent_model_name]
2507 parent_model = self.pool.get(parent_model_name)
2508 parent_table_name = parent_model._table
2509 quoted_parent_table_name = '"%s"' % parent_table_name
# Idempotent: the join is only added once per parent table.
2510 if quoted_parent_table_name not in query.tables:
2511 query.tables.append(quoted_parent_table_name)
# Join on the _inherits foreign-key column pointing to the parent's id.
2512 query.where_clause.append('(%s.%s = %s.id)' % (current_table._table, inherits_field, parent_table_name))
2516 def _inherits_join_calc(self, field, query):
2518 Adds missing table select and join clause(s) to ``query`` for reaching
2519 the field coming from an '_inherits' parent table (no duplicates).
2521 :param field: name of inherited field to reach
2522 :param query: query object on which the JOIN should be added
2523 :return: qualified name of field, to be used in SELECT clause
2525 current_table = self
# Follow the _inherits chain (possibly several levels) until the model
# that actually owns the column is reached, adding one join per hop.
2526 while field in current_table._inherit_fields and not field in current_table._columns:
2527 parent_model_name = current_table._inherit_fields[field][0]
2528 parent_table = self.pool.get(parent_model_name)
2529 self._inherits_join_add(current_table, parent_model_name, query)
2530 current_table = parent_table
2531 return '"%s".%s' % (current_table._table, field)
# Recompute the nested-set columns (parent_left/parent_right) for the whole
# table via a depth-first traversal of the parent relation.
2533 def _parent_store_compute(self, cr):
2534 if not self._parent_store:
2536 logger = netsvc.Logger()
2537 logger.notifyChannel('data', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
2538 def browse_rec(root, pos=0):
# ``root`` is a record id (or None for top-level rows); visit children
# in _parent_order and assign left/right interval bounds.
2540 where = self._parent_name+'='+str(root)
2542 where = self._parent_name+' IS NULL'
2543 if self._parent_order:
2544 where += ' order by '+self._parent_order
2545 cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
2547 for id in cr.fetchall():
2548 pos2 = browse_rec(id[0], pos2)
2549 cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
2551 query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
2552 if self._parent_order:
2553 query += ' order by ' + self._parent_order
# Start the traversal from every root (parentless) record.
2556 for (root,) in cr.fetchall():
2557 pos = browse_rec(root, pos)
# Persist the values of a stored function field ``k`` (fields.function ``f``)
# for every row of the table, processing ids in batches of 40.
2560 def _update_store(self, cr, f, k):
2561 logger = netsvc.Logger()
2562 logger.notifyChannel('data', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
# _symbol_set = (sql placeholder, value-adapter callable) for the column.
2563 ss = self._columns[k]._symbol_set
2564 update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
2565 cr.execute('select id from '+self._table)
2566 ids_lst = map(lambda x: x[0], cr.fetchall())
2569 ids_lst = ids_lst[40:]
# Compute the function values for the current batch as superuser.
2570 res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
2571 for key, val in res.items():
2574 # if val is a many2one, just write the ID
2575 if type(val) == tuple:
# Skip plain False (no value) but keep falsy non-bools like 0.
2577 if (val<>False) or (type(val)<>bool):
2578 cr.execute(update_query, (ss[1](val), key))
2580 def _check_selection_field_value(self, cr, uid, field, value, context=None):
2581 """Raise except_orm if value is not among the valid values for the selection field"""
# 'reference' values are "model,id" strings; validate the id part.
2582 if self._columns[field]._type == 'reference':
2583 val_model, val_id_str = value.split(',', 1)
2586 val_id = long(val_id_str)
2590 raise except_orm(_('ValidateError'),
2591 _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
# Selection may be a static list of pairs or a callable returning one.
2595 if isinstance(self._columns[field].selection, (tuple, list)):
2596 if val in dict(self._columns[field].selection):
2598 elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
2600 raise except_orm(_('ValidateError'),
2601 _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
2603 def _check_removed_columns(self, cr, log=False):
2604 # iterate on the database columns to drop the NOT NULL constraints
2605 # of fields which were required but have been removed (or will be added by another module)
# Stored columns only: non-stored function fields have no DB column.
2606 columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
2607 columns += MAGIC_COLUMNS
2608 cr.execute("SELECT a.attname, a.attnotnull"
2609 " FROM pg_class c, pg_attribute a"
2610 " WHERE c.relname=%s"
2611 " AND c.oid=a.attrelid"
2612 " AND a.attisdropped=%s"
2613 " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
2614 " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
2616 for column in cr.dictfetchall():
2618 self.__logger.debug("column %s is in the table %s but not in the corresponding object %s",
2619 column['attname'], self._table, self._name)
# Leftover column still NOT NULL would block inserts: relax it.
2620 if column['attnotnull']:
2621 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
2622 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2623 self._table, column['attname'])
2625 # checked version: for direct m2o starting from `self`
2626 def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
2627 assert self.is_transient() or not dest_model.is_transient(), \
2628 'Many2One relationships from non-transient Model to TransientModel are forbidden'
2629 if self.is_transient() and not dest_model.is_transient():
2630 # TransientModel relationships to regular Models are annoying
2631 # usually because they could block deletion due to the FKs.
2632 # So unless stated otherwise we default them to ondelete=cascade.
2633 ondelete = ondelete or 'cascade'
# Queue the FK for creation later (see self._foreign_keys consumer).
2634 self._foreign_keys.append((self._table, source_field, dest_model._table, ondelete or 'set null'))
2635 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2636 self._table, source_field, dest_model._table, ondelete)
2638 # unchecked version: for custom cases, such as m2m relationships
# Same queuing as the checked variant, but the caller supplies the source
# table and no transient-model constraints are asserted.
2639 def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
2640 self._foreign_keys.append((source_table, source_field, dest_model._table, ondelete or 'set null'))
2641 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2642 source_table, source_field, dest_model._table, ondelete)
2644 def _auto_init(self, cr, context=None):
2647 Call _field_create and, unless _auto is False:
2649 - create the corresponding table in database for the model,
2650 - possibly add the parent columns in database,
2651 - possibly add the columns 'create_uid', 'create_date', 'write_uid',
2652 'write_date' in database if _log_access is True (the default),
2653 - report on database columns no more existing in _columns,
2654 - remove no more existing not null constraints,
2655 - alter existing database columns to match _columns,
2656 - create database tables to match _columns,
2657 - add database indices to match _columns,
2658 - save in self._foreign_keys a list a foreign keys to create (see
2662 self._foreign_keys = []
2663 raise_on_invalid_object_name(self._name)
2666 store_compute = False
2668 update_custom_fields = context.get('update_custom_fields', False)
2669 self._field_create(cr, context=context)
2670 create = not self._table_exist(cr)
2672 if getattr(self, '_auto', True):
2675 self._create_table(cr)
2678 if self._parent_store:
2679 if not self._parent_columns_exist(cr):
2680 self._create_parent_columns(cr)
2681 store_compute = True
2683 # Create the create_uid, create_date, write_uid, write_date, columns if desired.
2684 if self._log_access:
2685 self._add_log_columns(cr)
2687 self._check_removed_columns(cr, log=False)
2689 # iterate on the "object columns"
2690 column_data = self._select_column_data(cr)
2692 for k, f in self._columns.iteritems():
2693 if k in MAGIC_COLUMNS:
2695 # Don't update custom (also called manual) fields
2696 if f.manual and not update_custom_fields:
2699 if isinstance(f, fields.one2many):
2700 self._o2m_raise_on_missing_reference(cr, f)
2702 elif isinstance(f, fields.many2many):
2703 self._m2m_raise_or_create_relation(cr, f)
2706 res = column_data.get(k)
2708 # The field is not found as-is in database, try if it
2709 # exists with an old name.
2710 if not res and hasattr(f, 'oldname'):
2711 res = column_data.get(f.oldname)
2713 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
2715 column_data[k] = res
2716 self.__schema.debug("Table '%s': renamed column '%s' to '%s'",
2717 self._table, f.oldname, k)
2719 # The field already exists in database. Possibly
2720 # change its type, rename it, drop it or change its
2723 f_pg_type = res['typname']
2724 f_pg_size = res['size']
2725 f_pg_notnull = res['attnotnull']
2726 if isinstance(f, fields.function) and not f.store and\
2727 not getattr(f, 'nodrop', False):
2728 self.__logger.info('column %s (%s) in table %s removed: converted to a function !\n',
2729 k, f.string, self._table)
2730 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
2732 self.__schema.debug("Table '%s': dropped column '%s' with cascade",
2736 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
2741 ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
2742 ('varchar', 'text', 'TEXT', ''),
2743 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2744 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
2745 ('timestamp', 'date', 'date', '::date'),
2746 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2747 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2749 if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
2750 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2751 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
2752 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
2753 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2755 self.__schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
2756 self._table, k, f_pg_size, f.size)
2758 if (f_pg_type==c[0]) and (f._type==c[1]):
2759 if f_pg_type != f_obj_type:
2761 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2762 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
2763 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
2764 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2766 self.__schema.debug("Table '%s': column '%s' changed type from %s to %s",
2767 self._table, k, c[0], c[1])
2770 if f_pg_type != f_obj_type:
2774 newname = k + '_moved' + str(i)
2775 cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
2776 "WHERE c.relname=%s " \
2777 "AND a.attname=%s " \
2778 "AND c.oid=a.attrelid ", (self._table, newname))
2779 if not cr.fetchone()[0]:
2783 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2784 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
2785 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2786 cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
2787 self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
2788 self._table, k, f_pg_type, f._type, newname)
2790 # if the field is required and hasn't got a NOT NULL constraint
2791 if f.required and f_pg_notnull == 0:
2792 # set the field to the default value if any
2793 if k in self._defaults:
2794 if callable(self._defaults[k]):
2795 default = self._defaults[k](self, cr, SUPERUSER_ID, context)
2797 default = self._defaults[k]
2799 if (default is not None):
2800 ss = self._columns[k]._symbol_set
2801 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
2802 cr.execute(query, (ss[1](default),))
2803 # add the NOT NULL constraint
2806 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2808 self.__schema.debug("Table '%s': column '%s': added NOT NULL constraint",
2811 msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
2812 "If you want to have it, you should update the records and execute manually:\n"\
2813 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2814 self.__schema.warn(msg, self._table, k, self._table, k)
2816 elif not f.required and f_pg_notnull == 1:
2817 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2819 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2822 indexname = '%s_%s_index' % (self._table, k)
2823 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
2824 res2 = cr.dictfetchall()
2825 if not res2 and f.select:
2826 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2828 if f._type == 'text':
2829 # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
2830 msg = "Table '%s': Adding (b-tree) index for text column '%s'."\
2831 "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
2832 " because there is a length limit for indexable btree values!\n"\
2833 "Use a search view instead if you simply want to make the field searchable."
2834 self.__schema.warn(msg, self._table, k, f._type)
2835 if res2 and not f.select:
2836 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
2838 msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
2839 self.__schema.debug(msg, self._table, k, f._type)
2841 if isinstance(f, fields.many2one):
2842 dest_model = self.pool.get(f._obj)
2843 ref = dest_model._table
2844 if ref != 'ir_actions':
2845 cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
2846 'pg_attribute as att1, pg_attribute as att2 '
2847 'WHERE con.conrelid = cl1.oid '
2848 'AND cl1.relname = %s '
2849 'AND con.confrelid = cl2.oid '
2850 'AND cl2.relname = %s '
2851 'AND array_lower(con.conkey, 1) = 1 '
2852 'AND con.conkey[1] = att1.attnum '
2853 'AND att1.attrelid = cl1.oid '
2854 'AND att1.attname = %s '
2855 'AND array_lower(con.confkey, 1) = 1 '
2856 'AND con.confkey[1] = att2.attnum '
2857 'AND att2.attrelid = cl2.oid '
2858 'AND att2.attname = %s '
2859 "AND con.contype = 'f'", (self._table, ref, k, 'id'))
2860 res2 = cr.dictfetchall()
2862 if res2[0]['confdeltype'] != POSTGRES_CONFDELTYPES.get((f.ondelete or 'set null').upper(), 'a'):
2863 cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"')
2864 self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
2866 self.__schema.debug("Table '%s': column '%s': XXX",
2869 # The field doesn't exist in database. Create it if necessary.
2871 if not isinstance(f, fields.function) or f.store:
2872 # add the missing field
2873 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2874 cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
2875 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2876 self._table, k, get_pg_type(f)[1])
2879 if not create and k in self._defaults:
2880 if callable(self._defaults[k]):
2881 default = self._defaults[k](self, cr, SUPERUSER_ID, context)
2883 default = self._defaults[k]
2885 ss = self._columns[k]._symbol_set
2886 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
2887 cr.execute(query, (ss[1](default),))
2889 netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
2891 # remember the functions to call for the stored fields
2892 if isinstance(f, fields.function):
2894 if f.store is not True: # i.e. if f.store is a dict
2895 order = f.store[f.store.keys()[0]][2]
2896 todo_end.append((order, self._update_store, (f, k)))
2898 # and add constraints if needed
2899 if isinstance(f, fields.many2one):
2900 if not self.pool.get(f._obj):
2901 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2902 dest_model = self.pool.get(f._obj)
2903 ref = dest_model._table
2904 # ir_actions is inherited so foreign key doesn't work on it
2905 if ref != 'ir_actions':
2906 self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
2908 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2912 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2913 self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
2916 msg = "WARNING: unable to set column %s of table %s not null !\n"\
2917 "Try to re-run: openerp-server --update=module\n"\
2918 "If it doesn't work, update records and execute manually:\n"\
2919 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2920 self.__logger.warn(msg, k, self._table, self._table, k)
2924 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2925 create = not bool(cr.fetchone())
2927 cr.commit() # start a new transaction
2929 self._add_sql_constraints(cr)
2932 self._execute_sql(cr)
2935 self._parent_store_compute(cr)
    def _auto_end(self, cr, context=None):
        """ Create the foreign keys recorded by _auto_init.

        Each entry of ``self._foreign_keys`` is a
        (table, column, referenced_table, ondelete_action) tuple, as shown by
        how the tuple is interpolated into the ALTER TABLE statement below.
        """
        for t, k, r, d in self._foreign_keys:
            # Identifiers cannot be bound as query parameters, hence the
            # direct string interpolation of table/column names here.
            cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
        # Drop the accumulator once all pending keys have been created.
        del self._foreign_keys
    def _table_exist(self, cr):
        """Probe pg_class for a table or view named like this model's table."""
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2954 def _create_table(self, cr):
2955 cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
2956 cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,))
2957 self.__schema.debug("Table '%s': created", self._table)
    def _parent_columns_exist(self, cr):
        """Check pg_attribute for the presence of the ``parent_left`` column
        on this model's table.
        """
        cr.execute("""SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, 'parent_left'))
    def _create_parent_columns(self, cr):
        """Add the ``parent_left``/``parent_right`` integer columns used for
        hierarchy queries, and warn when the Python-side field definitions do
        not match what this storage requires (presence, select=1 indexing,
        ondelete='cascade' on the parent field).
        """
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
        # Developer warnings: the Python field definitions must exist and be
        # indexed for the columns created above to be usable.
        if 'parent_left' not in self._columns:
            self.__logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
            self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                                self._table, 'parent_left', 'INTEGER')
        elif not self._columns['parent_left'].select:
            self.__logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
        if 'parent_right' not in self._columns:
            self.__logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
            self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                                self._table, 'parent_right', 'INTEGER')
        elif not self._columns['parent_right'].select:
            self.__logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
        if self._columns[self._parent_name].ondelete != 'cascade':
            self.__logger.error("The column %s on object %s must be set as ondelete='cascade'",
                                self._parent_name, self._name)
    def _add_log_columns(self, cr):
        """Ensure every audit column listed in LOG_ACCESS_COLUMNS exists on
        this model's table, adding each missing one with its declared
        definition.
        """
        for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
                FROM pg_class c, pg_attribute a
                WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                """, (self._table, field))
                # Column is missing: create it with the definition taken from
                # LOG_ACCESS_COLUMNS.
                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
                self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                                    self._table, field, field_def)
3008 def _select_column_data(self, cr):
3009 # attlen is the number of bytes necessary to represent the type when
3010 # the type has a fixed size. If the type has a varying size attlen is
3011 # -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
3012 # Thus the query can return a negative size for a unlimited varchar.
3013 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
3014 "FROM pg_class c,pg_attribute a,pg_type t " \
3015 "WHERE c.relname=%s " \
3016 "AND c.oid=a.attrelid " \
3017 "AND a.atttypid=t.oid", (self._table,))
3018 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
    def _o2m_raise_on_missing_reference(self, cr, f):
        """Raise a Programming Error when the inverse column ``f._fields_id``
        of one2many field ``f`` exists neither directly on the target model
        nor among its _inherits fields.
        """
        # TODO this check should be a method on fields.one2many.
        other = self.pool.get(f._obj)
        # TODO the condition could use fields_get_keys().
        if f._fields_id not in other._columns.keys():
            if f._fields_id not in other._inherit_fields.keys():
                raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
    def _m2m_raise_or_create_relation(self, cr, f):
        """Create the many2many relation table for field ``f`` when it does
        not exist yet (with foreign keys, indexes and a comment), or raise
        when the destination model is unknown.
        """
        m2m_tbl, col1, col2 = f._sql_names(self)
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
        if not cr.dictfetchall():
            if not self.pool.get(f._obj):
                raise except_orm('Programming Error', ('Many2Many destination model does not exist: `%s`') % (f._obj,))
            dest_model = self.pool.get(f._obj)
            ref = dest_model._table
            # The UNIQUE(col1, col2) constraint prevents storing the same
            # link twice.
            cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s")) WITH OIDS' % (m2m_tbl, col1, col2, col1, col2))
            # create foreign key references with ondelete=cascade, unless the targets are SQL views
            cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
            if not cr.fetchall():
                self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
            cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
            if not cr.fetchall():
                self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')
            # Index both link columns: lookups happen from either side.
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
            cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
            self.__schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
    def _add_sql_constraints(self, cr):
        """
            Modify this model's database table constraints so they match the one in
            _sql_constraints.
        """
        for (key, con, _) in self._sql_constraints:
            conname = '%s_%s' % (self._table, key)
            # Read back the current definition of the named constraint (if
            # any) so it can be compared with the declared one.
            cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
            existing_constraints = cr.dictfetchall()
                    'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
                    'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                        self._table, conname, con),
                    'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
                    'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
                    'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
                    'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
            if not existing_constraints:
                # constraint does not exists:
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            elif con.lower() not in [item['condef'].lower() for item in existing_constraints]:
                # constraint exists but its definition has changed:
                sql_actions['drop']['execute'] = True
                sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            # we need to add the constraint:
            sql_actions = [item for item in sql_actions.values()]
            sql_actions.sort(key=lambda x: x['order'])
            for sql_action in [action for action in sql_actions if action['execute']]:
                    cr.execute(sql_action['query'])
                    self.__schema.debug(sql_action['msg_ok'])
                    self.__schema.warn(sql_action['msg_err'])
    def _execute_sql(self, cr):
        """ Execute the SQL code from the _sql attribute (if any)."""
        if hasattr(self, "_sql"):
            # Statements are separated by semicolons; newlines are stripped
            # from each statement before execution.
            for line in self._sql.split(';'):
                line2 = line.replace('\n', '').strip()
    # Update objects that use this one to update their _inherits fields
3125 def _inherits_reload_src(self):
3126 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3127 for obj in self.pool.models.values():
3128 if self._name in obj._inherits:
3129 obj._inherits_reload()
3132 def _inherits_reload(self):
3133 """ Recompute the _inherit_fields mapping.
3135 This will also call itself on each inherits'd child model.
3139 for table in self._inherits:
3140 other = self.pool.get(table)
3141 for col in other._columns.keys():
3142 res[col] = (table, self._inherits[table], other._columns[col], table)
3143 for col in other._inherit_fields.keys():
3144 res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
3145 self._inherit_fields = res
3146 self._all_columns = self._get_column_infos()
3147 self._inherits_reload_src()
3150 def _get_column_infos(self):
3151 """Returns a dict mapping all fields names (direct fields and
3152 inherited field via _inherits) to a ``column_info`` struct
3153 giving detailed columns """
3155 for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
3156 result[k] = fields.column_info(k, col, parent, m2o, original_parent)
3157 for k, col in self._columns.iteritems():
3158 result[k] = fields.column_info(k, col)
3162 def _inherits_check(self):
3163 for table, field_name in self._inherits.items():
3164 if field_name not in self._columns:
3165 logging.getLogger('init').info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.' % (field_name, self._name))
3166 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3167 required=True, ondelete="cascade")
3168 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() != "cascade":
3169 logging.getLogger('init').warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.' % (field_name, self._name))
3170 self._columns[field_name].required = True
3171 self._columns[field_name].ondelete = "cascade"
3173 #def __getattr__(self, name):
3175 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3176 # (though inherits doesn't use Python inheritance).
3177 # Handles translating between local ids and remote ids.
3178 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3179 # when you have inherits.
3181 # for model, field in self._inherits.iteritems():
3182 # proxy = self.pool.get(model)
3183 # if hasattr(proxy, name):
3184 # attribute = getattr(proxy, name)
3185 # if not hasattr(attribute, '__call__'):
3189 # return super(orm, self).__getattr__(name)
3191 # def _proxy(cr, uid, ids, *args, **kwargs):
3192 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3193 # lst = [obj[field].id for obj in objects if obj[field]]
3194 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """ Return the definition of each field.

        The returned value is a dictionary (indiced by field name) of
        dictionaries. The _inherits'd fields are included. The string, help,
        and selection (if present) attributes are translated.

        :param cr: database cursor
        :param user: current user id
        :param fields: list of fields
        :param context: context arguments, like lang, time zone
        :return: dictionary of field dictionaries, each one describing a field of the business object
        :raise AccessError: * if user has no create/write rights on the requested object
        """
        # Without write/create access, every field is reported read-only.
        write_access = self.check_write(cr, user, False) or \
           self.check_create(cr, user, False)
        translation_obj = self.pool.get('ir.translation')
        # Collect inherited field definitions first; local columns below
        # override them on name clashes.
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))
        for f, field in self._columns.iteritems():
            if allfields and f not in allfields:
            res[f] = fields.field_to_dict(self, cr, user, field, context=context)
            if not write_access:
                res[f]['readonly'] = True
                res[f]['states'] = {}
            # Translate label, help text and selection values into the
            # context language (falling back to en_US).
            if 'string' in res[f]:
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
                    res[f]['string'] = res_trans
            if 'help' in res[f]:
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
                    res[f]['help'] = help_trans
            if 'selection' in res[f]:
                if isinstance(field.selection, (tuple, list)):
                    sel = field.selection
                    for key, val in sel:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                        sel2.append((key, val2 or val))
                    res[f]['selection'] = sel2
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """ Read records with given ids with the given fields

        :param cr: database cursor
        :param user: current user id
        :param ids: id or list of the ids of the records to read
        :param fields: optional list of field names to return (default: all fields would be returned)
        :type fields: list (example ['field_name_1', ...])
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang``, ``context_tz`` to alter the results of the call.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: list of dictionaries((dictionary per record asked)) with requested field values
        :rtype: [{‘name_of_the_field’: value, ...}, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        self.check_read(cr, user)
            # Default to every local and inherited field.
            fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
        if isinstance(ids, (int, long)):
        # Accept browse-record-like dicts in the id list by extracting 'id'.
        select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
        result = self._read_flat(cr, user, select, fields, context, load)
            for key, v in r.items():
        # When a single id was passed, return a single dict (or False).
        if isinstance(ids, (int, long, dict)):
            return result and result[0] or False
    def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
        """Low-level implementation of read(): fetch ``fields_to_read`` for
        ``ids`` honoring ir.rule clauses, then post-process translated,
        inherited, symbol_get and function ("POST") fields, and blank out
        values the user's groups may not read.
        """
        if fields_to_read == None:
            fields_to_read = self._columns.keys()

        # Construct a clause for the security rules.
        # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
        # or will at least contain self._table.
        rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

        # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
        fields_pre = [f for f in fields_to_read if
                           f == self.CONCURRENCY_CHECK_FIELD
                        or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                     ] + self._inherits.values()

            def convert_field(f):
                f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
                if f in ('create_date', 'write_date'):
                    return "date_trunc('second', %s) as %s" % (f_qual, f)
                if f == self.CONCURRENCY_CHECK_FIELD:
                    if self._log_access:
                        return "COALESCE(%s.write_date, %s.create_date, now())::timestamp AS %s" % (self._table, self._table, f,)
                    return "now()::timestamp AS %s" % (f,)
                # bin_size mode: return the byte length instead of the content.
                if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                    return 'length(%s) as "%s"' % (f_qual, f)

            fields_pre2 = map(convert_field, fields_pre)
            order_by = self._parent_order or self._order
            select_fields = ','.join(fields_pre2 + [self._table + '.id'])
            query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
                query += " AND " + (' OR '.join(rule_clause))
            query += " ORDER BY " + order_by
            for sub_ids in cr.split_for_in_conditions(ids):
                    cr.execute(query, [tuple(sub_ids)] + rule_params)
                    # Fewer rows than ids means a rule filtered some out (or
                    # a record no longer exists): deny the whole read.
                    if cr.rowcount != len(sub_ids):
                        raise except_orm(_('AccessError'),
                                         _('Operation prohibited by access rules, or performed on an already deleted document (Operation: read, Document type: %s).')
                                         % (self._description,))
                    cr.execute(query, (tuple(sub_ids),))
                res.extend(cr.dictfetchall())
            res = map(lambda x: {'id': x}, ids)

        # Replace stored values by their translation for translatable fields.
        for f in fields_pre:
            if f == self.CONCURRENCY_CHECK_FIELD:
            if self._columns[f].translate:
                ids = [x['id'] for x in res]
                #TODO: optimize out of this loop
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
                    r[f] = res_trans.get(r['id'], False) or r[f]

        # Merge in the values coming from each _inherits parent record.
        for table in self._inherits:
            col = self._inherits[table]
            cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
            res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
                    if not record[col]: # if the record is deleted from _inherits table?
                    record.update(res3[record[col]])
                    if col not in fields_to_read:

        # all fields which need to be post-processed by a simple function (symbol_get)
        fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
                for f in fields_post:
                    r[f] = self._columns[f]._symbol_get(r[f])
        ids = [x['id'] for x in res]

        # all non inherited fields for which the attribute whose name is in load is False
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

        # Compute POST fields
        for f in fields_post:
            # Group function fields by their 'multi' key so each group is
            # computed in a single get() call.
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key, val in todo.items():
                res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
                assert res2 is not None, \
                    'The function field "%s" on the "%s" model returned None\n' \
                    '(a dictionary was expected).' % (val[0], self._name)
                        if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK: why a string is sometimes returned instead of a dict in python2.6
                        multi_fields = res2.get(record['id'],{})
                            record[pos] = multi_fields.get(pos,[])
                res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
                            record[f] = res2[record['id']]
            # Blank out field values the user's groups are not allowed to
            # read, using a type-appropriate placeholder.
            for field in vals.copy():
                if field in self._columns:
                    fobj = self._columns[field]
                    for group in groups:
                        module = group.split(".")[0]
                        grp = group.split(".")[1]
                        cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                                   (grp, module, 'res.groups', user))
                        readonly = cr.fetchall()
                        if readonly[0][0] >= 1:
                        elif readonly[0][0] == 0:
                        if type(vals[field]) == type([]):
                        elif type(vals[field]) == type(0.0):
                        elif type(vals[field]) == type(''):
                            vals[field] = '=No Permission='
    # TODO check READ access
    def perm_read(self, cr, user, ids, context=None, details=True):
        """
        Returns some metadata about the given records.

        :param details: if True, \*_uid fields are replaced with the name of the user
        :return: list of ownership dictionaries for each requested record
        :rtype: list of dictionaries with the following keys:

            * create_uid: user who created the record
            * create_date: date when the record was created
            * write_uid: last user who changed the record
            * write_date: date of the last change to the record
            * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
        """
        # Remember whether a single id was passed (the caller then expects a
        # single dict rather than a list).
        uniq = isinstance(ids, (int, long))
        if self._log_access:
            fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
        quoted_table = '"%s"' % self._table
        fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
        # LEFT JOIN on ir_model_data retrieves the record's XML id, if any.
        query = '''SELECT %s, __imd.module, __imd.name
                   FROM %s LEFT JOIN ir_model_data __imd
                       ON (__imd.model = %%s and __imd.res_id = %s.id)
                   WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
        cr.execute(query, (self._name, tuple(ids)))
        res = cr.dictfetchall()
                r[key] = r[key] or False
                if details and key in ('write_uid', 'create_uid') and r[key]:
                        r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
                        pass # Leave the numeric uid there
            r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
            del r['name'], r['module']
    def _check_concurrency(self, cr, ids, context):
        """Raise a ConcurrencyException when one of ``ids`` was modified in
        the database after the timestamp the client supplied in the context
        under ``self.CONCURRENCY_CHECK_FIELD``.
        """
        if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
        for sub_ids in cr.split_for_in_conditions(ids):
                # Context keys are "<model>,<id>"; pop so each record is
                # checked at most once per call.
                id_ref = "%s,%s" % (self._name, id)
                update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
                    ids_to_check.extend([id, update_date])
            if not ids_to_check:
            cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
                # mention the first one only to keep the error message readable
                raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
3525 def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
3526 """Verifies that the operation given by ``operation`` is allowed for the user
3527 according to the access rights."""
3528 return self.pool.get('ir.model.access').check(cr, uid, self._name, operation, raise_exception)
3530 def check_create(self, cr, uid, raise_exception=True):
3531 return self.check_access_rights(cr, uid, 'create', raise_exception)
3533 def check_read(self, cr, uid, raise_exception=True):
3534 return self.check_access_rights(cr, uid, 'read', raise_exception)
3536 def check_unlink(self, cr, uid, raise_exception=True):
3537 return self.check_access_rights(cr, uid, 'unlink', raise_exception)
3539 def check_write(self, cr, uid, raise_exception=True):
3540 return self.check_access_rights(cr, uid, 'write', raise_exception)
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """Verifies that the operation given by ``operation`` is allowed for the user
        according to ir.rules.

        :param operation: one of ``write``, ``unlink``
        :raise except_orm: * if current ir.rules do not permit this operation.
        :return: None if the operation is allowed
        """
        # The super-user bypasses all record rules.
        if uid == SUPERUSER_ID:

        if self.is_transient():
            # Only one single implicit access rule for transient models: owner only!
            # This is ok to hardcode because we assert that TransientModels always
            # have log_access enabled and thus the create_uid column is always there.
            # And even with _inherits, these fields are always present in the local
            # table too, so no need for JOINs.
            cr.execute("""SELECT distinct create_uid
                          WHERE id IN %%s""" % self._table, (tuple(ids),))
            uids = [x[0] for x in cr.fetchall()]
            if len(uids) != 1 or uids[0] != uid:
                raise except_orm(_('AccessError'), '%s access is '
                    'restricted to your own records for transient models '
                    '(except for the super-user).' % operation.capitalize())
            where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
                where_clause = ' and ' + ' and '.join(where_clause)
                for sub_ids in cr.split_for_in_conditions(ids):
                    cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                               ' WHERE ' + self._table + '.id IN %s' + where_clause,
                               [sub_ids] + where_params)
                    # Fewer matches than ids means a rule filtered some out
                    # (or a record no longer exists): deny the operation.
                    if cr.rowcount != len(sub_ids):
                        raise except_orm(_('AccessError'),
                                         _('Operation prohibited by access rules, or performed on an already deleted document (Operation: %s, Document type: %s).')
                                         % (operation, self._description))
# Delete the given records, with all the ORM bookkeeping that entails:
# access checks, default-property protection, workflow cleanup,
# ir.model.data / ir.values cleanup, and stored-function recomputation.
# NOTE(review): several source lines are elided in this dump (docstring
# delimiters, ids normalization body, parts of the domain literal, the
# workflow loop header).
3580 def unlink(self, cr, uid, ids, context=None):
3582 Delete records with given ids
3584 :param cr: database cursor
3585 :param uid: current user id
3586 :param ids: id or list of ids
3587 :param context: (optional) context arguments, like lang, time zone
3589 :raise AccessError: * if user has no unlink rights on the requested object
3590 * if user tries to bypass access rules for unlink on the requested object
3591 :raise UserError: if the record is default property for other records
# Normalize a scalar id to a list (normalization body elided in dump).
3596 if isinstance(ids, (int, long)):
# Compute the stored-function triggers BEFORE deleting, while the
# records still exist.
3599 result_store = self._store_get_values(cr, uid, ids, None, context)
3601 self._check_concurrency(cr, ids, context)
3603 self.check_unlink(cr, uid)
# Refuse to delete records that serve as default values (ir.property
# rows with no res_id pointing at them).
3605 properties = self.pool.get('ir.property')
3606 domain = [('res_id', '=', False),
3607 ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
3609 if properties.search(cr, uid, domain, context=context):
3610 raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
# Drop any workflow instances attached to the records being deleted.
3612 wf_service = netsvc.LocalService("workflow")
3614 wf_service.trg_delete(uid, self._name, oid, cr)
3617 self.check_access_rule(cr, uid, ids, 'unlink', context=context)
3618 pool_model_data = self.pool.get('ir.model.data')
3619 ir_values_obj = self.pool.get('ir.values')
3620 for sub_ids in cr.split_for_in_conditions(ids):
3621 cr.execute('delete from ' + self._table + ' ' \
3622 'where id IN %s', (sub_ids,))
3624 # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
3625 # as these are not connected with real database foreign keys, and would be dangling references.
3626 # Note: following steps performed as admin to avoid access rights restrictions, and with no context
3627 # to avoid possible side-effects during admin calls.
3628 # Step 1. Calling unlink of ir_model_data only for the affected IDS
3629 reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
3630 # Step 2. Marching towards the real deletion of referenced records
3632 pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)
3634 # For the same reason, removing the record relevant to ir_values
3635 ir_value_ids = ir_values_obj.search(cr, uid,
3636 ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
3639 ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)
# Recompute stored function fields on OTHER models that depended on the
# deleted records; only ids that still exist are recomputed.
3641 for order, object, store_ids, fields in result_store:
3642 if object != self._name:
3643 obj = self.pool.get(object)
3644 cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
3645 rids = map(lambda x: x[0], cr.fetchall())
3647 obj._store_set_values(cr, uid, rids, fields, context)
# Core ORM update: readonly-group filtering, direct SQL UPDATE for
# classic columns, translations, function-field setters, _inherits
# propagation, parent_left/right (nested-set) maintenance, stored-field
# recomputation and workflow triggers.
# NOTE(review): this dump elides many source lines (readonly-branch
# bodies, else-branches of the parent_store queries, several loop
# headers); comments below describe only what is visible.
3654 def write(self, cr, user, ids, vals, context=None):
3656 Update records with given ids with the given field values
3658 :param cr: database cursor
3659 :param user: current user id
3661 :param ids: object id or list of object ids to update according to **vals**
3662 :param vals: field values to update, e.g {'field_name': new_field_value, ...}
3663 :type vals: dictionary
3664 :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
3665 :type context: dictionary
3667 :raise AccessError: * if user has no write rights on the requested object
3668 * if user tries to bypass access rules for write on the requested object
3669 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
3670 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
3672 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:
3674 + For a many2many field, a list of tuples is expected.
3675 Here is the list of tuple that are accepted, with the corresponding semantics ::
3677 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
3678 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
3679 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
3680 (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
3681 (4, ID) link to existing record with id = ID (adds a relationship)
3682 (5) unlink all (like using (3,ID) for all linked records)
3683 (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)
3686 [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]
3688 + For a one2many field, a list of tuples is expected.
3689 Here is the list of tuple that are accepted, with the corresponding semantics ::
3691 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
3692 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
3693 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
3696 [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]
3698 + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
3699 + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
# --- readonly-group filtering: inspect each written field's column
# object (local or inherited) — presumably to drop values the user's
# groups may not write; the branch bodies are elided in this dump.
3703 for field in vals.copy():
3705 if field in self._columns:
3706 fobj = self._columns[field]
3707 elif field in self._inherit_fields:
3708 fobj = self._inherit_fields[field][2]
# Check group membership via res_groups_users_rel; note this query is
# properly parameterized (compare the interpolated variant in create()).
3715 for group in groups:
3716 module = group.split(".")[0]
3717 grp = group.split(".")[1]
3718 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
3719 (grp, module, 'res.groups', user))
3720 readonly = cr.fetchall()
3721 if readonly[0][0] >= 1:
3724 elif readonly[0][0] == 0:
# Normalize a scalar id to a list (body elided in dump).
3736 if isinstance(ids, (int, long)):
3739 self._check_concurrency(cr, ids, context)
3740 self.check_write(cr, user)
# Gather stored-function triggers for the fields being written.
3742 result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
3744 # No direct update of parent_left/right
3745 vals.pop('parent_left', None)
3746 vals.pop('parent_right', None)
3748 parents_changed = []
3749 parent_order = self._parent_order or self._order
3750 if self._parent_store and (self._parent_name in vals):
3751 # The parent_left/right computation may take up to
3752 # 5 seconds. No need to recompute the values if the
3753 # parent is the same.
3754 # Note: to respect parent_order, nodes must be processed in
3755 # order, so ``parents_changed`` must be ordered properly.
3756 parent_val = vals[self._parent_name]
3758 query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
3759 (self._table, self._parent_name, self._parent_name, parent_order)
3760 cr.execute(query, (tuple(ids), parent_val))
3762 query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
3763 (self._table, self._parent_name, parent_order)
3764 cr.execute(query, (tuple(ids),))
3765 parents_changed = map(operator.itemgetter(0), cr.fetchall())
# Split fields into: direct SQL updates (upd0/upd1), function-field
# setters (upd_todo), and inherited fields (updend). Translated fields
# are only written directly when no translation is involved.
3772 totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
3774 if field in self._columns:
3775 if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
3776 if (not totranslate) or not self._columns[field].translate:
3777 upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
3778 upd1.append(self._columns[field]._symbol_set[1](vals[field]))
3779 direct.append(field)
3781 upd_todo.append(field)
3783 updend.append(field)
3784 if field in self._columns \
3785 and hasattr(self._columns[field], 'selection') \
3787 self._check_selection_field_value(cr, user, field, vals[field], context=context)
# Audit columns maintained automatically when log_access is on.
3789 if self._log_access:
3790 upd0.append('write_uid=%s')
3791 upd0.append('write_date=now()')
3795 self.check_access_rule(cr, user, ids, 'write', context=context)
# One UPDATE per id chunk; a short rowcount means a concurrent delete.
3796 for sub_ids in cr.split_for_in_conditions(ids):
3797 cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
3798 'where id IN %s', upd1 + [sub_ids])
3799 if cr.rowcount != len(sub_ids):
3800 raise except_orm(_('AccessError'),
3801 _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
# Translated fields: keep the source-language value, write the value,
# then store the translation for the context language.
3806 if self._columns[f].translate:
3807 src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
3810 # Inserting value to DB
3811 self.write(cr, user, ids, {f: vals[f]})
3812 self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
3815 # call the 'set' method of fields which are not classic_write
3816 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
3818 # default element in context must be removed when call a one2many or many2many
3819 rel_context = context.copy()
3820 for c in context.items():
3821 if c[0].startswith('default_'):
3822 del rel_context[c[0]]
3824 for field in upd_todo:
3826 result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
# Propagate inherited-field values to the parent tables (_inherits).
3828 for table in self._inherits:
3829 col = self._inherits[table]
3831 for sub_ids in cr.split_for_in_conditions(ids):
3832 cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
3833 'where id IN %s', (sub_ids,))
3834 nids.extend([x[0] for x in cr.fetchall()])
3838 if self._inherit_fields[val][0] == table:
3841 self.pool.get(table).write(cr, user, nids, v, context)
3843 self._validate(cr, user, ids, context)
3845 # TODO: use _order to set dest at the right position and not first node of parent
3846 # We can't defer parent_store computation because the stored function
3847 # fields that are computed may refer (directly or indirectly) to
3848 # parent_left/right (via a child_of domain)
3851 self.pool._init_parent[self._name] = True
# Nested-set maintenance: move each reparented node's interval to its
# new position under the new parent.
3853 order = self._parent_order or self._order
3854 parent_val = vals[self._parent_name]
3856 clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
3858 clause, params = '%s IS NULL' % (self._parent_name,), ()
3860 for id in parents_changed:
3861 cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
3862 pleft, pright = cr.fetchone()
3863 distance = pright - pleft + 1
3865 # Positions of current siblings, to locate proper insertion point;
3866 # this can _not_ be fetched outside the loop, as it needs to be refreshed
3867 # after each update, in case several nodes are sequentially inserted one
3868 # next to the other (i.e computed incrementally)
3869 cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
3870 parents = cr.fetchall()
3872 # Find Position of the element
3874 for (parent_pright, parent_id) in parents:
3877 position = parent_pright + 1
3879 # It's the first node of the parent
3884 cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
3885 position = cr.fetchone()[0] + 1
# Moving a node inside its own subtree would create a cycle.
3887 if pleft < position <= pright:
3888 raise except_orm(_('UserError'), _('Recursivity Detected.'))
# Shift intervals to open a gap at ``position``, then slide the moved
# subtree into it (two symmetric cases depending on move direction).
3890 if pleft < position:
3891 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
3892 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
3893 cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
3895 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
3896 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
3897 cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))
3899 result += self._store_get_values(cr, user, ids, vals.keys(), context)
# Deduplicate and run the stored-function recomputations.
3903 for order, object, ids_to_update, fields_to_recompute in result:
3904 key = (object, tuple(fields_to_recompute))
3905 done.setdefault(key, {})
3906 # avoid doing the same computation several times
3908 for id in ids_to_update:
3909 if id not in done[key]:
3910 done[key][id] = True
3912 self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)
# Signal the workflow engine that these records changed.
3914 wf_service = netsvc.LocalService("workflow")
3916 wf_service.trg_write(user, self._name, id, cr)
3920 # TODO: Should set perm to user.xxx
# Core ORM insert: defaults, _inherits parent-record creation, readonly
# filtering, direct SQL INSERT, parent_left/right bookkeeping,
# function-field setters, stored-field recomputation and workflow start.
# NOTE(review): many source lines are elided in this dump (try/except
# bodies, else-branches, several loop headers).
3922 def create(self, cr, user, vals, context=None):
3924 Create a new record for the model.
3926 The values for the new record are initialized using the ``vals``
3927 argument, and if necessary the result of ``default_get()``.
3929 :param cr: database cursor
3930 :param user: current user id
3932 :param vals: field values for new record, e.g {'field_name': field_value, ...}
3933 :type vals: dictionary
3934 :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
3935 :type context: dictionary
3936 :return: id of new record created
3937 :raise AccessError: * if user has no create rights on the requested object
3938 * if user tries to bypass access rules for create on the requested object
3939 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
3940 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
3942 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
3943 Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
# Transient models get garbage-collected opportunistically on create.
3950 if self.is_transient():
3951 self._transient_vacuum(cr, user)
3953 self.check_create(cr, user)
3955 vals = self._add_missing_default_values(cr, user, vals, context)
# Decide, per _inherits parent, whether a parent record must be created
# or an existing one (id passed in vals) reused.
3958 for v in self._inherits:
3959 if self._inherits[v] not in vals:
3962 tocreate[v] = {'id': vals[self._inherits[v]]}
3963 (upd0, upd1, upd2) = ('', '', [])
# Route inherited-field values to their parent model's create dict.
3965 for v in vals.keys():
3966 if v in self._inherit_fields:
3967 (table, col, col_detail, original_parent) = self._inherit_fields[v]
3968 tocreate[table][v] = vals[v]
3971 if (v not in self._inherit_fields) and (v not in self._columns):
3974 # Try-except added to filter the creation of those records whose fields are readonly.
3975 # Example : any dashboard which has all the fields readonly.(due to Views(database views))
3977 cr.execute("SELECT nextval('"+self._sequence+"')")
3979 raise except_orm(_('UserError'),
3980 _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
3982 id_new = cr.fetchone()[0]
# Create (or update) the _inherits parent records and remember their
# ids for the foreign-key columns of the new row.
3983 for table in tocreate:
3984 if self._inherits[table] in vals:
3985 del vals[self._inherits[table]]
3987 record_id = tocreate[table].pop('id', None)
3989 if record_id is None or not record_id:
3990 record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
3992 self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)
3994 upd0 += ',' + self._inherits[table]
3996 upd2.append(record_id)
3998 #Start : Set bool fields to be False if they are not touched(to make search more powerful)
3999 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
4001 for bool_field in bool_fields:
4002 if bool_field not in vals:
4003 vals[bool_field] = False
# Readonly-group filtering, analogous to write().
4005 for field in vals.copy():
4007 if field in self._columns:
4008 fobj = self._columns[field]
4010 fobj = self._inherit_fields[field][2]
4016 for group in groups:
4017 module = group.split(".")[0]
4018 grp = group.split(".")[1]
# NOTE(review): this query interpolates grp/module into the SQL string
# ('%s' quoted by hand) instead of using bound parameters like the
# equivalent query in write() does — a SQL-injection-prone pattern;
# should be parameterized.
4019 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
4020 (grp, module, 'res.groups', user))
4021 readonly = cr.fetchall()
4022 if readonly[0][0] >= 1:
4025 elif readonly[0][0] == 0:
# Classic columns go straight into the INSERT; others are deferred to
# their field 'set' method (upd_todo), except related fields.
4033 if self._columns[field]._classic_write:
4034 upd0 = upd0 + ',"' + field + '"'
4035 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
4036 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
4038 if not isinstance(self._columns[field], fields.related):
4039 upd_todo.append(field)
4040 if field in self._columns \
4041 and hasattr(self._columns[field], 'selection') \
4043 self._check_selection_field_value(cr, user, field, vals[field], context=context)
4044 if self._log_access:
4045 upd0 += ',create_uid,create_date'
4048 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
4049 self.check_access_rule(cr, user, [id_new], 'create', context=context)
4050 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
# Nested-set bookkeeping: place the new node after its last sibling
# (or at the end of the tree when it has no parent).
4052 if self._parent_store and not context.get('defer_parent_store_computation'):
4054 self.pool._init_parent[self._name] = True
4056 parent = vals.get(self._parent_name, False)
4058 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
4060 result_p = cr.fetchall()
4061 for (pleft,) in result_p:
4066 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
4067 pleft_old = cr.fetchone()[0]
4070 cr.execute('select max(parent_right) from '+self._table)
4071 pleft = cr.fetchone()[0] or 0
# Open a width-2 gap and claim it for the new leaf.
4072 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
4073 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
4074 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
4076 # default element in context must be removed when calling a one2many or many2many
4077 rel_context = context.copy()
4078 for c in context.items():
4079 if c[0].startswith('default_'):
4080 del rel_context[c[0]]
4083 for field in upd_todo:
4084 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
4085 self._validate(cr, user, [id_new], context)
# Recompute stored function fields triggered by this creation.
4087 if not context.get('no_store_function', False):
4088 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
4091 for order, object, ids, fields2 in result:
4092 if not (object, ids, fields2) in done:
4093 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
4094 done.append((object, ids, fields2))
# Optional audit log entry, then start the record's workflow.
4096 if self._log_create and not (context and context.get('no_store_function', False)):
4097 message = self._description + \
4099 self.name_get(cr, user, [id_new], context=context)[0][1] + \
4100 "' " + _("created.")
4101 self.log(cr, user, id_new, message, True, context=context)
4102 wf_service = netsvc.LocalService("workflow")
4103 wf_service.trg_create(user, self._name, id_new, cr)
# Wrap ids into browse_record proxies (dot-notation access to fields).
# NOTE(review): the else-branch line before the browse_null() fallback
# is elided in this dump.
4106 def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
4107 """Fetch records as objects allowing to use dot notation to browse fields and relations
4109 :param cr: database cursor
4110 :param uid: current user id
4111 :param select: id or list of ids.
4112 :param context: context arguments, like lang, time zone
4113 :rtype: object or list of objects requested
4116 self._list_class = list_class or browse_record_list
4118 # need to accept ints and longs because ids coming from a method
4119 # launched by button in the interface have a type long...
4120 if isinstance(select, (int, long)):
4121 return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
4122 elif isinstance(select, list):
4123 return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
# Anything else (e.g. False) yields a null placeholder object.
4125 return browse_null()
# Plan which stored fields.function computations an update triggers,
# grouped and ordered by (priority, model) for batch execution.
4127 def _store_get_values(self, cr, uid, ids, fields, context):
4128 """Returns an ordered list of fields.functions to call due to
4129 an update operation on ``fields`` of records with ``ids``,
4130 obtained by calling the 'store' functions of these fields,
4131 as setup by their 'store' attribute.
4133 :return: [(priority, model_name, [record_ids,], [function_fields,])]
4135 if fields is None: fields = []
4136 stored_functions = self.pool._store_function.get(self._name, [])
4138 # use indexed names for the details of the stored_functions:
4139 model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)
4141 # only keep functions that should be triggered for the ``fields``
# (an empty trigger_fields set means "always trigger")
4143 to_compute = [f for f in stored_functions \
4144 if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]
4147 for function in to_compute:
4148 # use admin user for accessing objects having rules defined on store fields
# Map source ids to target ids; falsy ids are dropped.
4149 target_ids = [id for id in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id]
4151 # the compound key must consider the priority and model name
4152 key = (function[priority_], function[model_name_])
4153 for target_id in target_ids:
4154 mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))
4156 # Here mapping looks like:
4157 # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
4158 # (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
4159 # (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... }
4162 # Now we need to generate the batch function calls list
4164 # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
4166 for ((priority,model), id_map) in mapping.iteritems():
4167 functions_ids_maps = {}
4168 # function_ids_maps =
4169 # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
# Invert: group ids that share the exact same set of functions so one
# batch call can compute them together.
4170 for id, functions in id_map.iteritems():
4171 functions_ids_maps.setdefault(tuple(functions), []).append(id)
4172 for functions, ids in functions_ids_maps.iteritems():
4173 call_map.setdefault((priority,model),[]).append((priority, model, ids,
4174 [f[func_field_to_compute_] for f in functions]))
4175 ordered_keys = call_map.keys()
# Flatten the per-key call lists into one ordered result list.
4179 result = reduce(operator.add, (call_map[k] for k in ordered_keys))
# Execute the planned stored-function computations and persist the
# results with direct SQL UPDATEs.
# NOTE(review): several branch/loop lines are elided in this dump
# (multi vs single dispatch, some else-branches).
4182 def _store_set_values(self, cr, uid, ids, fields, context):
4183 """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
4184 respecting ``multi`` attributes), and stores the resulting values in the database directly."""
# Build field_dict: per record id, the function fields whose stored
# value is already fresh enough (based on write_date + the trigger's
# freshness window in hours, i[5]) and can be skipped.
4189 if self._log_access:
4190 cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
4194 field_dict.setdefault(r[0], [])
4195 res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
4196 write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
4197 for i in self.pool._store_function.get(self._name, []):
4199 up_write_date = write_date + datetime.timedelta(hours=i[5])
4200 if datetime.datetime.now() < up_write_date:
4202 field_dict[r[0]].append(i[1])
# Group requested fields by their _multi key so multi-fields are
# computed together in a single function call.
4208 if self._columns[f]._multi not in keys:
4209 keys.append(self._columns[f]._multi)
4210 todo.setdefault(self._columns[f]._multi, [])
4211 todo[self._columns[f]._multi].append(f)
# --- multi-field branch: one get() returns a dict of values per id ---
4215 # use admin user for accessing objects having rules defined on store fields
4216 result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
4217 for id, value in result.items():
# Drop values whose stored copy is still fresh (see field_dict above).
4219 for f in value.keys():
4220 if f in field_dict[id]:
# many2one values come back as (id, name) tuples; store only the id.
4227 if self._columns[v]._type in ('many2one', 'one2one'):
4229 value[v] = value[v][0]
4232 upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
4233 upd1.append(self._columns[v]._symbol_set[1](value[v]))
4236 cr.execute('update "' + self._table + '" set ' + \
4237 ','.join(upd0) + ' where id = %s', upd1)
# --- single-field branch: one get() per field, one UPDATE per id ---
4241 # use admin user for accessing objects having rules defined on store fields
4242 result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
4243 for r in result.keys():
4245 if r in field_dict.keys():
4246 if f in field_dict[r]:
4248 for id, value in result.items():
4249 if self._columns[f]._type in ('many2one', 'one2one'):
4254 cr.execute('update "' + self._table + '" set ' + \
4255 '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
def perm_write(self, cr, user, ids, fields, context=None):
    """Removed ORM API — always raises ``NotImplementedError``.

    The signature is kept so that legacy callers fail with an explicit,
    translated error instead of an ``AttributeError``.
    """
    # Build the translated message first, then abort unconditionally.
    error_message = _('This method does not exist anymore')
    raise NotImplementedError(error_message)
4264 # TODO: improve handling of NULL values
# Translate an OpenERP search domain into a Query (tables, WHERE
# fragments, parameters), adding the implicit active=True filter.
# NOTE(review): some else-branch lines are elided in this dump.
4265 def _where_calc(self, cr, user, domain, active_test=True, context=None):
4266 """Computes the WHERE clause needed to implement an OpenERP domain.
4267 :param domain: the domain to compute
4269 :param active_test: whether the default filtering of records with ``active``
4270 field set to ``False`` should be applied.
4271 :return: the query expressing the given domain as provided in domain
4272 :rtype: osv.query.Query
4277 # if the object has a field named 'active', filter out all inactive
4278 # records unless they were explicitly asked for
4279 if 'active' in self._columns and (active_test and context.get('active_test', True)):
# Only inject the implicit filter when the caller's domain does not
# already mention 'active'.
4281 active_in_args = False
4283 if a[0] == 'active':
4284 active_in_args = True
4285 if not active_in_args:
4286 domain.insert(0, ('active', '=', 1))
# (elided branch) With no caller domain, the filter IS the domain.
4288 domain = [('active', '=', 1)]
# Compile the domain into SQL via the expression parser.
4291 e = expression.expression(cr, user, domain, self, context)
4292 tables = e.get_tables()
4293 where_clause, where_params = e.to_sql()
4294 where_clause = where_clause and [where_clause] or []
# (elided branch) Empty domain: no WHERE parts, just the model's table.
4296 where_clause, where_params, tables = [], [], ['"%s"' % self._table]
4298 return Query(tables, where_clause, where_params)
# Validate an ORDER BY specification against regex_order before it is
# ever interpolated into SQL (defense against injection via 'order').
4300 def _check_qorder(self, word):
4301 if not regex_order.match(word):
4302 raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
# Inject record-rule (ir.rule) restrictions into an existing Query,
# for this model and for all its _inherits parents.
4305 def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
4306 """Add what's missing in ``query`` to implement all appropriate ir.rules
4307 (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)
4309 :param query: the current query object
# Helper: merge one rule's (clause, params, tables) triple into query,
# joining up to the parent table first when the rule is inherited.
4311 def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
4313 if parent_model and child_object:
4314 # as inherited rules are being applied, we need to add the missing JOIN
4315 # to reach the parent table (if it was not JOINed yet in the query)
4316 child_object._inherits_join_add(child_object, parent_model, query)
4317 query.where_clause += added_clause
4318 query.where_clause_params += added_params
# Only add tables not already referenced by the query.
4319 for table in added_tables:
4320 if table not in query.tables:
4321 query.tables.append(table)
4325 # apply main rules on the object
4326 rule_obj = self.pool.get('ir.rule')
4327 apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))
4329 # apply ir.rules from the parents (through _inherits)
4330 for inherited_model in self._inherits:
4331 kwargs = dict(parent_model=inherited_model, child_object=self) #workaround for python2.5
4332 apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
# Build the ORDER BY fragment(s) for sorting by a many2one field: join
# the destination table and sort by the destination model's own order.
4334 def _generate_m2o_order_by(self, order_field, query):
4336 Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
4337 either native m2o fields or function/related fields that are stored, including
4338 intermediate JOINs for inheritance if required.
4340 :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
4342 if order_field not in self._columns and order_field in self._inherit_fields:
4343 # also add missing joins for reaching the table containing the m2o field
4344 qualified_field = self._inherits_join_calc(order_field, query)
4345 order_field_column = self._inherit_fields[order_field][2]
# (elided else) Field lives on this model's own table.
4347 qualified_field = '"%s"."%s"' % (self._table, order_field)
4348 order_field_column = self._columns[order_field]
4350 assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
# Non-stored function/related m2o fields have no column to join on;
# log and give up on sorting (return elided in dump).
4351 if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
4352 logging.getLogger('orm.search').debug("Many2one function/related fields must be stored " \
4353 "to be used as ordering fields! Ignoring sorting for %s.%s",
4354 self._name, order_field)
4357 # figure out the applicable order_by for the m2o
4358 dest_model = self.pool.get(order_field_column._obj)
4359 m2o_order = dest_model._order
4360 if not regex_order.match(m2o_order):
4361 # _order is complex, can't use it here, so we default to _rec_name
4362 m2o_order = dest_model._rec_name
4364 # extract the field names, to be able to qualify them and add desc/asc
4366 for order_part in m2o_order.split(","):
4367 m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
4368 m2o_order = m2o_order_list
4370 # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
4371 # as we don't want to exclude results that have NULL values for the m2o
4372 src_table, src_field = qualified_field.replace('"','').split('.', 1)
4373 query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
# Return one qualified column per order part (list) or a single one.
4374 qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
4375 return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
# Turn a user-supplied order spec into a validated SQL ORDER BY clause,
# resolving local, inherited and many2one fields.
4378 def _generate_order_by(self, order_spec, query):
4380 Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
4381 a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.
4383 :raise except_orm: in case order_spec is malformed
# Fall back to the model's default _order when nothing usable is given.
4385 order_by_clause = self._order
4387 order_by_elements = []
# Reject anything regex_order does not allow (anti SQL injection).
4388 self._check_qorder(order_spec)
4389 for order_part in order_spec.split(','):
4390 order_split = order_part.strip().split(' ')
4391 order_field = order_split[0].strip()
4392 order_direction = order_split[1].strip() if len(order_split) == 2 else ''
4394 if order_field == 'id':
4395 order_by_clause = '"%s"."%s"' % (self._table, order_field)
4396 elif order_field in self._columns:
4397 order_column = self._columns[order_field]
4398 if order_column._classic_read:
4399 inner_clause = '"%s"."%s"' % (self._table, order_field)
4400 elif order_column._type == 'many2one':
4401 inner_clause = self._generate_m2o_order_by(order_field, query)
4403 continue # ignore non-readable or "non-joinable" fields
4404 elif order_field in self._inherit_fields:
# Inherited field: resolve against the parent model and add the joins.
4405 parent_obj = self.pool.get(self._inherit_fields[order_field][3])
4406 order_column = parent_obj._columns[order_field]
4407 if order_column._classic_read:
4408 inner_clause = self._inherits_join_calc(order_field, query)
4409 elif order_column._type == 'many2one':
4410 inner_clause = self._generate_m2o_order_by(order_field, query)
4412 continue # ignore non-readable or "non-joinable" fields
# m2o ordering may expand to several columns; apply the direction to each.
4414 if isinstance(inner_clause, list):
4415 for clause in inner_clause:
4416 order_by_elements.append("%s %s" % (clause, order_direction))
4418 order_by_elements.append("%s %s" % (inner_clause, order_direction))
4419 if order_by_elements:
4420 order_by_clause = ",".join(order_by_elements)
4422 return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
# Low-level search: ACL check, implicit transient-owner filter, ir.rule
# application, then one SELECT (count or id list).
# NOTE(review): the count/ids branching lines are elided in this dump.
4424 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
4426 Private implementation of search() method, allowing specifying the uid to use for the access right check.
4427 This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
4428 by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
4429 This is ok at the security level because this method is private and not callable through XML-RPC.
4431 :param access_rights_uid: optional user ID to use when checking access rights
4432 (not for ir.rules, this is only for ir.model.access)
4436 self.check_read(cr, access_rights_uid or user)
4438 # For transient models, restrict access to the current user, except for the super-user
4439 if self.is_transient() and self._log_access and user != SUPERUSER_ID:
4440 args = expression.AND(([('create_uid', '=', user)], args or []))
4442 query = self._where_calc(cr, user, args, context=context)
4443 self._apply_ir_rules(cr, user, query, 'read', context=context)
4444 order_by = self._generate_order_by(order, query)
4445 from_clause, where_clause, where_clause_params = query.get_sql()
# limit/offset/where fragments are empty strings when unused.
4447 limit_str = limit and ' limit %d' % limit or ''
4448 offset_str = offset and ' offset %d' % offset or ''
4449 where_str = where_clause and (" WHERE %s" % where_clause) or ''
# count=True path: return only the number of matching records.
4452 cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
# Normal path: ordered id list.
4455 cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
4457 return [x[0] for x in res]
4459 # returns the different values ever entered for one field
4460 # this is used, for example, in the client when the user hits enter on
def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
    """Return the different values ever entered for one field.

    Used, for example, by the client when the user hits enter on a
    search field. Delegates to the parent model when ``field`` is
    inherited via ``_inherits``.
    """
    # normalize the default: downstream code expects a (possibly empty) domain list
    if not args:
        args = []
    if field in self._inherit_fields:
        return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
    else:
        return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
def copy_data(self, cr, uid, id, default=None, context=None):
    """
    Copy given record's data with all its fields values

    :param cr: database cursor
    :param user: current user id
    :param id: id of the record to copy
    :param default: field values to override in the original values of the copied record
    :type default: dictionary
    :param context: context arguments, like lang, time zone
    :type context: dictionary
    :return: dictionary containing all the field values
    """
    if context is None:
        context = {}

    # avoid recursion through already copied records in case of circular relationship
    seen_map = context.setdefault('__copy_data_seen',{})
    if id in seen_map.setdefault(self._name,[]):
        return
    seen_map[self._name].append(id)

    if default is None:
        default = {}
    if 'state' not in default:
        if 'state' in self._defaults:
            if callable(self._defaults['state']):
                default['state'] = self._defaults['state'](self, cr, uid, context)
            else:
                default['state'] = self._defaults['state']

    # read without 'lang' so that raw (untranslated) values are copied
    context_wo_lang = context.copy()
    if 'lang' in context:
        del context_wo_lang['lang']
    data = self.read(cr, uid, [id,], context=context_wo_lang)
    if data:
        data = data[0]
    else:
        raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))

    # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
    fields = self.fields_get(cr, uid, context=context)
    for f in fields:
        ftype = fields[f]['type']

        # audit-trail columns must not be carried over to the copy
        if self._log_access and f in LOG_ACCESS_COLUMNS:
            del data[f]

        if f in default:
            data[f] = default[f]
        elif 'function' in fields[f]:
            # function fields are recomputed, never copied
            del data[f]
        elif ftype == 'many2one':
            try:
                # read() returns (id, name) pairs; keep only the id
                data[f] = data[f] and data[f][0]
            except Exception:
                # value may already have been removed above (e.g. log-access columns)
                pass
        elif ftype in ('one2many', 'one2one'):
            res = []
            rel = self.pool.get(fields[f]['relation'])
            if data[f]:
                # duplicate following the order of the ids
                # because we'll rely on it later for copying
                # translations in copy_translation()!
                for rel_id in data[f]:
                    # the lines are first duplicated using the wrong (old)
                    # parent but then are reassigned to the correct one thanks
                    # to the (0, 0, ...)
                    d = rel.copy_data(cr, uid, rel_id, context=context)
                    if d:
                        res.append((0, 0, d))
            data[f] = res
        elif ftype == 'many2many':
            data[f] = [(6, 0, data[f])]

    del data['id']

    # make sure we don't break the current parent_store structure and
    # force a clean recompute!
    for parent_column in ['parent_left', 'parent_right']:
        data.pop(parent_column, None)

    # Remove _inherits field's from data recursively, missing parents will
    # be created by create() (so that copy() copy everything).
    def remove_ids(inherits_dict):
        for parent_table in inherits_dict:
            del data[inherits_dict[parent_table]]
            remove_ids(self.pool.get(parent_table)._inherits)
    remove_ids(self._inherits)
    return data
def copy_translations(self, cr, uid, old_id, new_id, context=None):
    """Duplicate the ir.translation entries of record ``old_id`` onto
    the freshly copied record ``new_id``, recursing into o2o/o2m children.
    """
    if context is None:
        context = {}

    # avoid recursion through already copied records in case of circular relationship
    seen_map = context.setdefault('__copy_translations_seen',{})
    if old_id in seen_map.setdefault(self._name,[]):
        return
    seen_map[self._name].append(old_id)

    trans_obj = self.pool.get('ir.translation')
    # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
    fields = self.fields_get(cr, uid, context=context)

    translation_records = []
    for field_name, field_def in fields.items():
        # we must recursively copy the translations for o2o and o2m
        if field_def['type'] in ('one2one', 'one2many'):
            target_obj = self.pool.get(field_def['relation'])
            old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
            # here we rely on the order of the ids to match the translations
            # as foreseen in copy_data()
            old_children = sorted(old_record[field_name])
            new_children = sorted(new_record[field_name])
            for (old_child, new_child) in zip(old_children, new_children):
                target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
        # and for translatable fields we keep them for copy
        elif field_def.get('translate'):
            trans_name = ''
            if field_name in self._columns:
                trans_name = self._name + "," + field_name
            elif field_name in self._inherit_fields:
                trans_name = self._inherit_fields[field_name][0] + "," + field_name
            if trans_name:
                trans_ids = trans_obj.search(cr, uid, [
                        ('name', '=', trans_name),
                        ('res_id', '=', old_id)
                ])
                translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))

    for record in translation_records:
        # recreate the translation attached to the new record
        del record['id']
        record['res_id'] = new_id
        trans_obj.create(cr, uid, record, context=context)
def copy(self, cr, uid, id, default=None, context=None):
    """
    Duplicate record with given id updating it with default values

    :param cr: database cursor
    :param uid: current user id
    :param id: id of the record to copy
    :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
    :type default: dictionary
    :param context: context arguments, like lang, time zone
    :type context: dictionary
    :return: id of the newly created record
    """
    if context is None:
        context = {}
    # work on a private copy: copy_data/copy_translations track seen ids in it
    context = context.copy()
    data = self.copy_data(cr, uid, id, default, context)
    new_id = self.create(cr, uid, data, context)
    self.copy_translations(cr, uid, id, new_id, context)
    return new_id
def exists(self, cr, uid, ids, context=None):
    """Checks whether the given id or ids exist in this model,
    and return the list of ids that do. This is simple to use for
    a truth test on a browse_record::

        if record.exists():
            pass

    :param ids: id or list of ids to check for existence
    :type ids: int or [int]
    :return: the list of ids that currently exist, out of
             the given ``ids``
    """
    # accept a single scalar id for convenience
    if not isinstance(ids, (list, tuple)):
        ids = [ids]
    query = 'SELECT id FROM "%s"' % (self._table)
    # note the leading space: without it the table name and WHERE would fuse
    cr.execute(query + " WHERE ID IN %s", (tuple(ids),))
    return [x[0] for x in cr.fetchall()]
def check_recursion(self, cr, uid, ids, context=None, parent=None):
    """Deprecated entry point kept for backward compatibility.

    Emits a DeprecationWarning and delegates to :meth:`_check_recursion`.
    """
    message = "You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % self._name
    warnings.warn(message, DeprecationWarning, stacklevel=3)
    valid_parent = parent is None or parent in self._columns or parent in self._inherit_fields
    assert valid_parent, \
        "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
    return self._check_recursion(cr, uid, ids, context, parent)
4656 def _check_recursion(self, cr, uid, ids, context=None, parent=None):
4658 Verifies that there is no loop in a hierarchical structure of records,
4659 by following the parent relationship using the **parent** field until a loop
4660 is detected or until a top-level record is found.
4662 :param cr: database cursor
4663 :param uid: current user id
4664 :param ids: list of ids of records to check
4665 :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
4666 :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
4670 parent = self._parent_name
4672 query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
4675 for i in range(0, len(ids), cr.IN_MAX):
4676 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
4677 cr.execute(query, (tuple(sub_ids_parent),))
4678 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
4679 ids_parent = ids_parent2
4680 for i in ids_parent:
4685 def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
4686 """Retrieve the External ID(s) of any database record.
4688 **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
4690 :return: map of ids to the list of their fully qualified External IDs
4691 in the form ``module.key``, or an empty list when there's no External
4692 ID for a record, e.g.::
4694 { 'id': ['module.ext_id', 'module.ext_id_bis'],
4697 ir_model_data = self.pool.get('ir.model.data')
4698 data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
4699 data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
4702 # can't use dict.fromkeys() as the list would be shared!
4704 for record in data_results:
4705 result[record['res_id']].append('%(module)s.%(name)s' % record)
def get_external_id(self, cr, uid, ids, *args, **kwargs):
    """Retrieve the External ID of any database record, if there
    is one. This method works as a possible implementation
    for a function field, to be able to add it to any
    model object easily, referencing it as ``Model.get_external_id``.

    When multiple External IDs exist for a record, only one
    of them is returned (randomly).

    :return: map of ids to their fully qualified XML ID,
             defaulting to an empty string when there's none
             (to be usable as a function field), e.g.::

                 { 'id': 'module.ext_id',
                   'id2': '' }
    """
    results = self._get_xml_ids(cr, uid, ids)
    # flatten each list of external ids to a single value (or '')
    for k, v in results.items():
        if results[k]:
            results[k] = v[0]
        else:
            results[k] = ''
    return results
# backwards compatibility: keep the historical *_xml_id names as aliases
# of the *_external_id methods defined above
get_xml_id = get_external_id
_get_xml_ids = _get_external_ids
def is_transient(self):
    """ Return whether the model is transient.

    See :class:`TransientModel`.
    """
    return self._transient
def _transient_clean_rows_older_than(self, cr, seconds):
    """Unlink transient records whose last touch (write or create date)
    is more than ``seconds`` seconds in the past.
    """
    assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
    sql = ("SELECT id FROM " + self._table + " WHERE"
           " COALESCE(write_date, create_date, now())::timestamp <"
           " (now() - interval %s)")
    cr.execute(sql, ("%s seconds" % seconds,))
    stale_ids = [row[0] for row in cr.fetchall()]
    # deletion is done as the super-user: all users' stale rows are cleaned
    self.unlink(cr, SUPERUSER_ID, stale_ids)
def _transient_clean_old_rows(self, cr, count):
    """Unlink the ``count`` least recently touched transient records."""
    assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
    # select the `count` oldest rows (by last write, falling back to creation date)
    cr.execute(
        "SELECT id, COALESCE(write_date, create_date, now())::timestamp"
        " AS t FROM " + self._table +
        " ORDER BY t LIMIT %s", (count,))
    ids = [x[0] for x in cr.fetchall()]
    # deletion is done as the super-user: all users' stale rows are cleaned
    self.unlink(cr, SUPERUSER_ID, ids)
4763 def _transient_vacuum(self, cr, uid, force=False):
4764 """Clean the transient records.
4766 This unlinks old records from the transient model tables whenever the
4767 "_transient_max_count" or "_max_age" conditions (if any) are reached.
4768 Actual cleaning will happen only once every "_transient_check_time" calls.
4769 This means this method can be called frequently called (e.g. whenever
4770 a new record is created).
4772 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
4773 self._transient_check_count += 1
4774 if (not force) and (self._transient_check_count % self._transient_check_time):
4775 self._transient_check_count = 0
4778 # Age-based expiration
4779 if self._transient_max_hours:
4780 self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)
4782 # Count-based expiration
4783 if self._transient_max_count:
4784 self._transient_clean_old_rows(cr, self._transient_max_count)
def resolve_o2m_commands_to_record_dicts(self, cr, uid, field_name, o2m_commands, fields=None, context=None):
    """ Serializes o2m commands into record dictionaries (as if
    all the o2m records came from the database via a read()), and
    returns an iterable over these dictionaries.

    Because o2m commands might be creation commands, not all
    record ids will contain an ``id`` field. Commands matching an
    existing record (``UPDATE`` and ``LINK_TO``) will have an id.

    .. note:: ``CREATE``, ``UPDATE`` and ``LINK_TO`` stand for the
              o2m command codes ``0``, ``1`` and ``4``

    :param field_name: name of the o2m field matching the commands
    :type field_name: str
    :param o2m_commands: one2many commands to execute on ``field_name``
    :type o2m_commands: list((int|False, int|False, dict|False))
    :param fields: list of fields to read from the database, when applicable
    :type fields: list(str)
    :raises AssertionError: if a command is not ``CREATE``, ``UPDATE`` or ``LINK_TO``
    :returns: o2m records in a shape similar to that returned by
              ``read()`` (except records may be missing the ``id``
              field if they don't exist in db)
    :rtype: ``list(dict)``
    """
    o2m_model = self._all_columns[field_name].column._obj

    # convert single ids and pairs to tripled commands
    commands = []
    for o2m_command in o2m_commands:
        if not isinstance(o2m_command, (list, tuple)):
            # a bare id is an implicit LINK_TO
            command = 4
            commands.append((command, o2m_command, False))
        elif len(o2m_command) == 1:
            (command,) = o2m_command
            commands.append((command, False, False))
        elif len(o2m_command) == 2:
            command, id = o2m_command
            commands.append((command, id, False))
        else:
            command = o2m_command[0]
            commands.append(o2m_command)
        assert command in (0, 1, 4), \
            "Only CREATE, UPDATE and LINK_TO commands are supported in resolver"

    # extract records to read, by id, in a mapping dict
    ids_to_read = [id for (command, id, _) in commands if command in (1, 4)]
    records_by_id = dict(
        (record['id'], record)
        for record in self.pool.get(o2m_model).read(
            cr, uid, ids_to_read, fields=fields, context=context))

    record_dicts = []
    # merge record from db with record provided by command
    for command, id, record in commands:
        item = {}
        if command in (1, 4): item.update(records_by_id[id])
        if command in (0, 1): item.update(record)
        record_dicts.append(item)
    return record_dicts
4849 # keep this import here, at top it will cause dependency cycle errors
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class.

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = False # True in a TransientModel
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vacuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = True # enables the periodic vacuum machinery of BaseModel
class AbstractModel(BaseModel):
    """Abstract Model super-class for creating an abstract class meant to be
    inherited by regular models (Models or TransientModels) but not meant to
    be usable on its own, or persisted.

    Technical note: we don't want to make AbstractModel the super-class of
    Model or BaseModel because it would not make sense to put the main
    definition of persistence methods such as create() in it, and still we
    should be able to override them within an AbstractModel.
    """
    _auto = False # don't create any database backend for AbstractModels
    _register = False # not visible in ORM registry, meant to be python-inherited only
4892 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: