1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
24 # Object relational mapping to postgresql module
24 # . Hierarchical structure
25 # . Constraints consistency, validations
26 # . Object meta Data depends on its status
27 # . Optimised processing by complex query (multiple actions at once)
28 # . Default fields value
29 # . Permissions optimisation
30 # . Persistent object: DB postgresql
32 # . Multi-level caching system
33 # . 2 different inheritance mechanisms
35 # - classicals (varchar, integer, boolean, ...)
36 # - relations (one2many, many2one, many2many)
52 import openerp.netsvc as netsvc
53 from lxml import etree
54 from openerp.tools.config import config
55 from openerp.tools.translate import _
58 from query import Query
59 import openerp.tools as tools
60 from openerp.tools.safe_eval import safe_eval as eval
62 # List of etree._Element subclasses that we choose to ignore when parsing XML.
63 from openerp.tools import SKIPPED_ELEMENT_TYPES
65 regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
66 regex_object_name = re.compile(r'^[a-z0-9_.]+$')
68 # Mapping between openerp module names and their osv classes.
69 module_class_list = {}
71 # Super-user identifier (aka Administrator aka root)
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

        The _name attribute in osv and osv_memory object is subject to
        some restrictions. This function returns True or False whether
        the given name is allowed or not.

        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in other not, which leads to this kind
        of errors:

            psycopg2.ProgrammingError: relation "xxx" does not exist).

        The same restriction should apply to both osv and osv_memory
        objects for consistency.
    """
    # Valid exactly when the module-level lowercase/digit/underscore/dot
    # pattern matches; a single boolean expression replaces the two-branch
    # match-is-None test.
    return regex_object_name.match(name) is not None
def raise_on_invalid_object_name(name):
    # Accept valid names immediately; otherwise log on the 'orm' channel
    # and abort with an ORM-level exception.
    if check_object_name(name):
        return
    error = "The _name attribute %s is not valid." % name
    netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, error)
    raise except_orm('ValueError', error)
103 POSTGRES_CONFDELTYPES = {
def last_day_of_current_month():
    """Return the last day of the current month as a 'YYYY-MM-DD' string."""
    today = datetime.date.today()
    last_day = calendar.monthrange(today.year, today.month)[1]
    # Build the whole string from `today` instead of mixing it with
    # time.strftime(): the original read the clock twice, so a call spanning
    # midnight into a new month could combine mismatched year/month and day.
    # monthrange()[1] is always 28..31, so no zero-padding is needed.
    return today.strftime('%Y-%m-') + str(last_day)
def intersect(la, lb):
    """Return the elements of `la` that also occur in `lb`, as a list.

    Order and duplicates from `la` are preserved, matching the original
    filter()-based implementation, but membership is tested against a set
    so the cost is O(len(la) + len(lb)) instead of O(len(la) * len(lb)).
    Assumes the elements of `lb` are hashable (field names / ids here).
    """
    lb_set = set(lb)
    return [x for x in la if x in lb_set]
# ORM-level exception carrying a (name, value) pair, typically a short
# category such as 'ValueError' plus a human-readable message.
119 class except_orm(Exception):
120 def __init__(self, name, value):
    # NOTE(review): this listing skips the original lines 121-122 of
    # __init__; presumably the name/value attributes are stored there —
    # confirm against the full source.
123 self.args = (name, value)
125 class BrowseRecordError(Exception):
128 # Readonly python database object browser
129 class browse_null(object):
134 def __getitem__(self, name):
137 def __getattr__(self, name):
138 return None # XXX: return self ?
146 def __nonzero__(self):
149 def __unicode__(self):
154 # TODO: execute an object method on browse_record_list
# A list of browse_record objects that additionally remembers the context
# it was created with (kept on self.context for later consumers).
156 class browse_record_list(list):
158 def __init__(self, lst, context=None):
    # `lst`: iterable of records seeding the list; `context`: optional
    # context dict. NOTE(review): original lines 159-160 are missing from
    # this listing — verify whether a None context is normalized there.
161 super(browse_record_list, self).__init__(lst)
162 self.context = context
165 class browse_record(object):
166 logger = netsvc.Logger()
168 def __init__(self, cr, uid, id, table, cache, context=None, list_class=None, fields_process=None):
170 table : the object (inherited from orm)
171 context : dictionary with an optional context
173 if fields_process is None:
177 self._list_class = list_class or browse_record_list
182 self._table_name = self._table._name
183 self.__logger = logging.getLogger(
184 'osv.browse_record.' + self._table_name)
185 self._context = context
186 self._fields_process = fields_process
188 cache.setdefault(table._name, {})
189 self._data = cache[table._name]
191 if not (id and isinstance(id, (int, long,))):
192 raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
193 # if not table.exists(cr, uid, id, context):
194 # raise BrowseRecordError(_('Object %s does not exists') % (self,))
196 if id not in self._data:
197 self._data[id] = {'id': id}
201 def __getitem__(self, name):
205 if name not in self._data[self._id]:
206 # build the list of fields we will fetch
208 # fetch the definition of the field which was asked for
209 if name in self._table._columns:
210 col = self._table._columns[name]
211 elif name in self._table._inherit_fields:
212 col = self._table._inherit_fields[name][2]
213 elif hasattr(self._table, str(name)):
214 attr = getattr(self._table, name)
216 if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
217 return lambda *args, **argv: attr(self._cr, self._uid, [self._id], *args, **argv)
221 self.logger.notifyChannel("browse_record", netsvc.LOG_WARNING,
222 "Field '%s' does not exist in object '%s': \n%s" % (
223 name, self, ''.join(traceback.format_exc())))
224 raise KeyError("Field '%s' does not exist in object '%s'" % (
227 # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
229 # gen the list of "local" (ie not inherited) fields which are classic or many2one
230 fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
231 # gen the list of inherited fields
232 inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
233 # complete the field list with the inherited fields which are classic or many2one
234 fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
235 # otherwise we fetch only that field
237 fields_to_fetch = [(name, col)]
238 ids = filter(lambda id: name not in self._data[id], self._data.keys())
240 field_names = map(lambda x: x[0], fields_to_fetch)
241 field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
243 # TODO: improve this, very slow for reports
244 if self._fields_process:
245 lang = self._context.get('lang', 'en_US') or 'en_US'
246 lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
248 raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
249 lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])
251 for field_name, field_column in fields_to_fetch:
252 if field_column._type in self._fields_process:
253 for result_line in field_values:
254 result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
255 if result_line[field_name]:
256 result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)
259 # Where did those ids come from? Perhaps old entries in ir_model_data?
260 self.__logger.warn("No field_values found for ids %s in %s", ids, self)
261 raise KeyError('Field %s not found in %s'%(name, self))
262 # create browse records for 'remote' objects
263 for result_line in field_values:
265 for field_name, field_column in fields_to_fetch:
266 if field_column._type in ('many2one', 'one2one'):
267 if result_line[field_name]:
268 obj = self._table.pool.get(field_column._obj)
269 if isinstance(result_line[field_name], (list, tuple)):
270 value = result_line[field_name][0]
272 value = result_line[field_name]
274 # FIXME: this happen when a _inherits object
275 # overwrite a field of it parent. Need
276 # testing to be sure we got the right
277 # object and not the parent one.
278 if not isinstance(value, browse_record):
280 # In some cases the target model is not available yet, so we must ignore it,
281 # which is safe in most cases, this value will just be loaded later when needed.
282 # This situation can be caused by custom fields that connect objects with m2o without
283 # respecting module dependencies, causing relationships to be connected too soon when
284 # the target is not loaded yet.
286 new_data[field_name] = browse_record(self._cr,
287 self._uid, value, obj, self._cache,
288 context=self._context,
289 list_class=self._list_class,
290 fields_process=self._fields_process)
292 new_data[field_name] = value
294 new_data[field_name] = browse_null()
296 new_data[field_name] = browse_null()
297 elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
298 new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
299 elif field_column._type in ('reference'):
300 if result_line[field_name]:
301 if isinstance(result_line[field_name], browse_record):
302 new_data[field_name] = result_line[field_name]
304 ref_obj, ref_id = result_line[field_name].split(',')
305 ref_id = long(ref_id)
307 obj = self._table.pool.get(ref_obj)
308 new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
310 new_data[field_name] = browse_null()
312 new_data[field_name] = browse_null()
314 new_data[field_name] = result_line[field_name]
315 self._data[result_line['id']].update(new_data)
317 if not name in self._data[self._id]:
318 # How did this happen? Could be a missing model due to custom fields used too soon, see above.
319 self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
320 "Fields to fetch: %s, Field values: %s"%(field_names, field_values))
321 self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
322 "Cached: %s, Table: %s"%(self._data[self._id], self._table))
323 raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
324 return self._data[self._id][name]
326 def __getattr__(self, name):
330 raise AttributeError(e)
332 def __contains__(self, name):
333 return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)
335 def __hasattr__(self, name):
342 return "browse_record(%s, %d)" % (self._table_name, self._id)
344 def __eq__(self, other):
345 if not isinstance(other, browse_record):
347 return (self._table_name, self._id) == (other._table_name, other._id)
349 def __ne__(self, other):
350 if not isinstance(other, browse_record):
352 return (self._table_name, self._id) != (other._table_name, other._id)
354 # we need to define __unicode__ even though we've already defined __str__
355 # because we have overridden __getattr__
356 def __unicode__(self):
    # Python-2-only protocol method: delegate to __str__ and decode, so
    # unicode(record) matches str(record) instead of going through the
    # overridden __getattr__.
357 return unicode(str(self))
360 return hash((self._table_name, self._id))
368 (type returned by postgres when the column was created, type expression to create the column)
372 fields.boolean: 'bool',
373 fields.integer: 'int4',
374 fields.integer_big: 'int8',
378 fields.datetime: 'timestamp',
379 fields.binary: 'bytea',
380 fields.many2one: 'int4',
382 if type(f) in type_dict:
383 f_type = (type_dict[type(f)], type_dict[type(f)])
384 elif isinstance(f, fields.float):
386 f_type = ('numeric', 'NUMERIC')
388 f_type = ('float8', 'DOUBLE PRECISION')
389 elif isinstance(f, (fields.char, fields.reference)):
390 f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
391 elif isinstance(f, fields.selection):
392 if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
393 f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
394 elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
397 f_size = getattr(f, 'size', None) or 16
400 f_type = ('int4', 'INTEGER')
402 f_type = ('varchar', 'VARCHAR(%d)' % f_size)
403 elif isinstance(f, fields.function) and eval('fields.'+(f._type), globals()) in type_dict:
404 t = eval('fields.'+(f._type), globals())
405 f_type = (type_dict[t], type_dict[t])
406 elif isinstance(f, fields.function) and f._type == 'float':
408 f_type = ('numeric', 'NUMERIC')
410 f_type = ('float8', 'DOUBLE PRECISION')
411 elif isinstance(f, fields.function) and f._type == 'selection':
412 f_type = ('text', 'text')
413 elif isinstance(f, fields.function) and f._type == 'char':
414 f_type = ('varchar', 'VARCHAR(%d)' % (f.size))
416 logger = netsvc.Logger()
417 logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
422 class MetaModel(type):
423 """ Metaclass for the Model.
425 This class is used as the metaclass for the Model class to discover
426 the models defined in a module (i.e. without instanciating them).
427 If the automatic discovery is not needed, it is possible to set the
428 model's _register attribute to False.
432 module_to_models = {}
434 def __init__(self, name, bases, attrs):
435 if not self._register:
436 self._register = True
437 super(MetaModel, self).__init__(name, bases, attrs)
440 module_name = self.__module__.split('.')[0]
441 if not hasattr(self, '_module'):
442 self._module = module_name
444 # Remember which models to instanciate for this module.
445 self.module_to_models.setdefault(self._module, []).append(self)
448 class orm_template(object):
449 """ Base class for OpenERP models.
451 OpenERP models are created by inheriting from this class (although
452 not directly; more specifically by inheriting from osv or
453 osv_memory). The constructor is called once, usually directly
454 after the class definition, e.g.:
460 The system will later instanciate the class once per database (on
461 which the class' module is installed).
469 _parent_name = 'parent_id'
470 _parent_store = False
471 _parent_order = False
477 # Mapping from inherits'd field name to triple (m, r, f)
478 # where m is the model from which it is inherits'd,
479 # r is the (local) field towards m,
480 # and f is the _column object itself.
482 # Mapping field name/column_info object
483 # This is similar to _inherit_fields but:
484 # 1. includes self fields,
485 # 2. uses column_info instead of a triple.
491 CONCURRENCY_CHECK_FIELD = '__last_update'
492 def log(self, cr, uid, id, message, secondary=False, context=None):
493 return self.pool.get('res.log').create(cr, uid,
496 'res_model': self._name,
497 'secondary': secondary,
503 def view_init(self, cr, uid, fields_list, context=None):
504 """Override this method to do specific things when a view on the object is opened."""
507 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
    # Stub at the orm_template level: grouped reads are not implemented
    # here; subclasses that support grouping must override this method.
508 raise NotImplementedError(_('The read_group method is not implemented on this object !'))
510 def _field_create(self, cr, context=None):
511 """ Create entries in ir_model_fields for all the model's fields.
513 If necessary, also create an entry in ir_model, and if called from the
514 modules loading scheme (by receiving 'module' in the context), also
515 create entries in ir_model_data (for the model and the fields).
517 - create an entry in ir_model (if there is not already one),
518 - create an entry in ir_model_data (if there is not already one, and if
519 'module' is in the context),
520 - update ir_model_fields with the fields found in _columns
521 (TODO there is some redundancy as _columns is updated from
522 ir_model_fields in __init__).
527 cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
529 cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
530 model_id = cr.fetchone()[0]
531 cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
533 model_id = cr.fetchone()[0]
534 if 'module' in context:
535 name_id = 'model_'+self._name.replace('.', '_')
536 cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
538 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
539 (name_id, context['module'], 'ir.model', model_id)
544 cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
546 for rec in cr.dictfetchall():
547 cols[rec['name']] = rec
549 for (k, f) in self._columns.items():
551 'model_id': model_id,
554 'field_description': f.string.replace("'", " "),
556 'relation': f._obj or '',
557 'view_load': (f.view_load and 1) or 0,
558 'select_level': tools.ustr(f.select or 0),
559 'readonly': (f.readonly and 1) or 0,
560 'required': (f.required and 1) or 0,
561 'selectable': (f.selectable and 1) or 0,
562 'translate': (f.translate and 1) or 0,
563 'relation_field': (f._type=='one2many' and isinstance(f, fields.one2many)) and f._fields_id or '',
565 # When its a custom field,it does not contain f.select
566 if context.get('field_state', 'base') == 'manual':
567 if context.get('field_name', '') == k:
568 vals['select_level'] = context.get('select', '0')
569 #setting value to let the problem NOT occur next time
571 vals['select_level'] = cols[k]['select_level']
574 cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
575 id = cr.fetchone()[0]
577 cr.execute("""INSERT INTO ir_model_fields (
578 id, model_id, model, name, field_description, ttype,
579 relation,view_load,state,select_level,relation_field, translate
581 %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
583 id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
584 vals['relation'], bool(vals['view_load']), 'base',
585 vals['select_level'], vals['relation_field'], bool(vals['translate'])
587 if 'module' in context:
588 name1 = 'field_' + self._table + '_' + k
589 cr.execute("select name from ir_model_data where name=%s", (name1,))
591 name1 = name1 + "_" + str(id)
592 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
593 (name1, context['module'], 'ir.model.fields', id)
596 for key, val in vals.items():
597 if cols[k][key] != vals[key]:
598 cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
600 cr.execute("""UPDATE ir_model_fields SET
601 model_id=%s, field_description=%s, ttype=%s, relation=%s,
602 view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s
604 model=%s AND name=%s""", (
605 vals['model_id'], vals['field_description'], vals['ttype'],
606 vals['relation'], bool(vals['view_load']),
607 vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['model'], vals['name']
612 def _auto_init(self, cr, context=None):
    # Validate the model's _name, then create/refresh its ir_model /
    # ir_model_fields entries via _field_create().
613 raise_on_invalid_object_name(self._name)
614 self._field_create(cr, context=context)
616 def _auto_end(self, cr, context=None):
620 # Goal: try to apply inheritance at the instanciation level and
621 # put objects in the pool var
624 def makeInstance(cls, pool, cr, attributes):
625 """ Instanciate a given model.
627 This class method instanciates the class of some model (i.e. a class
628 deriving from osv or osv_memory). The class might be the class passed
629 in argument or, if it inherits from another class, a class constructed
630 by combining the two classes.
632 The ``attributes`` argument specifies which parent class attributes
635 TODO: the creation of the combined class is repeated at each call of
636 this method. This is probably unnecessary.
639 parent_names = getattr(cls, '_inherit', None)
641 if isinstance(parent_names, (str, unicode)):
642 name = cls._name or parent_names
643 parent_names = [parent_names]
648 raise TypeError('_name is mandatory in case of multiple inheritance')
650 for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
651 parent_class = pool.get(parent_name).__class__
652 if not pool.get(parent_name):
653 raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
654 'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
657 new = copy.copy(getattr(pool.get(parent_name), s))
659 # Don't _inherit custom fields.
663 if hasattr(new, 'update'):
664 new.update(cls.__dict__.get(s, {}))
665 elif s=='_constraints':
666 for c in cls.__dict__.get(s, []):
668 for c2 in range(len(new)):
669 #For _constraints, we should check field and methods as well
670 if new[c2][2]==c[2] and (new[c2][0] == c[0] \
671 or getattr(new[c2][0],'__name__', True) == \
672 getattr(c[0],'__name__', False)):
673 # If new class defines a constraint with
674 # same function name, we let it override
682 new.extend(cls.__dict__.get(s, []))
684 cls = type(name, (cls, parent_class), dict(nattr, _register=False))
685 obj = object.__new__(cls)
686 obj.__init__(pool, cr)
690 """ Register this model.
692 This doesn't create an instance but simply register the model
693 as being part of the module where it is defined.
695 TODO make it possible to not even have to call the constructor
700 # Set the module name (e.g. base, sale, accounting, ...) on the class.
701 module = cls.__module__.split('.')[0]
702 if not hasattr(cls, '_module'):
705 # Remember which models to instanciate for this module.
706 module_class_list.setdefault(cls._module, []).append(cls)
708 # Since we don't return an instance here, the __init__
709 # method won't be called.
712 def __init__(self, pool, cr):
713 """ Initialize a model and make it part of the given registry."""
714 pool.add(self._name, self)
717 if not self._name and not hasattr(self, '_inherit'):
718 name = type(self).__name__.split('.')[0]
719 msg = "The class %s has to have a _name attribute" % name
721 logger = netsvc.Logger()
722 logger.notifyChannel('orm', netsvc.LOG_ERROR, msg)
723 raise except_orm('ValueError', msg)
725 if not self._description:
726 self._description = self._name
728 self._table = self._name.replace('.', '_')
730 def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
731 """Fetch records as objects allowing to use dot notation to browse fields and relations
733 :param cr: database cursor
734 :param user: current user id
735 :param select: id or list of ids
736 :param context: context arguments, like lang, time zone
737 :rtype: object or list of objects requested
740 self._list_class = list_class or browse_record_list
742 # need to accepts ints and longs because ids coming from a method
743 # launched by button in the interface have a type long...
744 if isinstance(select, (int, long)):
745 return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
746 elif isinstance(select, list):
747 return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
751 def __export_row(self, cr, uid, row, fields, context=None):
755 def check_type(field_type):
756 if field_type == 'float':
758 elif field_type == 'integer':
760 elif field_type == 'boolean':
764 def selection_field(in_field):
765 col_obj = self.pool.get(in_field.keys()[0])
766 if f[i] in col_obj._columns.keys():
767 return col_obj._columns[f[i]]
768 elif f[i] in col_obj._inherits.keys():
769 selection_field(col_obj._inherits)
774 data = map(lambda x: '', range(len(fields)))
776 for fpos in range(len(fields)):
785 model_data = self.pool.get('ir.model.data')
786 data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
788 d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
790 r = '%s.%s' % (d['module'], d['name'])
797 # To display external name of selection field when its exported
799 if f[i] in self._columns.keys():
800 cols = self._columns[f[i]]
801 elif f[i] in self._inherit_fields.keys():
802 cols = selection_field(self._inherits)
803 if cols and cols._type == 'selection':
804 sel_list = cols.selection
805 if r and type(sel_list) == type([]):
806 r = [x[1] for x in sel_list if r==x[0]]
807 r = r and r[0] or False
809 if f[i] in self._columns:
810 r = check_type(self._columns[f[i]]._type)
811 elif f[i] in self._inherit_fields:
812 r = check_type(self._inherit_fields[f[i]][2]._type)
813 data[fpos] = r or False
815 if isinstance(r, (browse_record_list, list)):
817 fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
820 if [x for x in fields2 if x]:
824 lines2 = self.__export_row(cr, uid, row2, fields2,
827 for fpos2 in range(len(fields)):
828 if lines2 and lines2[0][fpos2]:
829 data[fpos2] = lines2[0][fpos2]
833 name_relation = self.pool.get(rr._table_name)._rec_name
834 if isinstance(rr[name_relation], browse_record):
835 rr = rr[name_relation]
836 rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
837 rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
838 dt += tools.ustr(rr_name or '') + ','
848 if isinstance(r, browse_record):
849 r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
850 r = r and r[0] and r[0][1] or ''
851 data[fpos] = tools.ustr(r or '')
852 return [data] + lines
854 def export_data(self, cr, uid, ids, fields_to_export, context=None):
856 Export fields for selected objects
858 :param cr: database cursor
859 :param uid: current user id
860 :param ids: list of ids
861 :param fields_to_export: list of fields
862 :param context: context arguments, like lang, time zone
863 :rtype: dictionary with a *datas* matrix
865 This method is used when exporting data via client menu
870 cols = self._columns.copy()
871 for f in self._inherit_fields:
872 cols.update({f: self._inherit_fields[f][2]})
874 if x=='.id': return [x]
875 return x.replace(':id','/id').replace('.id','/.id').split('/')
876 fields_to_export = map(fsplit, fields_to_export)
878 for row in self.browse(cr, uid, ids, context):
879 datas += self.__export_row(cr, uid, row, fields_to_export, context)
880 return {'datas': datas}
882 def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
884 Import given data in given module
886 :param cr: database cursor
887 :param uid: current user id
888 :param fields: list of fields
889 :param data: data to import
890 :param mode: 'init' or 'update' for record creation
891 :param current_module: module name
892 :param noupdate: flag for record creation
893 :param context: context arguments, like lang, time zone,
894 :param filename: optional file to store partial import state for recovery
897 This method is used when importing data via client menu.
899 Example of fields to import for a sale.order::
902 partner_id, (=name_search)
903 order_line/.id, (=database_id)
905 order_line/product_id/id, (=xml id)
906 order_line/price_unit,
907 order_line/product_uom_qty,
908 order_line/product_uom/id (=xml_id)
912 def _replace_field(x):
913 x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
914 return x.replace(':id','/id').split('/')
915 fields = map(_replace_field, fields)
916 logger = netsvc.Logger()
917 ir_model_data_obj = self.pool.get('ir.model.data')
919 # mode: id (XML id) or .id (database id) or False for name_get
920 def _get_id(model_name, id, current_module=False, mode='id'):
923 obj_model = self.pool.get(model_name)
924 ids = obj_model.search(cr, uid, [('id', '=', int(id))])
926 raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, id))
929 module, xml_id = id.rsplit('.', 1)
931 module, xml_id = current_module, id
932 record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
933 ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id'])
934 if not ir_model_data:
935 raise ValueError('No references to %s.%s' % (module, xml_id))
936 id = ir_model_data[0]['res_id']
938 obj_model = self.pool.get(model_name)
939 ids = obj_model.name_search(cr, uid, id, operator='=', context=context)
941 raise ValueError('No record found for %s' % (id,))
946 # datas: a list of records, each record is defined by a list of values
947 # prefix: a list of prefix fields ['line_ids']
948 # position: the line to process, skip is False if it's the first line of the current record
950 # (res, position, warning, res_id) with
951 # res: the record for the next line to process (including it's one2many)
952 # position: the new position for the next line
953 # res_id: the ID of the record if it's a modification
954 def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0, skip=0):
955 line = datas[position]
963 for i in range(len(fields)):
966 raise Exception(_('Please check that all your lines have %d columns.'
967 'Stopped around line %d having %d columns.') % \
968 (len(fields), position+2, len(line)))
973 if field[:len(prefix)] <> prefix:
978 # ID of the record using a XML ID
979 if field[len(prefix)]=='id':
981 data_res_id = _get_id(model_name, line[i], current_module, 'id')
987 # ID of the record using a database ID
988 elif field[len(prefix)]=='.id':
989 data_res_id = _get_id(model_name, line[i], current_module, '.id')
992 # recursive call for getting children and returning [(0,0,{})] or [(1,ID,{})]
993 if fields_def[field[len(prefix)]]['type']=='one2many':
994 if field[len(prefix)] in done:
996 done[field[len(prefix)]] = True
997 relation_obj = self.pool.get(fields_def[field[len(prefix)]]['relation'])
998 newfd = relation_obj.fields_get( cr, uid, context=context )
1002 while pos < len(datas):
1003 res2 = process_liness(self, datas, prefix + [field[len(prefix)]], current_module, relation_obj._name, newfd, pos, first)
1006 (newrow, pos, w2, data_res_id2, xml_id2) = res2
1007 nbrmax = max(nbrmax, pos)
1010 if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
1012 res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )
1014 elif fields_def[field[len(prefix)]]['type']=='many2one':
1015 relation = fields_def[field[len(prefix)]]['relation']
1016 if len(field) == len(prefix)+1:
1019 mode = field[len(prefix)+1]
1020 res = _get_id(relation, line[i], current_module, mode)
1022 elif fields_def[field[len(prefix)]]['type']=='many2many':
1023 relation = fields_def[field[len(prefix)]]['relation']
1024 if len(field) == len(prefix)+1:
1027 mode = field[len(prefix)+1]
1029 # TODO: improve this by using csv.csv_reader
1031 for db_id in line[i].split(config.get('csv_internal_sep')):
1032 res.append( _get_id(relation, db_id, current_module, mode) )
1035 elif fields_def[field[len(prefix)]]['type'] == 'integer':
1036 res = line[i] and int(line[i]) or 0
1037 elif fields_def[field[len(prefix)]]['type'] == 'boolean':
1038 res = line[i].lower() not in ('0', 'false', 'off')
1039 elif fields_def[field[len(prefix)]]['type'] == 'float':
1040 res = line[i] and float(line[i]) or 0.0
1041 elif fields_def[field[len(prefix)]]['type'] == 'selection':
1042 for key, val in fields_def[field[len(prefix)]]['selection']:
1043 if tools.ustr(line[i]) in [tools.ustr(key), tools.ustr(val)]:
1046 if line[i] and not res:
1047 logger.notifyChannel("import", netsvc.LOG_WARNING,
1048 _("key '%s' not found in selection field '%s'") % \
1049 (tools.ustr(line[i]), tools.ustr(field[len(prefix)])))
1050 warning += [_("Key/value '%s' not found in selection field '%s'") % (tools.ustr(line[i]), tools.ustr(field[len(prefix)]))]
1055 row[field[len(prefix)]] = res or False
1057 result = (row, nbrmax, warning, data_res_id, xml_id)
1060 fields_def = self.fields_get(cr, uid, context=context)
1062 if config.get('import_partial', False) and filename:
1063 data = pickle.load(file(config.get('import_partial')))
1066 while position<len(datas):
1069 (res, position, warning, res_id, xml_id) = \
1070 process_liness(self, datas, [], current_module, self._name, fields_def, position=position)
1073 return (-1, res, 'Line ' + str(position) +' : ' + '!\n'.join(warning), '')
1076 ir_model_data_obj._update(cr, uid, self._name,
1077 current_module, res, mode=mode, xml_id=xml_id,
1078 noupdate=noupdate, res_id=res_id, context=context)
1079 except Exception, e:
1080 return (-1, res, 'Line ' + str(position) +' : ' + str(e), '')
1082 if config.get('import_partial', False) and filename and (not (position%100)):
1083 data = pickle.load(file(config.get('import_partial')))
1084 data[filename] = position
1085 pickle.dump(data, file(config.get('import_partial'), 'wb'))
1086 if context.get('defer_parent_store_computation'):
1087 self._parent_store_compute(cr)
1090 if context.get('defer_parent_store_computation'):
1091 self._parent_store_compute(cr)
1092 return (position, 0, 0, 0)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """
        Read records with given ids with the given fields

        :param cr: database cursor
        :param user: current user id
        :param ids: id or list of the ids of the records to read
        :param fields: optional list of field names to return (default: all fields would be returned)
        :type fields: list (example ['field_name_1', ...])
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang``, ``context_tz`` to alter the results of the call.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: list of dictionaries((dictionary per record asked)) with requested field values
        :rtype: [{'name_of_the_field': value, ...}, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        # Abstract at this level: concrete ORM models provide the implementation.
        raise NotImplementedError(_('The read method is not implemented on this object !'))
1118 def get_invalid_fields(self, cr, uid):
1119 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run this model's ``_constraints`` against the given records,
        collecting a translated message for each failing constraint and
        recording the involved field names in ``self._invalids``."""
        context = context or {}
        lng = context.get('lang', False) or 'en_US'
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            # Each constraint is a (checker_function, message, field_names) triple.
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # Check presence of __call__ directly instead of using
                # callable() because it will be deprecated as of Python 3.0
                if hasattr(msg, '__call__'):
                    # A callable message may return a plain string or a
                    # (format_string, params) tuple.
                    tmp_msg = msg(self, cr, uid, ids, context=context)
                    if isinstance(tmp_msg, tuple):
                        tmp_msg, params = tmp_msg
                        translated_msg = tmp_msg % params
                        translated_msg = tmp_msg
                    # Static messages are looked up as 'constraint' translations.
                    translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
                    _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                self._invalids.update(fields)
            raise except_orm('ValidateError', '\n'.join(error_msgs))
            self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """
        Returns default values for the fields in fields_list.

        :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
        :type fields_list: list
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                        It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                        or override a default value for a field.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
        """
        # trigger view init hook
        self.view_init(cr, uid, fields_list, context)
        # get the default values for the inherited fields
        for t in self._inherits.keys():
            defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                # A model-level default may be a callable or a plain value.
                if callable(self._defaults[f]):
                    defaults[f] = self._defaults[f](self, cr, uid, context)
                    defaults[f] = self._defaults[f]
            # Property fields take their default from ir.property instead.
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            if isinstance(fld_def, fields.property):
                property_obj = self.pool.get('ir.property')
                prop_value = property_obj.get(cr, uid, f, self._name, context=context)
                if isinstance(prop_value, (browse_record, browse_null)):
                    defaults[f] = prop_value.id
                    defaults[f] = prop_value
            if f not in defaults:
        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                # Relational defaults are only kept when the target records still exist.
                if fld_def._type in ('many2one', 'one2one'):
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                if fld_def._type in ('many2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type in ('one2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                defaults[field] = field_value
        # get the default values from the context
        for key in context or {}:
            if key.startswith('default_') and (key[8:] in fields_list):
                defaults[key[8:]] = context[key]
    def perm_read(self, cr, user, ids, context=None, details=True):
        """Placeholder: not implemented at this level; concrete ORM models
        override it. Always raises NotImplementedError here."""
        raise NotImplementedError(_('The perm_read method is not implemented on this object !'))
    def unlink(self, cr, uid, ids, context=None):
        """Placeholder: not implemented at this level; concrete ORM models
        override it. Always raises NotImplementedError here."""
        raise NotImplementedError(_('The unlink method is not implemented on this object !'))
    def write(self, cr, user, ids, vals, context=None):
        """Placeholder: not implemented at this level; concrete ORM models
        override it. Always raises NotImplementedError here."""
        raise NotImplementedError(_('The write method is not implemented on this object !'))
    def create(self, cr, user, vals, context=None):
        """Placeholder: not implemented at this level; concrete ORM models
        override it. Always raises NotImplementedError here."""
        raise NotImplementedError(_('The create method is not implemented on this object !'))
    def fields_get_keys(self, cr, user, context=None):
        """Collect the names of all fields: this model's own columns plus
        the fields of every ``_inherits`` parent model."""
        res = self._columns.keys()
        # TODO I believe this loop can be replace by
        # res.extend(self._inherit_fields.key())
        for parent in self._inherits:
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """ Return the definition of each field.

        The returned value is a dictionary (indiced by field name) of
        dictionaries. The _inherits'd fields are included. The string,
        help, and selection (if present) attributes are translated.
        """
        translation_obj = self.pool.get('ir.translation')
        # Start from the parents' fields so local columns override them.
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

        for f, field in self._columns.iteritems():
            # Only keep fields the caller asked for (when allfields is given).
            if allfields and f not in allfields:

            res[f] = fields.field_to_dict(self, cr, user, context, field)

            # Without write access the field is presented read-only everywhere.
            if not write_access:
                res[f]['readonly'] = True
                res[f]['states'] = {}

            # Translate label, tooltip and selection values.
            if 'string' in res[f]:
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
                    res[f]['string'] = res_trans
            if 'help' in res[f]:
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
                    res[f]['help'] = help_trans
            if 'selection' in res[f]:
                if isinstance(field.selection, (tuple, list)):
                    sel = field.selection
                    for key, val in sel:
                        val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                        sel2.append((key, val2 or val))
                    res[f]['selection'] = sel2
1322 # Overload this method if you need a window title which depends on the context
1324 def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
    def __view_look_dom(self, cr, user, node, view_id, context=None):
        """Walk a view-architecture node: collect the field definitions it
        references, and post-process the DOM (group-based visibility,
        embedded sub-views, cached selection lists, translations)."""
            # encode() helper: lxml expects byte strings, not unicode.
            if isinstance(s, unicode):
                return s.encode('utf8')

        def check_group(node):
            """ Set invisible to true if the user is not in the specified groups. """
            if node.get('groups'):
                groups = node.get('groups').split(',')
                access_pool = self.pool.get('ir.model.access')
                can_see = any(access_pool.check_groups(cr, user, group) for group in groups)
                    node.set('invisible', '1')
                    if 'attrs' in node.attrib:
                        del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # Inline sub-view on another model: rebuild an XML snippet from
                # the child <field> nodes and process it against that model.
                    if f.tag in ('field'):
                        xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}
            if node.get('name'):
                # Resolve the column, locally or through _inherits.
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                    column = self._inherit_fields[node.get('name')][2]
                    relation = self.pool.get(column._obj)
                        if f.tag in ('form', 'tree', 'graph'):
                            # Embedded sub-views are processed on the related model.
                            ctx = context.copy()
                            ctx['base_model_name'] = self._name
                            xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                            views[str(f.tag)] = {
                    attrs = {'views': views}
                    if node.get('widget') and node.get('widget') == 'selection':
                        # Prepare the cached selection list for the client. This needs to be
                        # done even when the field is invisible to the current user, because
                        # other events could need to change its value to any of the selectable ones
                        # (such as on_change events, refreshes, etc.)
                        # If domain and context are strings, we keep them for client-side, otherwise
                        # we evaluate them server-side to consider them when generating the list of
                        # TODO: find a way to remove this hack, by allow dynamic domains
                        if column._domain and not isinstance(column._domain, basestring):
                            dom = column._domain
                        dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                        search_context = dict(context)
                        if column._context and not isinstance(column._context, basestring):
                            search_context.update(column._context)
                        attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                        # Offer an empty choice when the field is not required.
                        if (node.get('required') and not int(node.get('required'))) or not column.required:
                            attrs['selection'].append((False, ''))
            fields[node.get('name')] = attrs

        elif node.tag in ('form', 'tree'):
            result = self.view_header_get(cr, user, False, node.tag, context)
                node.set('string', result)

        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if 'lang' in context:
            # Translate the user-visible attributes of the node.
            if node.get('string') and not result:
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                    node.set('string', trans)
            if node.get('confirm'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm'))
                    node.set('confirm', trans)
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
                    node.set('sum', trans)
            if node.get('help'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('help'))
                    node.set('help', trans)

        # Recurse into children (filters/separators inside a field included).
        if children or (node.tag == 'field' and f.tag in ('filter','separator')):
            fields.update(self.__view_look_dom(cr, user, f, view_id, context))
    def _disable_workflow_buttons(self, cr, user, node):
        """ Set the buttons in node to readonly if the user can't activate them. """
        # admin user can always activate workflow buttons
        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        # Only workflow buttons (type != 'object') are access-checked here.
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            # Groups allowed to fire this signal, per the workflow transitions.
            cr.execute("""SELECT DISTINCT t.group_id
                        INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                        INNER JOIN wkf_transition t ON (t.act_to = a.id)
                              AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # Clickable when unrestricted, or when the user shares a group.
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            button.set('readonly', str(int(not can_click)))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        """Process a complete view architecture: collect field definitions,
        disable unauthorized workflow buttons, serialize the arch back to a
        string, and raise when the view references unknown fields."""
        fields_def = self.__view_look_dom(cr, user, node, view_id, context=context)
        node = self._disable_workflow_buttons(cr, user, node)
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        if node.tag == 'diagram':
            # Diagram views take their field definitions from the node/arrow models.
            if node.getchildren()[0].tag == 'node':
                node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, fields_def.keys(), context)
                fields.update(node_fields)
            if node.getchildren()[1].tag == 'arrow':
                arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, fields_def.keys(), context)
                fields.update(arrow_fields)
            fields = self.fields_get(cr, user, fields_def.keys(), context)
        for field in fields_def:
                # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
                fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
                # Unknown field: report every view part that mentions it.
                cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
                res = cr.fetchall()[:]
                res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
                msg = "\n * ".join([r[0] for r in res])
                msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
                netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
                raise except_orm('View error', msg)
    def __get_default_calendar_view(self):
        """Generate a default calendar view (For internal use only).
        """
        arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
                '<calendar string="%s"') % (self._description)

        # Pick a start-date field when the configured one is absent.
        if (self._date_name not in self._columns):
            for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                if dt in self._columns:
                    self._date_name = dt

                raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))

        arch += ' date_start="%s"' % (self._date_name)

        # First matching color field wins.
        for color in ["user_id", "partner_id", "x_user_id", "x_partner_id"]:
            if color in self._columns:
                arch += ' color="' + color + '"'

        dt_stop_flag = False

        for dt_stop in ["date_stop", "date_end", "x_date_stop", "x_date_end"]:
            if dt_stop in self._columns:
                arch += ' date_stop="' + dt_stop + '"'

        # Fall back to a duration field when no stop-date field exists.
        if not dt_stop_flag:
            for dt_delay in ["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"]:
                if dt_delay in self._columns:
                    arch += ' date_delay="' + dt_delay + '"'

                ' <field name="%s"/>\n'
                '</calendar>') % (self._rec_name)
1561 def __get_default_search_view(self, cr, uid, context=None):
1562 form_view = self.fields_view_get(cr, uid, False, 'form', context=context)
1563 tree_view = self.fields_view_get(cr, uid, False, 'tree', context=context)
1565 fields_to_search = set()
1566 # TODO it seems _all_columns could be used instead of fields_get (no need for translated fields info)
1567 fields = self.fields_get(cr, uid, context=context)
1568 for field in fields:
1569 if fields[field].get('select'):
1570 fields_to_search.add(field)
1571 for view in (form_view, tree_view):
1572 view_root = etree.fromstring(view['arch'])
1573 # Only care about select=1 in xpath below, because select=2 is covered
1574 # by the custom advanced search in clients
1575 fields_to_search = fields_to_search.union(view_root.xpath("//field[@select=1]/@name"))
1577 tree_view_root = view_root # as provided by loop above
1578 search_view = etree.Element("search", attrib={'string': tree_view_root.get("string", "")})
1579 field_group = etree.Element("group")
1580 search_view.append(field_group)
1582 for field_name in fields_to_search:
1583 field_group.append(etree.Element("field", attrib={'name': field_name}))
1585 return etree.tostring(search_view, encoding="utf-8").replace('\t', '')
1588 # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Get the detailed composition of the requested view like fields, model, view architecture

        :param cr: database cursor
        :param user: current user id
        :param view_id: id of the view or None
        :param view_type: type of the view to return if view_id is None ('form', tree', ...)
        :param context: context arguments, like lang, time zone
        :param toolbar: true to include contextual actions
        :param submenu: example (portal_project module)
        :return: dictionary describing the composition of the requested view (including inherited views and extensions)
        :raise AttributeError:
                            * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
                            * if some tag other than 'position' is found in parent view
        :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
        """
            # encode() helper: lxml expects byte strings, not unicode.
            if isinstance(s, unicode):
                return s.encode('utf8')

        def raise_view_error(error_msg, child_view_id):
            # Report an inheritance problem, naming the offending view's xml_id.
            view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
            raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
                                 %  (child_view.xml_id, self._name, error_msg))

        def locate(source, spec):
            """ Locate a node in a source (parent) architecture.

            Given a complete source (parent) architecture (i.e. the field
            `arch` in a view), and a 'spec' node (a node in an inheriting
            view that specifies the location in the source view of what
            should be changed), return (if it exists) the node in the
            source view matching the specification.

            :param source: a parent architecture to modify
            :param spec: a modifying node in an inheriting view
            :return: a node in the source matching the spec
            """
            if spec.tag == 'xpath':
                nodes = source.xpath(spec.get('expr'))
                return nodes[0] if nodes else None
            elif spec.tag == 'field':
                # Only compare the field name: a field can be only once in a given view
                # at a given level (and for multilevel expressions, we should use xpath
                # inheritance spec anyway).
                for node in source.getiterator('field'):
                    if node.get('name') == spec.get('name'):
                # Generic tag: every (non-'position') attribute must match.
                for node in source.getiterator(spec.tag):
                    for attr in spec.attrib:
                        if attr != 'position' and (not node.get(attr) or node.get(attr) != spec.get(attr)):

        def apply_inheritance_specs(source, specs_arch, inherit_id=None):
            """ Apply an inheriting view.

            Apply to a source architecture all the spec nodes (i.e. nodes
            describing where and what changes to apply to some parent
            architecture) given by an inheriting view.

            :param source: a parent architecture to modify
            :param specs_arch: a modifying architecture in an inheriting view
            :param inherit_id: the database id of the inheriting view
            :return: a modified source where the specs are applied
            """
            specs_tree = etree.fromstring(encode(specs_arch))
            # Queue of specification nodes (i.e. nodes describing where and
            # changes to apply to some parent architecture).
            specs = [specs_tree]
                if isinstance(spec, SKIPPED_ELEMENT_TYPES):
                if spec.tag == 'data':
                    specs += [ c for c in specs_tree ]
                node = locate(source, spec)
                if node is not None:
                    pos = spec.get('position', 'inside')
                    if pos == 'replace':
                        # Replacing the root swaps in the spec's first child.
                        if node.getparent() is None:
                            source = copy.deepcopy(spec[0])
                                node.addprevious(child)
                            node.getparent().remove(node)
                    elif pos == 'attributes':
                        # Set or delete individual attributes on the located node.
                        for child in spec.getiterator('attribute'):
                            attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                                node.set(attribute[0], attribute[1])
                                del(node.attrib[attribute[0]])
                        sib = node.getnext()
                        elif pos == 'after':
                                sib.addprevious(child)
                        elif pos == 'before':
                                node.addprevious(child)
                            raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
                        ' %s="%s"' % (attr, spec.get(attr))
                        for attr in spec.attrib
                        if attr != 'position'
                    tag = "<%s%s>" % (spec.tag, attrs)
                    raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)

        def apply_view_inheritance(source, inherit_id):
            """ Apply all the (directly and indirectly) inheriting views.

            :param source: a parent architecture to modify (with parent
                           modifications already applied)
            :param inherit_id: the database id of the parent view
            :return: a modified source where all the modifying architecture
            """
            # get all views which inherit from (ie modify) this view
            cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
            sql_inherit = cr.fetchall()
            for (inherit, id) in sql_inherit:
                source = apply_inheritance_specs(source, inherit, id)
                # Recurse: children of this inheriting view apply on top.
                source = apply_view_inheritance(source, id)

        result = {'type': view_type, 'model': self._name}

        parent_view_model = None
        view_ref = context.get(view_type + '_view_ref')
        # Search for a root (i.e. without any parent) view.
        if view_ref and not view_id:
            # A '<module>.<xml_id>' reference resolved through ir.model.data.
                module, view_ref = view_ref.split('.', 1)
                cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
                view_ref_res = cr.fetchone()
                    view_id = view_ref_res[0]

            cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                          WHERE id=%s""", (view_id,))
            cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                          WHERE model=%s AND type=%s AND inherit_id IS NULL
                          ORDER BY priority""", (self._name, view_type))
            sql_res = cr.dictfetchone()

            # Climb to the root view of the inheritance chain.
            view_id = sql_res['inherit_id'] or sql_res['id']
            parent_view_model = sql_res['model']
            if not sql_res['inherit_id']:

        # if a view was found
            result['type'] = sql_res['type']
            result['view_id'] = sql_res['id']

            source = etree.fromstring(encode(sql_res['arch']))
            result['arch'] = apply_view_inheritance(source, result['view_id'])

            result['name'] = sql_res['name']
            result['field_parent'] = sql_res['field_parent'] or False
            # otherwise, build some kind of default view
            if view_type == 'form':
                # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
                res = self.fields_get(cr, user, context=context)
                xml = '<?xml version="1.0" encoding="utf-8"?> ' \
                      '<form string="%s">' % (self._description,)
                    if res[x]['type'] not in ('one2many', 'many2many'):
                        xml += '<field name="%s"/>' % (x,)
                        if res[x]['type'] == 'text':
            elif view_type == 'tree':
                # Default list view shows only the record-name column.
                _rec_name = self._rec_name
                if _rec_name not in self._columns:
                    _rec_name = self._columns.keys()[0]
                xml = '<?xml version="1.0" encoding="utf-8"?>' \
                      '<tree string="%s"><field name="%s"/></tree>' \
                      % (self._description, _rec_name)

            elif view_type == 'calendar':
                xml = self.__get_default_calendar_view()

            elif view_type == 'search':
                xml = self.__get_default_search_view(cr, user, context)

                # what happens here, graph case?
                raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
            result['arch'] = etree.fromstring(encode(xml))
            result['name'] = 'default'
            result['field_parent'] = False
            result['view_id'] = 0

        if parent_view_model != self._name:
            ctx = context.copy()
            ctx['base_model_name'] = parent_view_model

        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
        result['arch'] = xarch
        result['fields'] = xfields

            if context and context.get('active_id', False):
                data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
                    act_id = data_menu.id
                    data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
                    result['submenu'] = getattr(data_action, 'menus', False)

            # Strip heavyweight report payloads from toolbar action records.
            for key in ('report_sxw_content', 'report_rml_content',
                        'report_sxw', 'report_rml',
                        'report_sxw_content_data', 'report_rml_content_data'):
            ir_values_obj = self.pool.get('ir.values')
            resprint = ir_values_obj.get(cr, user, 'action',
                                         'client_print_multi', [(self._name, False)], False,
            resaction = ir_values_obj.get(cr, user, 'action',
                                          'client_action_multi', [(self._name, False)], False,
            resrelate = ir_values_obj.get(cr, user, 'action',
                                          'client_action_relate', [(self._name, False)], False,
            resprint = map(clean, resprint)
            resaction = map(clean, resaction)
            # 'multi' actions are handled separately by the client.
            resaction = filter(lambda x: not x.get('multi', False), resaction)
            resprint = filter(lambda x: not x.get('multi', False), resprint)
            resrelate = map(lambda x: x[2], resrelate)

            for x in resprint + resaction + resrelate:
                x['string'] = x['name']

            result['toolbar'] = {
                'action': resaction,
1876 _view_look_dom_arch = __view_look_dom_arch
    def search_count(self, cr, user, args, context=None):
        """Count the records matching the ``args`` search domain, by
        delegating to :meth:`search` with ``count=True``."""
        res = self.search(cr, user, args, context=context, count=True)
        if isinstance(res, list):
    def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
        """
        Search for records based on a search domain.

        :param cr: database cursor
        :param user: current user id
        :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
        :param offset: optional number of results to skip in the returned values (default: 0)
        :param limit: optional max number of records to return (default: **None**)
        :param order: optional columns to sort by (default: self._order=id )
        :param context: optional context arguments, like lang, time zone
        :type context: dictionary
        :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
        :return: id or list of ids of records matching the criteria
        :rtype: integer or list of integers
        :raise AccessError: * if user tries to bypass access rules for read on the requested object.

        **Expressing a search domain (args)**

        Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:

            * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
            * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
              The semantics of most of these operators are obvious.
              The ``child_of`` operator will look for records who are children or grand-children of a given record,
              according to the semantics of this model (i.e following the relationship field named by
              ``self._parent_name``, by default ``parent_id``.
            * **value** must be a valid value to compare with the values of **field_name**, depending on its type.

        Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
        These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
        Be very careful about this when you combine them the first time.

        Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::

            [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))

        The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::

            (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
        """
        return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        # Abstract at this level: concrete ORM models provide the implementation.
        raise NotImplementedError(_('The search method is not implemented on this object !'))
    def name_get(self, cr, user, ids, context=None):
        # Build the textual representation of each requested record from
        # ``self._rec_name`` via a read() call.
        # [excerpt: docstring opening elided]
        :param cr: database cursor
        :param user: current user id
        :param ids: list of ids
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: tuples with the text representation of requested objects for to-many relationships
        # [excerpt: remainder of docstring elided]
        if isinstance(ids, (int, long)):
            # [excerpt: body elided — presumably wraps the scalar id in a list; confirm against full source]
        return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
            [self._rec_name], context, load='_classic_write')]
    def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
        # Public entry point; delegates to _name_search (same uid for both steps).
        # [excerpt: docstring opening elided]
        Search for records and their display names according to a search domain.

        :param cr: database cursor
        :param user: current user id
        :param name: object name to search
        :param args: list of tuples specifying search criteria [('field_name', 'operator', 'value'), ...]
        :param operator: operator for search criterion
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :param limit: optional max number of records to return
        :return: list of object names matching the search criteria, used to provide completion for to-many relationships

        This method is equivalent of :py:meth:`~osv.osv.osv.search` on **name** + :py:meth:`~osv.osv.osv.name_get` on the result.
        See :py:meth:`~osv.osv.osv.search` for an explanation of the possible values for the search domain specified in **args**.
        # [excerpt: docstring closing elided]
        return self._name_search(cr, user, name, args, operator, context, limit)
    def name_create(self, cr, uid, name, context=None):
        # [excerpt: docstring opening quotes elided]
        Creates a new record by calling :py:meth:`~osv.osv.osv.create` with only one
        value provided: the name of the new record (``_rec_name`` field).
        The new record will also be initialized with any default values applicable
        to this model, or provided through the context. The usual behavior of
        :py:meth:`~osv.osv.osv.create` applies.
        Similarly, this method may raise an exception if the model has multiple
        required fields and some do not have default values.

        :param name: name of the record to create

        :return: the :py:meth:`~osv.osv.osv.name_get` value for the newly-created record.
        # [excerpt: docstring closing quotes elided]
        rec_id = self.create(cr, uid, {self._rec_name: name}, context);
        return self.name_get(cr, uid, [rec_id], context)[0]
1999 # private implementation of name_search, allows passing a dedicated user for the name_get part to
2000 # solve some access rights issues
    def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
        # Search on _rec_name with ``user``, then run name_get with
        # ``name_get_uid`` (when given) so display names can be computed under
        # different access rights (see comment above this method).
        # [excerpt: argument normalisation lines elided (internal 2002-2007)]
        args += [(self._rec_name, operator, name)]
        access_rights_uid = name_get_uid or user
        ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
        res = self.name_get(cr, access_rights_uid, ids, context)
        # [excerpt: return statement elided — presumably returns ``res``; confirm against full source]
2014 def copy(self, cr, uid, id, default=None, context=None):
2015 raise NotImplementedError(_('The copy method is not implemented on this object !'))
2017 def exists(self, cr, uid, id, context=None):
2018 raise NotImplementedError(_('The exists method is not implemented on this object !'))
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        # Read the translated labels (field "string" attributes) of this model
        # for the requested languages, including labels coming from _inherits
        # parents via a recursive call.
        # [excerpt: result-dict initialisation elided]
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
        # [excerpt: default-fields guard elided]
        fields = self._columns.keys() + self._inherit_fields.keys()
        #FIXME: collect all calls to _get_source into one SQL call.
        # [excerpt: per-language loop header elided]
        res[lang] = {'code': lang}
        # [excerpt: per-field loop header elided]
        if f in self._columns:
            res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
            # [excerpt: found-translation guard elided]
            res[lang][f] = res_trans
            # [excerpt: else-branch header elided — falls back to the untranslated label]
            res[lang][f] = self._columns[f].string
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
            # [excerpt: loop/guard lines elided (internal 2039-2040)]
            res[lang]['code'] = lang
            for f in res2[lang]:
                res[lang][f] = res2[lang][f]
        # [excerpt: return statement elided]
    def write_string(self, cr, uid, id, langs, vals, context=None):
        # Store translated labels (field "string" attributes) for the given
        # languages, cascading to _inherits parents for inherited fields.
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
        #FIXME: try to only call the translation in one SQL
        # [excerpt: language/field loop headers elided (internal 2049-2050)]
        if field in self._columns:
            src = self._columns[field].string
            self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), vals)
            # [excerpt: guard elided (internal 2056)]
            self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
        # [excerpt: return statement elided]
2060 def _check_removed_columns(self, cr, log=False):
2061 raise NotImplementedError()
    def _add_missing_default_values(self, cr, uid, values, context=None):
        # Complete *values* with default values for every column (own or
        # inherited) the caller did not provide, normalising many2many/one2many
        # defaults into the (6, 0, ids) / (0, 0, vals) command format.
        missing_defaults = []
        avoid_tables = [] # avoid overriding inherited values when parent is set
        for tables, parent_field in self._inherits.items():
            if parent_field in values:
                avoid_tables.append(tables)
        for field in self._columns.keys():
            if not field in values:
                missing_defaults.append(field)
        for field in self._inherit_fields.keys():
            # skip inherited fields whose parent record is explicitly provided
            if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
                missing_defaults.append(field)

        if len(missing_defaults):
            # override defaults with the provided values, never allow the other way around
            defaults = self.default_get(cr, uid, missing_defaults, context)
            # [excerpt: loop header over ``defaults`` keys (``dv``) elided (internal 2079)]
            if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
                    and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
                defaults[dv] = [(6, 0, defaults[dv])]
            if (dv in self._columns and self._columns[dv]._type == 'one2many' \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
                    and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
                defaults[dv] = [(0, 0, x) for x in defaults[dv]]
            defaults.update(values)
        # [excerpt: return statement elided]
class orm_memory(orm_template):
    # Non-persistent model: records live in an in-memory dict keyed by id
    # rather than a PostgreSQL table, with LRU-style expiration (see vaccum()).
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
    _inherit_fields = {}
    def createInstance(cls, pool, cr):
        # Build a registry instance of this model class.
        # NOTE(review): takes ``cls`` — presumably decorated @classmethod on an
        # elided line above; confirm against the full source.
        return cls.makeInstance(pool, cr, ['_columns', '_defaults'])
    def __init__(self, pool, cr):
        super(orm_memory, self).__init__(pool, cr)
        # [excerpt: in-memory store initialisation elided (internal 2106-2108)]
        # Expiration thresholds read from the server configuration.
        self._max_count = config.get('osv_memory_count_limit')
        self._max_hours = config.get('osv_memory_age_limit')
        # Stale workflow instances of this virtual model are purged at init.
        cr.execute('delete from wkf_instance where res_type=%s', (self._name,))
2113 def _check_access(self, uid, object_id, mode):
2114 if uid != 1 and self.datas[object_id]['internal.create_uid'] != uid:
2115 raise except_orm(_('AccessError'), '%s access is only allowed on your own records for osv_memory objects except for the super-user' % mode.capitalize())
    def vaccum(self, cr, uid, force=False):
        """Run the vacuum cleaning system, expiring and removing old records from the
        virtual osv_memory tables if the "max count" or "max age" conditions are enabled
        and have been reached. This method can be called very often (e.g. every time a record
        is created), but will only actually trigger the cleanup process once out of
        "_check_time" times (by default once out of 20 calls)."""
        # [excerpt: call-counter increment elided]
        if (not force) and (self.check_id % self._check_time):
            # [excerpt: early-return path and counter reset elided (internal 2125-2127)]

        # Age-based expiration
        # [excerpt: guard on self._max_hours elided]
        max = time.time() - self._max_hours * 60 * 60
        for k,v in self.datas.iteritems():
            if v['internal.date_access'] < max:
                # [excerpt: accumulation of expired keys into ``tounlink`` elided]
        self.unlink(cr, ROOT_USER_ID, tounlink)

        # Count-based expiration
        if self._max_count and len(self.datas) > self._max_count:
            # sort by access time to remove only the first/oldest ones in LRU fashion
            records = self.datas.items()
            records.sort(key=lambda x:x[1]['internal.date_access'])
            self.unlink(cr, ROOT_USER_ID, [x[0] for x in records[:len(self.datas)-self._max_count]])
        # [excerpt: return statement elided]
    def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
        # In-memory read(): fetch requested fields straight from self.datas,
        # then post-process non-classic (function-style) fields via get_memory.
        # [excerpt: context/setup lines elided (internal 2146-2147)]
        if not fields_to_read:
            fields_to_read = self._columns.keys()
        # [excerpt: lines elided (internal 2150-2152)]
        if isinstance(ids, (int, long)):
            # [excerpt: scalar-id wrapping and result-list setup elided (internal 2154-2156)]
        for f in fields_to_read:
            record = self.datas.get(id)
            # [excerpt: record-found guard elided]
            self._check_access(user, id, 'read')
            r[f] = record.get(f, False)
            if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                # [excerpt: binary size substitution elided (internal 2163-2164)]
        if id in self.datas:
            # touch the record for the LRU expiration in vaccum()
            self.datas[id]['internal.date_access'] = time.time()
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
        for f in fields_post:
            res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
            for record in result:
                record[f] = res2[record['id']]
        if isinstance(ids_orig, (int, long)):
            # [excerpt: scalar-result unwrapping and return elided (internal 2173-2175)]
    def write(self, cr, user, ids, vals, context=None):
        # In-memory write(): split vals into directly-stored values (vals2) and
        # function-style fields (upd_todo), then update each record, validate
        # it and fire the workflow write trigger.
        # [excerpt: vals2/upd_todo setup and field loop header elided (internal 2177-2181)]
        if self._columns[field]._classic_write:
            vals2[field] = vals[field]
            # [excerpt: else-branch header elided]
            upd_todo.append(field)
        for object_id in ids:
            self._check_access(user, object_id, mode='write')
            self.datas[object_id].update(vals2)
            # touch the record for the LRU expiration in vaccum()
            self.datas[object_id]['internal.date_access'] = time.time()
            for field in upd_todo:
                self._columns[field].set_memory(cr, self, object_id, field, vals[field], user, context)
            self._validate(cr, user, [object_id], context)
            wf_service = netsvc.LocalService("workflow")
            wf_service.trg_write(user, self._name, object_id, cr)
        # [excerpt: return statement elided (internal 2195-2196)]
    def create(self, cr, user, vals, context=None):
        # In-memory create(): allocate the next id, store classic values, apply
        # function-style fields, validate, optionally log the creation and fire
        # the workflow create trigger.
        self.vaccum(cr, user)
        # [excerpt: next_id increment elided (internal 2199)]
        id_new = self.next_id
        # [excerpt: line elided (internal 2201)]
        vals = self._add_missing_default_values(cr, user, vals, context)
        # [excerpt: vals2/upd_todo setup and field loop header elided (internal 2203-2206)]
        if self._columns[field]._classic_write:
            vals2[field] = vals[field]
            # [excerpt: else-branch header elided]
            upd_todo.append(field)
        self.datas[id_new] = vals2
        self.datas[id_new]['internal.date_access'] = time.time()
        # creator recorded for the owner-only access checks in _check_access()
        self.datas[id_new]['internal.create_uid'] = user

        for field in upd_todo:
            self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
        self._validate(cr, user, [id_new], context)
        if self._log_create and not (context and context.get('no_store_function', False)):
            message = self._description + \
                # [excerpt: string fragment elided (internal 2220)]
                self.name_get(cr, user, [id_new], context=context)[0][1] + \
                # [excerpt: string fragment elided (internal 2222)]
            self.log(cr, user, id_new, message, True, context=context)
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_create(user, self._name, id_new, cr)
        # [excerpt: return statement elided (internal 2226-2227)]
    def _where_calc(self, cr, user, args, active_test=True, context=None):
        # Parse a domain into an expression object, injecting the implicit
        # ('active', '=', 1) filter when the model has an 'active' column.
        # [excerpt: setup lines elided (internal 2229-2232)]
        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitely asked for
        if 'active' in self._columns and (active_test and context.get('active_test', True)):
            # [excerpt: non-empty-domain guard elided (internal 2236)]
            active_in_args = False
            # [excerpt: loop header over args elided (internal 2238)]
            if a[0] == 'active':
                active_in_args = True
            if not active_in_args:
                args.insert(0, ('active', '=', 1))
            # [excerpt: else-branch header elided (internal 2243)]
            args = [('active', '=', 1)]
        # [excerpt: lines elided (internal 2245-2246)]
        e = expression.expression(args)
        e.parse(cr, user, self, context)
        # [excerpt: result extraction and return elided (internal 2249-2251)]
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        # In-memory search: evaluate each parsed domain leaf against every
        # record dict in self.datas.
        # [excerpt: setup lines elided (internal 2253-2255)]
        # implicit filter on current user except for superuser
        # [excerpt: superuser guard elided (internal 2257-2259)]
        args.insert(0, ('internal.create_uid', '=', user))
        # [excerpt: line elided]
        result = self._where_calc(cr, user, args, context=context)
        # [excerpt: empty-domain guard elided — all ids returned when no criteria]
        return self.datas.keys()
        # [excerpt: lines elided (internal 2265-2267)]
        #Find the value of dict
        # [excerpt: accumulator setup elided (internal 2269-2270)]
        for id, data in self.datas.items():
            counter = counter + 1
            # [excerpt: line elided (internal 2273)]
            if limit and (counter > int(limit)):
                # [excerpt: break and per-leaf loop/operator dispatch elided (internal 2275-2278)]
                # NOTE(review): ``eval`` here is tools.safe_eval (aliased in the
                # module imports), evaluated over locals() — not the raw builtin.
                val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
                elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
                    val = eval('data[arg[0]]'+arg[1] +' arg[2]', locals())
                elif arg[1] in ['ilike']:
                    val = (str(data[arg[0]]).find(str(arg[2]))!=-1)
        # [excerpt: result accumulation and return elided (internal 2284-2292)]
    def unlink(self, cr, uid, ids, context=None):
        # Drop records from the in-memory store (owner/superuser only, per
        # _check_access) and clean up their workflow instances.
        # [excerpt: loop header over ids elided (internal 2294)]
        self._check_access(uid, id, 'unlink')
        self.datas.pop(id, None)
        # [excerpt: non-empty guard elided (internal 2297)]
        cr.execute('delete from wkf_instance where res_type=%s and res_id IN %s', (self._name, tuple(ids)))
        # [excerpt: return statement elided (internal 2299-2300)]
    def perm_read(self, cr, user, ids, context=None, details=True):
        # Metadata for in-memory records: the creator is reported as the
        # current user and the creation date is synthesised at call time,
        # since nothing is persisted in the database.
        # [excerpt: result-list setup elided (internal 2302)]
        credentials = self.pool.get('res.users').name_get(cr, user, [user])[0]
        create_date = time.strftime('%Y-%m-%d %H:%M:%S')
        # [excerpt: loop header over ids elided (internal 2305)]
        self._check_access(user, id, 'read')
        # [excerpt: dict-literal opening elided (internal 2307)]
        'create_uid': credentials,
        'create_date': create_date,
        # [excerpt: line elided (internal 2310)]
        'write_date': False,
        # [excerpt: dict close and return elided (internal 2312-2316)]
    def _check_removed_columns(self, cr, log=False):
        # nothing to check in memory...
        # [excerpt: ``pass`` body elided (internal 2319)]
2321 def exists(self, cr, uid, id, context=None):
2322 return id in self.datas
class orm(orm_template):
    # PostgreSQL-backed implementation of the ORM.
    _sql_constraints = []
    # [excerpt: one class-attribute line elided (internal 2326)]
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
    __logger = logging.getLogger('orm')
    __schema = logging.getLogger('orm.schema')
    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
        # [excerpt: docstring opening elided]
        Get the list of records in list view grouped by the given ``groupby`` fields

        :param cr: database cursor
        :param uid: current user id
        :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
        :param list fields: list of fields present in the list view specified on the object
        :param list groupby: fields by which the records will be grouped
        :param int offset: optional number of records to skip
        :param int limit: optional max number of records to return
        :param dict context: context arguments, like lang, time zone
        :param order: optional ``order by`` specification, for overriding the natural
                      sort ordering of the groups, see also :py:meth:`~osv.osv.osv.search`
                      (supported only for many2one fields currently)
        :return: list of dictionaries(one dictionary for each record) containing:

                 * the values of fields grouped by the fields in ``groupby`` argument
                 * __domain: list of tuples specifying the search criteria
                 * __context: dictionary with argument like ``groupby``
        :rtype: [{'field_name_1': value, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        # [excerpt: docstring closing elided]
        context = context or {}
        self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
        # [excerpt: default-fields guard elided (internal 2357)]
        fields = self._columns.keys()

        query = self._where_calc(cr, uid, domain, context=context)
        self._apply_ir_rules(cr, uid, query, 'read', context=context)

        # Take care of adding join(s) if groupby is an '_inherits'ed field
        groupby_list = groupby
        qualified_groupby_field = groupby
        # [excerpt: guard elided (internal 2366)]
        if isinstance(groupby, list):
            groupby = groupby[0]
        qualified_groupby_field = self._inherits_join_calc(groupby, query)
        # [excerpt: lines elided (internal 2370-2371)]
        assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
        groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
        assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"

        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fget = self.fields_get(cr, uid, fields)
        float_int_fields = filter(lambda x: fget[x]['type'] in ('float', 'integer'), fields)
        # [excerpt: flist initialisation elided (internal 2379)]
        group_count = group_by = groupby
        # [excerpt: guard elided (internal 2381)]
        if fget.get(groupby):
            if fget[groupby]['type'] in ('date', 'datetime'):
                # dates are grouped per month via to_char
                flist = "to_char(%s,'yyyy-mm') as %s " % (qualified_groupby_field, groupby)
                groupby = "to_char(%s,'yyyy-mm')" % (qualified_groupby_field)
                qualified_groupby_field = groupby
            # [excerpt: else-branch header elided (internal 2387)]
            flist = qualified_groupby_field
        # [excerpt: else-branch header elided (internal 2389)]
        # Don't allow arbitrary values, as this would be a SQL injection vector!
        raise except_orm(_('Invalid group_by'),
                         _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
        # [excerpt: lines elided (internal 2393-2394)]
        fields_pre = [f for f in float_int_fields if
                   f == self.CONCURRENCY_CHECK_FIELD
                or (f in self._columns and getattr(self._columns[f], '_classic_write'))]
        for f in fields_pre:
            if f not in ['id', 'sequence']:
                group_operator = fget[f].get('group_operator', 'sum')
                # [excerpt: flist comma handling elided (internal 2401-2402)]
                qualified_field = '"%s"."%s"' % (self._table, f)
                flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)

        gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''

        from_clause, where_clause, where_clause_params = query.get_sql()
        where_clause = where_clause and ' WHERE ' + where_clause
        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
            # [excerpt: group_count override elided (internal 2413)]
        cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
        # [excerpt: alldata initialisation elided (internal 2415-2416)]
        for r in cr.dictfetchall():
            for fld, val in r.items():
                if val == None: r[fld] = False
            alldata[r['id']] = r
            # [excerpt: lines elided (internal 2421-2422)]
        data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=orderby or groupby, context=context)
        # the IDS of records that have groupby field value = False or '' should be sorted too
        data_ids += filter(lambda x:x not in data_ids, alldata.keys())
        data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
        # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
        data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
        # [excerpt: loop header over ``data`` (``d``) elided (internal 2429-2431)]
        d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
        if not isinstance(groupby_list, (str, unicode)):
            if groupby or not context.get('group_by_no_leaf', False):
                d['__context'] = {'group_by': groupby_list[1:]}
        if groupby and groupby in fget:
            if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
                # expand a month group into a [first-day, last-day] domain
                dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
                days = calendar.monthrange(dt.year, dt.month)[1]
                # [excerpt: line elided (internal 2440)]
                d[groupby] = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d').strftime('%B %Y')
                d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
                             (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
            del alldata[d['id']][groupby]
        d.update(alldata[d['id']])
        # [excerpt: cleanup and return elided (internal 2446-2448)]
    def _inherits_join_add(self, parent_model_name, query):
        # [excerpt: docstring opening elided]
        Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)

        :param parent_model_name: name of the parent model for which the clauses should be added
        :param query: query object on which the JOIN should be added
        # [excerpt: docstring closing elided]
        inherits_field = self._inherits[parent_model_name]
        parent_model = self.pool.get(parent_model_name)
        parent_table_name = parent_model._table
        quoted_parent_table_name = '"%s"' % parent_table_name
        if quoted_parent_table_name not in query.tables:
            query.tables.append(quoted_parent_table_name)
            # join on the foreign-key column that links this table to its parent
            query.where_clause.append('("%s".%s = %s.id)' % (self._table, inherits_field, parent_table_name))
    def _inherits_join_calc(self, field, query):
        # [excerpt: docstring opening elided]
        Adds missing table select and join clause(s) to ``query`` for reaching
        the field coming from an '_inherits' parent table (no duplicates).

        :param field: name of inherited field to reach
        :param query: query object on which the JOIN should be added
        :return: qualified name of field, to be used in SELECT clause
        # [excerpt: docstring closing elided]
        current_table = self
        # Walk up the _inherits chain until we reach the model that actually
        # owns the column, adding one JOIN per hop.
        while field in current_table._inherit_fields and not field in current_table._columns:
            parent_model_name = current_table._inherit_fields[field][0]
            parent_table = self.pool.get(parent_model_name)
            self._inherits_join_add(parent_model_name, query)
            current_table = parent_table
        return '"%s".%s' % (current_table._table, field)
    def _parent_store_compute(self, cr):
        # Rebuild the parent_left/parent_right nested-set columns for the whole
        # table by depth-first traversal from the root records.
        if not self._parent_store:
            # [excerpt: early return elided (internal 2483)]
        logger = netsvc.Logger()
        logger.notifyChannel('data', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
        def browse_rec(root, pos=0):
            # Recursive helper: assign left/right numbers to ``root`` and its subtree.
            # [excerpt: line elided (internal 2487)]
            where = self._parent_name+'='+str(root)
            # [excerpt: null-root guard elided (internal 2489)]
            where = self._parent_name+' IS NULL'
            if self._parent_order:
                where += ' order by '+self._parent_order
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            # [excerpt: pos2 initialisation elided (internal 2494)]
            for id in cr.fetchall():
                pos2 = browse_rec(id[0], pos2)
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
            # [excerpt: return elided (internal 2498)]
        query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
        if self._parent_order:
            query += ' order by ' + self._parent_order
        # [excerpt: pos initialisation and cr.execute(query) elided (internal 2502-2503)]
        for (root,) in cr.fetchall():
            pos = browse_rec(root, pos)
        # [excerpt: return elided (internal 2506-2507)]
    def _update_store(self, cr, f, k):
        # Backfill the database column of a stored fields.function ``k`` by
        # computing its value over all ids in batches of 40.
        logger = netsvc.Logger()
        logger.notifyChannel('data', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
        # [excerpt: batching loop header and ``iids`` slice elided (internal 2515-2516)]
        ids_lst = ids_lst[40:]
        res = f.get(cr, self, iids, k, ROOT_USER_ID, {})
        for key, val in res.items():
            # [excerpt: lines elided (internal 2520-2521)]
            # if val is a many2one, just write the ID
            if type(val) == tuple:
                # [excerpt: tuple unwrapping elided (internal 2524)]
            if (val<>False) or (type(val)<>bool):
                cr.execute(update_query, (ss[1](val), key))
    def _check_selection_field_value(self, cr, uid, field, value, context=None):
        """Raise except_orm if value is not among the valid values for the selection field"""
        if self._columns[field]._type == 'reference':
            # reference values look like 'model.name,id' — validate both parts
            val_model, val_id_str = value.split(',', 1)
            # [excerpt: lines elided (internal 2532-2533)]
            val_id = long(val_id_str)
            # [excerpt: error-handling lines elided (internal 2535-2537)]
            raise except_orm(_('ValidateError'),
                             _('Invalid value for reference field "%s" (last part must be a non-zero integer): "%s"') % (field, value))
        # [excerpt: lines elided (internal 2540-2542)]
        if isinstance(self._columns[field].selection, (tuple, list)):
            # static selection list
            if val in dict(self._columns[field].selection):
                # [excerpt: early return elided (internal 2545)]
        # dynamic selection: a callable producing the list at runtime
        elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
            # [excerpt: early return elided (internal 2547)]
        raise except_orm(_('ValidateError'),
                         _('The value "%s" for the field "%s" is not in the selection') % (value, field))
    def _check_removed_columns(self, cr, log=False):
        # iterate on the database columns to drop the NOT NULL constraints
        # of fields which were required but have been removed (or will be added by another module)
        columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
        columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
        cr.execute("SELECT a.attname, a.attnotnull"
                   " FROM pg_class c, pg_attribute a"
                   " WHERE c.relname=%s"
                   " AND c.oid=a.attrelid"
                   " AND a.attisdropped=%s"
                   " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
                   " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
        # NOTE(review): the trailing comma above makes this statement a 1-tuple;
        # harmless at runtime but likely unintended — confirm against full source.

        for column in cr.dictfetchall():
            # [excerpt: logging guard elided (internal 2565)]
            self.__logger.debug("column %s is in the table %s but not in the corresponding object %s",
                                column['attname'], self._table, self._name)
            if column['attnotnull']:
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
                self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                                    self._table, column['attname'])
2573 def _auto_init(self, cr, context=None):
2576 Call _field_create and, unless _auto is False:
2578 - create the corresponding table in database for the model,
2579 - possibly add the parent columns in database,
2580 - possibly add the columns 'create_uid', 'create_date', 'write_uid',
2581 'write_date' in database if _log_access is True (the default),
2582 - report on database columns no more existing in _columns,
2583 - remove no more existing not null constraints,
2584 - alter existing database columns to match _columns,
2585 - create database tables to match _columns,
2586 - add database indices to match _columns,
2587 - save in self._foreign_keys a list a foreign keys to create (see
2591 self._foreign_keys = []
2592 raise_on_invalid_object_name(self._name)
2595 store_compute = False
2597 update_custom_fields = context.get('update_custom_fields', False)
2598 self._field_create(cr, context=context)
2599 create = not self._table_exist(cr)
2601 if getattr(self, '_auto', True):
2604 self._create_table(cr)
2607 if self._parent_store:
2608 if not self._parent_columns_exist(cr):
2609 self._create_parent_columns(cr)
2610 store_compute = True
2612 # Create the create_uid, create_date, write_uid, write_date, columns if desired.
2613 if self._log_access:
2614 self._add_log_columns(cr)
2616 self._check_removed_columns(cr, log=False)
2618 # iterate on the "object columns"
2619 column_data = self._select_column_data(cr)
2621 for k, f in self._columns.iteritems():
2622 if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
2624 # Don't update custom (also called manual) fields
2625 if f.manual and not update_custom_fields:
2628 if isinstance(f, fields.one2many):
2629 self._o2m_raise_on_missing_reference(cr, f)
2631 elif isinstance(f, fields.many2many):
2632 self._m2m_raise_or_create_relation(cr, f)
2635 res = column_data.get(k)
2637 # The field is not found as-is in database, try if it
2638 # exists with an old name.
2639 if not res and hasattr(f, 'oldname'):
2640 res = column_data.get(f.oldname)
2642 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
2644 column_data[k] = res
2645 self.__schema.debug("Table '%s': renamed column '%s' to '%s'",
2646 self._table, f.oldname, k)
2648 # The field already exists in database. Possibly
2649 # change its type, rename it, drop it or change its
2652 f_pg_type = res['typname']
2653 f_pg_size = res['size']
2654 f_pg_notnull = res['attnotnull']
2655 if isinstance(f, fields.function) and not f.store and\
2656 not getattr(f, 'nodrop', False):
2657 self.__logger.info('column %s (%s) in table %s removed: converted to a function !\n',
2658 k, f.string, self._table)
2659 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
2661 self.__schema.debug("Table '%s': dropped column '%s' with cascade",
2665 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
2670 ('text', 'char', 'VARCHAR(%d)' % (f.size or 0,), '::VARCHAR(%d)'%(f.size or 0,)),
2671 ('varchar', 'text', 'TEXT', ''),
2672 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2673 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
2674 ('timestamp', 'date', 'date', '::date'),
2675 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2676 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2678 if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
2679 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2680 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
2681 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size))
2682 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2684 self.__schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
2685 self._table, k, f_pg_size, f.size)
2687 if (f_pg_type==c[0]) and (f._type==c[1]):
2688 if f_pg_type != f_obj_type:
2690 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2691 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
2692 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
2693 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2695 self.__schema.debug("Table '%s': column '%s' changed type from %s to %s",
2696 self._table, k, c[0], c[1])
2699 if f_pg_type != f_obj_type:
2703 newname = k + '_moved' + str(i)
2704 cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
2705 "WHERE c.relname=%s " \
2706 "AND a.attname=%s " \
2707 "AND c.oid=a.attrelid ", (self._table, newname))
2708 if not cr.fetchone()[0]:
2712 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2713 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
2714 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2715 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2716 self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
2717 self._table, k, f_pg_type, f._type, newname)
2719 # if the field is required and hasn't got a NOT NULL constraint
2720 if f.required and f_pg_notnull == 0:
2721 # set the field to the default value if any
2722 if k in self._defaults:
2723 if callable(self._defaults[k]):
2724 default = self._defaults[k](self, cr, ROOT_USER_ID, context)
2726 default = self._defaults[k]
2728 if (default is not None):
2729 ss = self._columns[k]._symbol_set
2730 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
2731 cr.execute(query, (ss[1](default),))
2732 # add the NOT NULL constraint
2735 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2737 self.__schema.debug("Table '%s': column '%s': added NOT NULL constraint",
2740 msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
2741 "If you want to have it, you should update the records and execute manually:\n"\
2742 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2743 self.__schema.warn(msg, self._table, k, self._table, k)
2745 elif not f.required and f_pg_notnull == 1:
2746 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2748 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2751 indexname = '%s_%s_index' % (self._table, k)
2752 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
2753 res2 = cr.dictfetchall()
2754 if not res2 and f.select:
2755 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2757 if f._type == 'text':
2758 # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
2759 msg = "Table '%s': Adding (b-tree) index for text column '%s'."\
2760 "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
2761 " because there is a length limit for indexable btree values!\n"\
2762 "Use a search view instead if you simply want to make the field searchable."
2763 self.__schema.warn(msg, self._table, k, f._type)
2764 if res2 and not f.select:
2765 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
2767 msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
2768 self.__schema.debug(msg, self._table, k, f._type)
2770 if isinstance(f, fields.many2one):
2771 ref = self.pool.get(f._obj)._table
2772 if ref != 'ir_actions':
2773 cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
2774 'pg_attribute as att1, pg_attribute as att2 '
2775 'WHERE con.conrelid = cl1.oid '
2776 'AND cl1.relname = %s '
2777 'AND con.confrelid = cl2.oid '
2778 'AND cl2.relname = %s '
2779 'AND array_lower(con.conkey, 1) = 1 '
2780 'AND con.conkey[1] = att1.attnum '
2781 'AND att1.attrelid = cl1.oid '
2782 'AND att1.attname = %s '
2783 'AND array_lower(con.confkey, 1) = 1 '
2784 'AND con.confkey[1] = att2.attnum '
2785 'AND att2.attrelid = cl2.oid '
2786 'AND att2.attname = %s '
2787 "AND con.contype = 'f'", (self._table, ref, k, 'id'))
2788 res2 = cr.dictfetchall()
2790 if res2[0]['confdeltype'] != POSTGRES_CONFDELTYPES.get(f.ondelete.upper(), 'a'):
2791 cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"')
2792 self._foreign_keys.append((self._table, k, ref, f.ondelete))
2794 self.__schema.debug("Table '%s': column '%s': XXX",
2797 # The field doesn't exist in database. Create it if necessary.
2799 if not isinstance(f, fields.function) or f.store:
2800 # add the missing field
2801 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2802 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2803 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2804 self._table, k, get_pg_type(f)[1])
2807 if not create and k in self._defaults:
2808 if callable(self._defaults[k]):
2809 default = self._defaults[k](self, cr, ROOT_USER_ID, context)
2811 default = self._defaults[k]
2813 ss = self._columns[k]._symbol_set
2814 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
2815 cr.execute(query, (ss[1](default),))
2817 netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
2819 # remember the functions to call for the stored fields
2820 if isinstance(f, fields.function):
2822 if f.store is not True: # i.e. if f.store is a dict
2823 order = f.store[f.store.keys()[0]][2]
2824 todo_end.append((order, self._update_store, (f, k)))
2826 # and add constraints if needed
2827 if isinstance(f, fields.many2one):
2828 if not self.pool.get(f._obj):
2829 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2830 ref = self.pool.get(f._obj)._table
2831 # ir_actions is inherited so foreign key doesn't work on it
2832 if ref != 'ir_actions':
2833 self._foreign_keys.append((self._table, k, ref, f.ondelete))
2834 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2835 self._table, k, ref, f.ondelete)
2837 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2841 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2842 self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
2845 msg = "WARNING: unable to set column %s of table %s not null !\n"\
2846 "Try to re-run: openerp-server --update=module\n"\
2847 "If it doesn't work, update records and execute manually:\n"\
2848 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2849 self.__logger.warn(msg, k, self._table, self._table, k)
2853 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2854 create = not bool(cr.fetchone())
2856 cr.commit() # start a new transaction
2858 self._add_sql_constraints(cr)
2861 self._execute_sql(cr)
2864 self._parent_store_compute(cr)
    def _auto_end(self, cr, context=None):
        """ Create the foreign keys recorded by _auto_init. """
        # _auto_init records (table, column, referenced_table, ondelete)
        # tuples instead of creating the FKs immediately, so they can all be
        # added here, once every referenced table is guaranteed to exist.
        for t, k, r, d in self._foreign_keys:
            # NOTE(review): identifiers are interpolated straight into DDL;
            # they come from model/field definitions, not user input.
            cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
        # Delete (not just clear) the list so a stale attribute cannot be
        # reused by a later call without _auto_init running first.
        del self._foreign_keys
    def _table_exist(self, cr):
        # Probe pg_class for a relation (ordinary table 'r' or view 'v')
        # named after this model's table.
        # NOTE(review): the visible body only issues the query; callers
        # presumably inspect cr.rowcount / fetch the result — confirm.
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2883 def _create_table(self, cr):
2884 cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
2885 cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''")))
2886 self.__schema.debug("Table '%s': created", self._table)
    def _parent_columns_exist(self, cr):
        # Look for a 'parent_left' attribute on this model's table in the
        # catalog; parent_left/parent_right are always created together, so
        # one probe is enough.
        # NOTE(review): the visible body only issues the query; callers
        # presumably check cr.rowcount for the result — confirm.
        cr.execute("""SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, 'parent_left'))
2897 def _create_parent_columns(self, cr):
2898 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
2899 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
2900 if 'parent_left' not in self._columns:
2901 self.__logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
2903 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2904 self._table, 'parent_left', 'INTEGER')
2905 elif not self._columns['parent_left'].select:
2906 self.__logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
2908 if 'parent_right' not in self._columns:
2909 self.__logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
2911 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2912 self._table, 'parent_right', 'INTEGER')
2913 elif not self._columns['parent_right'].select:
2914 self.__logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
2916 if self._columns[self._parent_name].ondelete != 'cascade':
2917 self.__logger.error("The column %s on object %s must be set as ondelete='cascade'",
2918 self._parent_name, self._name)
2923 def _add_log_columns(self, cr):
2925 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
2926 'create_date': 'TIMESTAMP',
2927 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
2928 'write_date': 'TIMESTAMP'
2933 FROM pg_class c, pg_attribute a
2934 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
2935 """, (self._table, k))
2937 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
2939 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2940 self._table, k, logs[k])
2943 def _select_column_data(self, cr):
2944 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2945 "FROM pg_class c,pg_attribute a,pg_type t " \
2946 "WHERE c.relname=%s " \
2947 "AND c.oid=a.attrelid " \
2948 "AND a.atttypid=t.oid", (self._table,))
2949 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
2952 def _o2m_raise_on_missing_reference(self, cr, f):
2953 # TODO this check should be a method on fields.one2many.
2954 other = self.pool.get(f._obj)
2956 # TODO the condition could use fields_get_keys().
2957 if f._fields_id not in other._columns.keys():
2958 if f._fields_id not in other._inherit_fields.keys():
2959 raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
    def _m2m_raise_or_create_relation(self, cr, f):
        # Ensure the relation table backing the many2many field `f` exists,
        # creating it (with both indexes and a UNIQUE pair constraint) when
        # it does not.
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (f._rel,))
        if not cr.dictfetchall():
            # The relation table can only be built if the comodel is known.
            if not self.pool.get(f._obj):
                raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
            ref = self.pool.get(f._obj)._table
            cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s")) WITH OIDS' % (f._rel, f._id1, f._id2, f._id1, f._id2))
            # FKs are only recorded here; _auto_end creates them later, once
            # every referenced table is guaranteed to exist.
            self._foreign_keys.append((f._rel, f._id1, self._table, 'CASCADE'))
            self._foreign_keys.append((f._rel, f._id2, ref, 'CASCADE'))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
            cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref))
            self.__schema.debug("Create table '%s': relation between '%s' and '%s'",
                                f._rel, self._table, ref)
2979 def _add_sql_constraints(self, cr):
2982 Modify this model's database table constraints so they match the one in
2986 for (key, con, _) in self._sql_constraints:
2987 conname = '%s_%s' % (self._table, key)
2989 cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
2990 existing_constraints = cr.dictfetchall()
2995 'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
2996 'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
2997 self._table, conname, con),
2998 'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
3003 'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
3004 'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
3005 'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
3011 if not existing_constraints:
3012 # constraint does not exists:
3013 sql_actions['add']['execute'] = True
3014 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
3015 elif con.lower() not in [item['condef'].lower() for item in existing_constraints]:
3016 # constraint exists but its definition has changed:
3017 sql_actions['drop']['execute'] = True
3018 sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
3019 sql_actions['add']['execute'] = True
3020 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
3022 # we need to add the constraint:
3023 sql_actions = [item for item in sql_actions.values()]
3024 sql_actions.sort(key=lambda x: x['order'])
3025 for sql_action in [action for action in sql_actions if action['execute']]:
3027 cr.execute(sql_action['query'])
3029 self.__schema.debug(sql_action['msg_ok'])
3031 self.__schema.warn(sql_action['msg_err'])
3035 def _execute_sql(self, cr):
3036 """ Execute the SQL code from the _sql attribute (if any)."""
3037 if hasattr(self, "_sql"):
3038 for line in self._sql.split(';'):
3039 line2 = line.replace('\n', '').strip()
    def createInstance(cls, pool, cr):
        # Factory hook: build the model instance for this registry/cursor.
        # The listed attributes are the ones makeInstance merges across the
        # inheritance chain when assembling the final class.
        # NOTE(review): first parameter is `cls` — clearly used as a
        # classmethod; the decorator is expected on the preceding (unseen)
        # line.
        return cls.makeInstance(pool, cr, ['_columns', '_defaults',
            '_inherits', '_constraints', '_sql_constraints'])
3050 def __init__(self, pool, cr):
3053 - copy the stored fields' functions in the osv_pool,
3054 - update the _columns with the fields found in ir_model_fields,
3055 - ensure there is a many2one for each _inherits'd parent,
3056 - update the children's _columns,
3057 - give a chance to each field to initialize itself.
3060 super(orm, self).__init__(pool, cr)
3062 if not hasattr(self, '_log_access'):
3063 # if not access is not specify, it is the same value as _auto
3064 self._log_access = getattr(self, "_auto", True)
3066 self._columns = self._columns.copy()
3067 for store_field in self._columns:
3068 f = self._columns[store_field]
3069 if hasattr(f, 'digits_change'):
3071 if not isinstance(f, fields.function):
3075 if self._columns[store_field].store is True:
3076 sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
3078 sm = self._columns[store_field].store
3079 for object, aa in sm.items():
3081 (fnct, fields2, order, length) = aa
3083 (fnct, fields2, order) = aa
3086 raise except_orm('Error',
3087 ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
3088 self.pool._store_function.setdefault(object, [])
3090 for x, y, z, e, f, l in self.pool._store_function[object]:
3091 if (x==self._name) and (y==store_field) and (e==fields2):
3095 self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order, length))
3096 self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
3098 for (key, _, msg) in self._sql_constraints:
3099 self.pool._sql_error[self._table+'_'+key] = msg
3101 # Load manual fields
3103 cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
3105 cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
3106 for field in cr.dictfetchall():
3107 if field['name'] in self._columns:
3110 'string': field['field_description'],
3111 'required': bool(field['required']),
3112 'readonly': bool(field['readonly']),
3113 'domain': eval(field['domain']) if field['domain'] else None,
3114 'size': field['size'],
3115 'ondelete': field['on_delete'],
3116 'translate': (field['translate']),
3118 #'select': int(field['select_level'])
3121 if field['ttype'] == 'selection':
3122 self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
3123 elif field['ttype'] == 'reference':
3124 self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
3125 elif field['ttype'] == 'many2one':
3126 self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
3127 elif field['ttype'] == 'one2many':
3128 self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
3129 elif field['ttype'] == 'many2many':
3130 _rel1 = field['relation'].replace('.', '_')
3131 _rel2 = field['model'].replace('.', '_')
3132 _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
3133 self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
3135 self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
3136 self._inherits_check()
3137 self._inherits_reload()
3138 if not self._sequence:
3139 self._sequence = self._table + '_id_seq'
3140 for k in self._defaults:
3141 assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
3142 for f in self._columns:
3143 self._columns[f].restart()
    # Prepend the inherited orm_template docstring so help() shows the full
    # contract of __init__ in one place.
    __init__.__doc__ = orm_template.__init__.__doc__ + __init__.__doc__
    # Update objects that use this one to update their _inherits fields
3151 def _inherits_reload_src(self):
3152 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3153 for obj in self.pool.models.values():
3154 if self._name in obj._inherits:
3155 obj._inherits_reload()
3158 def _inherits_reload(self):
3159 """ Recompute the _inherit_fields mapping.
3161 This will also call itself on each inherits'd child model.
3165 for table in self._inherits:
3166 other = self.pool.get(table)
3167 for col in other._columns.keys():
3168 res[col] = (table, self._inherits[table], other._columns[col])
3169 for col in other._inherit_fields.keys():
3170 res[col] = (table, self._inherits[table], other._inherit_fields[col][2])
3171 self._inherit_fields = res
3172 self._all_columns = self._get_column_infos()
3173 self._inherits_reload_src()
3176 def _get_column_infos(self):
3177 """Returns a dict mapping all fields names (direct fields and
3178 inherited field via _inherits) to a ``column_info`` struct
3179 giving detailed columns """
3181 for k, (parent, m2o, col) in self._inherit_fields.iteritems():
3182 result[k] = fields.column_info(k, col, parent, m2o)
3183 for k, col in self._columns.iteritems():
3184 result[k] = fields.column_info(k, col)
3188 def _inherits_check(self):
3189 for table, field_name in self._inherits.items():
3190 if field_name not in self._columns:
3191 logging.getLogger('init').info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.' % (field_name, self._name))
3192 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3193 required=True, ondelete="cascade")
3194 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() != "cascade":
3195 logging.getLogger('init').warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.' % (field_name, self._name))
3196 self._columns[field_name].required = True
3197 self._columns[field_name].ondelete = "cascade"
3199 #def __getattr__(self, name):
3201 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3202 # (though inherits doesn't use Python inheritance).
3203 # Handles translating between local ids and remote ids.
3204 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3205 # when you have inherits.
3207 # for model, field in self._inherits.iteritems():
3208 # proxy = self.pool.get(model)
3209 # if hasattr(proxy, name):
3210 # attribute = getattr(proxy, name)
3211 # if not hasattr(attribute, '__call__'):
3215 # return super(orm, self).__getattr__(name)
3217 # def _proxy(cr, uid, ids, *args, **kwargs):
3218 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3219 # lst = [obj[field].id for obj in objects if obj[field]]
3220 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
3225 def fields_get(self, cr, user, fields=None, context=None):
3227 Get the description of list of fields
3229 :param cr: database cursor
3230 :param user: current user id
3231 :param fields: list of fields
3232 :param context: context arguments, like lang, time zone
3233 :return: dictionary of field dictionaries, each one describing a field of the business object
3234 :raise AccessError: * if user has no create/write rights on the requested object
3237 ira = self.pool.get('ir.model.access')
3238 write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
3239 ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
3240 return super(orm, self).fields_get(cr, user, fields, context, write_access)
3242 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
3245 self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
3247 fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
3248 if isinstance(ids, (int, long)):
3252 select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
3253 result = self._read_flat(cr, user, select, fields, context, load)
3256 for key, v in r.items():
3260 if isinstance(ids, (int, long, dict)):
3261 return result and result[0] or False
3264 def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
3269 if fields_to_read == None:
3270 fields_to_read = self._columns.keys()
3272 # Construct a clause for the security rules.
3273 # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
3274 # or will at least contain self._table.
3275 rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)
3277 # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
3278 fields_pre = [f for f in fields_to_read if
3279 f == self.CONCURRENCY_CHECK_FIELD
3280 or (f in self._columns and getattr(self._columns[f], '_classic_write'))
3281 ] + self._inherits.values()
3285 def convert_field(f):
3286 f_qual = "%s.%s" % (self._table, f) # need fully-qualified references in case len(tables) > 1
3287 if f in ('create_date', 'write_date'):
3288 return "date_trunc('second', %s) as %s" % (f_qual, f)
3289 if f == self.CONCURRENCY_CHECK_FIELD:
3290 if self._log_access:
3291 return "COALESCE(%s.write_date, %s.create_date, now())::timestamp AS %s" % (self._table, self._table, f,)
3292 return "now()::timestamp AS %s" % (f,)
3293 if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
3294 return 'length(%s) as "%s"' % (f_qual, f)
3297 fields_pre2 = map(convert_field, fields_pre)
3298 order_by = self._parent_order or self._order
3299 select_fields = ','.join(fields_pre2 + [self._table + '.id'])
3300 query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
3302 query += " AND " + (' OR '.join(rule_clause))
3303 query += " ORDER BY " + order_by
3304 for sub_ids in cr.split_for_in_conditions(ids):
3306 cr.execute(query, [tuple(sub_ids)] + rule_params)
3307 if cr.rowcount != len(sub_ids):
3308 raise except_orm(_('AccessError'),
3309 _('Operation prohibited by access rules, or performed on an already deleted document (Operation: read, Document type: %s).')
3310 % (self._description,))
3312 cr.execute(query, (tuple(sub_ids),))
3313 res.extend(cr.dictfetchall())
3315 res = map(lambda x: {'id': x}, ids)
3317 for f in fields_pre:
3318 if f == self.CONCURRENCY_CHECK_FIELD:
3320 if self._columns[f].translate:
3321 ids = [x['id'] for x in res]
3322 #TODO: optimize out of this loop
3323 res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
3325 r[f] = res_trans.get(r['id'], False) or r[f]
3327 for table in self._inherits:
3328 col = self._inherits[table]
3329 cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
3332 res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
3340 if not record[col]: # if the record is deleted from _inherits table?
3342 record.update(res3[record[col]])
3343 if col not in fields_to_read:
3346 # all fields which need to be post-processed by a simple function (symbol_get)
3347 fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
3350 for f in fields_post:
3351 r[f] = self._columns[f]._symbol_get(r[f])
3352 ids = [x['id'] for x in res]
3354 # all non inherited fields for which the attribute whose name is in load is False
3355 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
3357 # Compute POST fields
3359 for f in fields_post:
3360 todo.setdefault(self._columns[f]._multi, [])
3361 todo[self._columns[f]._multi].append(f)
3362 for key, val in todo.items():
3364 res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
3365 assert res2 is not None, \
3366 'The function field "%s" on the "%s" model returned None\n' \
3367 '(a dictionary was expected).' % (val[0], self._name)
3370 if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
3371 multi_fields = res2.get(record['id'],{})
3373 record[pos] = multi_fields.get(pos,[])
3376 res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
3379 record[f] = res2[record['id']]
3384 for field in vals.copy():
3386 if field in self._columns:
3387 fobj = self._columns[field]
3394 for group in groups:
3395 module = group.split(".")[0]
3396 grp = group.split(".")[1]
3397 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
3398 (grp, module, 'res.groups', user))
3399 readonly = cr.fetchall()
3400 if readonly[0][0] >= 1:
3403 elif readonly[0][0] == 0:
3409 if type(vals[field]) == type([]):
3411 elif type(vals[field]) == type(0.0):
3413 elif type(vals[field]) == type(''):
3414 vals[field] = '=No Permission='
3419 def perm_read(self, cr, user, ids, context=None, details=True):
3421 Returns some metadata about the given records.
3423 :param details: if True, \*_uid fields are replaced with the name of the user
3424 :return: list of ownership dictionaries for each requested record
3425 :rtype: list of dictionaries with the following keys:
3428 * create_uid: user who created the record
3429 * create_date: date when the record was created
3430 * write_uid: last user who changed the record
3431 * write_date: date of the last change to the record
3432 * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
3439 uniq = isinstance(ids, (int, long))
3443 if self._log_access:
3444 fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
3445 quoted_table = '"%s"' % self._table
3446 fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
3447 query = '''SELECT %s, __imd.module, __imd.name
3448 FROM %s LEFT JOIN ir_model_data __imd
3449 ON (__imd.model = %%s and __imd.res_id = %s.id)
3450 WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
3451 cr.execute(query, (self._name, tuple(ids)))
3452 res = cr.dictfetchall()
3455 r[key] = r[key] or False
3456 if details and key in ('write_uid', 'create_uid') and r[key]:
3458 r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
3460 pass # Leave the numeric uid there
3461 r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
3462 del r['name'], r['module']
3467 def _check_concurrency(self, cr, ids, context):
3470 if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
3472 check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
3473 for sub_ids in cr.split_for_in_conditions(ids):
3476 id_ref = "%s,%s" % (self._name, id)
3477 update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
3479 ids_to_check.extend([id, update_date])
3480 if not ids_to_check:
3482 cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
3485 # mention the first one only to keep the error message readable
3486 raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """Verifies that the operation given by ``operation`` is allowed for the user
        according to ir.rules.

        :param operation: one of ``write``, ``unlink``
        :raise except_orm: * if current ir.rules do not permit this operation.
        :return: None if the operation is allowed
        """
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
        # NOTE(review): the join below runs unconditionally here; an empty
        # where_clause would yield a dangling ' and ' — a guard for that case
        # appears to belong in front of this line; confirm against
        # ir.rule.domain_get's return contract.
        where_clause = ' and ' + ' and '.join(where_clause)
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                       ' WHERE ' + self._table + '.id IN %s' + where_clause,
                       [sub_ids] + where_params)
            # Fewer rows than ids requested => some records are filtered out
            # by the rules, or already deleted.
            if cr.rowcount != len(sub_ids):
                raise except_orm(_('AccessError'),
                                 _('Operation prohibited by access rules, or performed on an already deleted document (Operation: %s, Document type: %s).')
                                 % (operation, self._description))
3508 def unlink(self, cr, uid, ids, context=None):
3510 Delete records with given ids
3512 :param cr: database cursor
3513 :param uid: current user id
3514 :param ids: id or list of ids
3515 :param context: (optional) context arguments, like lang, time zone
3517 :raise AccessError: * if user has no unlink rights on the requested object
3518 * if user tries to bypass access rules for unlink on the requested object
3519 :raise UserError: if the record is default property for other records
3524 if isinstance(ids, (int, long)):
3527 result_store = self._store_get_values(cr, uid, ids, None, context)
3529 self._check_concurrency(cr, ids, context)
3531 self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
3533 properties = self.pool.get('ir.property')
3534 domain = [('res_id', '=', False),
3535 ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
3537 if properties.search(cr, uid, domain, context=context):
3538 raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
3540 wf_service = netsvc.LocalService("workflow")
3542 wf_service.trg_delete(uid, self._name, oid, cr)
3545 self.check_access_rule(cr, uid, ids, 'unlink', context=context)
3546 pool_model_data = self.pool.get('ir.model.data')
3547 ir_values_obj = self.pool.get('ir.values')
3548 for sub_ids in cr.split_for_in_conditions(ids):
3549 cr.execute('delete from ' + self._table + ' ' \
3550 'where id IN %s', (sub_ids,))
3552 # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
3553 # as these are not connected with real database foreign keys, and would be dangling references.
3554 # Note: following steps performed as admin to avoid access rights restrictions, and with no context
3555 # to avoid possible side-effects during admin calls.
3556 # Step 1. Calling unlink of ir_model_data only for the affected IDS
3557 reference_ids = pool_model_data.search(cr, ROOT_USER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
3558 # Step 2. Marching towards the real deletion of referenced records
3560 pool_model_data.unlink(cr, ROOT_USER_ID, reference_ids)
3562 # For the same reason, removing the record relevant to ir_values
3563 ir_value_ids = ir_values_obj.search(cr, uid,
3564 ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
3567 ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)
3569 for order, object, store_ids, fields in result_store:
3570 if object != self._name:
3571 obj = self.pool.get(object)
3572 cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
3573 rids = map(lambda x: x[0], cr.fetchall())
3575 obj._store_set_values(cr, uid, rids, fields, context)
3582 def write(self, cr, user, ids, vals, context=None):
3584 Update records with given ids with the given field values
3586 :param cr: database cursor
3587 :param user: current user id
3589 :param ids: object id or list of object ids to update according to **vals**
3590 :param vals: field values to update, e.g {'field_name': new_field_value, ...}
3591 :type vals: dictionary
3592 :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
3593 :type context: dictionary
3595 :raise AccessError: * if user has no write rights on the requested object
3596 * if user tries to bypass access rules for write on the requested object
3597 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
3598 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
3600 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:
3602 + For a many2many field, a list of tuples is expected.
3603 Here is the list of tuple that are accepted, with the corresponding semantics ::
3605 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
3606 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
3607 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
3608 (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
3609 (4, ID) link to existing record with id = ID (adds a relationship)
3610 (5) unlink all (like using (3,ID) for all linked records)
3611 (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)
3614 [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]
3616 + For a one2many field, a lits of tuples is expected.
3617 Here is the list of tuple that are accepted, with the corresponding semantics ::
3619 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
3620 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
3621 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
3624 [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]
3626 + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
3627 + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
3631 for field in vals.copy():
3633 if field in self._columns:
3634 fobj = self._columns[field]
3635 elif field in self._inherit_fields:
3636 fobj = self._inherit_fields[field][2]
3643 for group in groups:
3644 module = group.split(".")[0]
3645 grp = group.split(".")[1]
3646 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
3647 (grp, module, 'res.groups', user))
3648 readonly = cr.fetchall()
3649 if readonly[0][0] >= 1:
3652 elif readonly[0][0] == 0:
3664 if isinstance(ids, (int, long)):
3667 self._check_concurrency(cr, ids, context)
3668 self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
3670 result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
3672 # No direct update of parent_left/right
3673 vals.pop('parent_left', None)
3674 vals.pop('parent_right', None)
3676 parents_changed = []
3677 parent_order = self._parent_order or self._order
3678 if self._parent_store and (self._parent_name in vals):
3679 # The parent_left/right computation may take up to
3680 # 5 seconds. No need to recompute the values if the
3681 # parent is the same.
3682 # Note: to respect parent_order, nodes must be processed in
3683 # order, so ``parents_changed`` must be ordered properly.
3684 parent_val = vals[self._parent_name]
3686 query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
3687 (self._table, self._parent_name, self._parent_name, parent_order)
3688 cr.execute(query, (tuple(ids), parent_val))
3690 query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
3691 (self._table, self._parent_name, parent_order)
3692 cr.execute(query, (tuple(ids),))
3693 parents_changed = map(operator.itemgetter(0), cr.fetchall())
3700 totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
3702 if field in self._columns:
3703 if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
3704 if (not totranslate) or not self._columns[field].translate:
3705 upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
3706 upd1.append(self._columns[field]._symbol_set[1](vals[field]))
3707 direct.append(field)
3709 upd_todo.append(field)
3711 updend.append(field)
3712 if field in self._columns \
3713 and hasattr(self._columns[field], 'selection') \
3715 self._check_selection_field_value(cr, user, field, vals[field], context=context)
3717 if self._log_access:
3718 upd0.append('write_uid=%s')
3719 upd0.append('write_date=now()')
3723 self.check_access_rule(cr, user, ids, 'write', context=context)
3724 for sub_ids in cr.split_for_in_conditions(ids):
3725 cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
3726 'where id IN %s', upd1 + [sub_ids])
3727 if cr.rowcount != len(sub_ids):
3728 raise except_orm(_('AccessError'),
3729 _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
3734 if self._columns[f].translate:
3735 src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
3738 # Inserting value to DB
3739 self.write(cr, user, ids, {f: vals[f]})
3740 self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
3743 # call the 'set' method of fields which are not classic_write
3744 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
3746 # default element in context must be removed when call a one2many or many2many
3747 rel_context = context.copy()
3748 for c in context.items():
3749 if c[0].startswith('default_'):
3750 del rel_context[c[0]]
3752 for field in upd_todo:
3754 result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
3756 for table in self._inherits:
3757 col = self._inherits[table]
3759 for sub_ids in cr.split_for_in_conditions(ids):
3760 cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
3761 'where id IN %s', (sub_ids,))
3762 nids.extend([x[0] for x in cr.fetchall()])
3766 if self._inherit_fields[val][0] == table:
3769 self.pool.get(table).write(cr, user, nids, v, context)
3771 self._validate(cr, user, ids, context)
3773 # TODO: use _order to set dest at the right position and not first node of parent
3774 # We can't defer parent_store computation because the stored function
3775 # fields that are computed may refer (directly or indirectly) to
3776 # parent_left/right (via a child_of domain)
3779 self.pool._init_parent[self._name] = True
3781 order = self._parent_order or self._order
3782 parent_val = vals[self._parent_name]
3784 clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
3786 clause, params = '%s IS NULL' % (self._parent_name,), ()
3788 for id in parents_changed:
3789 cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
3790 pleft, pright = cr.fetchone()
3791 distance = pright - pleft + 1
3793 # Positions of current siblings, to locate proper insertion point;
3794 # this can _not_ be fetched outside the loop, as it needs to be refreshed
3795 # after each update, in case several nodes are sequentially inserted one
3796 # next to the other (i.e computed incrementally)
3797 cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
3798 parents = cr.fetchall()
3800 # Find Position of the element
3802 for (parent_pright, parent_id) in parents:
3805 position = parent_pright + 1
3807 # It's the first node of the parent
3812 cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
3813 position = cr.fetchone()[0] + 1
3815 if pleft < position <= pright:
3816 raise except_orm(_('UserError'), _('Recursivity Detected.'))
3818 if pleft < position:
3819 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
3820 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
3821 cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
3823 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
3824 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
3825 cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))
3827 result += self._store_get_values(cr, user, ids, vals.keys(), context)
3831 for order, object, ids_to_update, fields_to_recompute in result:
3832 key = (object, tuple(fields_to_recompute))
3833 done.setdefault(key, {})
3834 # avoid to do several times the same computation
3836 for id in ids_to_update:
3837 if id not in done[key]:
3838 done[key][id] = True
3840 self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)
3842 wf_service = netsvc.LocalService("workflow")
3844 wf_service.trg_write(user, self._name, id, cr)
3848 # TODO: Should set perm to user.xxx
3850 def create(self, cr, user, vals, context=None):
3852 Create a new record for the model.
3854 The values for the new record are initialized using the ``vals``
3855 argument, and if necessary the result of ``default_get()``.
3857 :param cr: database cursor
3858 :param user: current user id
3860 :param vals: field values for new record, e.g {'field_name': field_value, ...}
3861 :type vals: dictionary
3862 :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
3863 :type context: dictionary
3864 :return: id of new record created
3865 :raise AccessError: * if user has no create rights on the requested object
3866 * if user tries to bypass access rules for create on the requested object
3867 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
3868 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
3870 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
3871 Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
3877 self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)
3879 vals = self._add_missing_default_values(cr, user, vals, context)
3882 for v in self._inherits:
3883 if self._inherits[v] not in vals:
3886 tocreate[v] = {'id': vals[self._inherits[v]]}
3887 (upd0, upd1, upd2) = ('', '', [])
3889 for v in vals.keys():
3890 if v in self._inherit_fields:
3891 (table, col, col_detail) = self._inherit_fields[v]
3892 tocreate[table][v] = vals[v]
3895 if (v not in self._inherit_fields) and (v not in self._columns):
3898 # Try-except added to filter the creation of those records whose filds are readonly.
3899 # Example : any dashboard which has all the fields readonly.(due to Views(database views))
3901 cr.execute("SELECT nextval('"+self._sequence+"')")
3903 raise except_orm(_('UserError'),
3904 _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
3906 id_new = cr.fetchone()[0]
3907 for table in tocreate:
3908 if self._inherits[table] in vals:
3909 del vals[self._inherits[table]]
3911 record_id = tocreate[table].pop('id', None)
3913 if record_id is None or not record_id:
3914 record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
3916 self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)
3918 upd0 += ',' + self._inherits[table]
3920 upd2.append(record_id)
3922 #Start : Set bool fields to be False if they are not touched(to make search more powerful)
3923 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
3925 for bool_field in bool_fields:
3926 if bool_field not in vals:
3927 vals[bool_field] = False
3929 for field in vals.copy():
3931 if field in self._columns:
3932 fobj = self._columns[field]
3934 fobj = self._inherit_fields[field][2]
3940 for group in groups:
3941 module = group.split(".")[0]
3942 grp = group.split(".")[1]
3943 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
3944 (grp, module, 'res.groups', user))
3945 readonly = cr.fetchall()
3946 if readonly[0][0] >= 1:
3949 elif readonly[0][0] == 0:
3957 if self._columns[field]._classic_write:
3958 upd0 = upd0 + ',"' + field + '"'
3959 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
3960 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
3962 if not isinstance(self._columns[field], fields.related):
3963 upd_todo.append(field)
3964 if field in self._columns \
3965 and hasattr(self._columns[field], 'selection') \
3967 self._check_selection_field_value(cr, user, field, vals[field], context=context)
3968 if self._log_access:
3969 upd0 += ',create_uid,create_date'
3972 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
3973 self.check_access_rule(cr, user, [id_new], 'create', context=context)
3974 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
3976 if self._parent_store and not context.get('defer_parent_store_computation'):
3978 self.pool._init_parent[self._name] = True
3980 parent = vals.get(self._parent_name, False)
3982 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
3984 result_p = cr.fetchall()
3985 for (pleft,) in result_p:
3990 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
3991 pleft_old = cr.fetchone()[0]
3994 cr.execute('select max(parent_right) from '+self._table)
3995 pleft = cr.fetchone()[0] or 0
3996 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
3997 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
3998 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
4000 # default element in context must be remove when call a one2many or many2many
4001 rel_context = context.copy()
4002 for c in context.items():
4003 if c[0].startswith('default_'):
4004 del rel_context[c[0]]
4007 for field in upd_todo:
4008 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
4009 self._validate(cr, user, [id_new], context)
4011 if not context.get('no_store_function', False):
4012 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
4015 for order, object, ids, fields2 in result:
4016 if not (object, ids, fields2) in done:
4017 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
4018 done.append((object, ids, fields2))
4020 if self._log_create and not (context and context.get('no_store_function', False)):
4021 message = self._description + \
4023 self.name_get(cr, user, [id_new], context=context)[0][1] + \
4024 "' " + _("created.")
4025 self.log(cr, user, id_new, message, True, context=context)
4026 wf_service = netsvc.LocalService("workflow")
4027 wf_service.trg_create(user, self._name, id_new, cr)
# _store_get_values(): compute which stored function fields must be recomputed
# after ``fields`` of records ``ids`` changed, by consulting the pool's
# _store_function registry. NOTE(review): elided listing — lines such as
# 4040, 4043-4046, 4049-4053, 4061, 4063, 4070-4076 (including the final sort
# and return) are missing from this view.
4030 def _store_get_values(self, cr, uid, ids, fields, context):
4031 """Returns an ordered list of fields.functions to call due to
4032 an update operation on ``fields`` of records with ``ids``,
4033 obtained by calling the 'store' functions of these fields,
4034 as setup by their 'store' attribute.
4036 :return: [(priority, model_name, [record_ids,], [function_fields,])]
4038 # FIXME: rewrite, cleanup, use real variable names
4039 # e.g.: http://pastie.org/1222060
# fncts entries are registry tuples; indices used below: [0] target model name,
# [1] field name, [2] ids-mapping function, [3] trigger fields, [4] priority.
4041 fncts = self.pool._store_function.get(self._name, [])
4042 for fnct in range(len(fncts)):
4047 for f in (fields or []):
4048 if f in fncts[fnct][3]:
4054 result.setdefault(fncts[fnct][0], {})
4056 # use admin user for accessing objects having rules defined on store fields
4057 ids2 = fncts[fnct][2](self, cr, ROOT_USER_ID, ids, context)
# filter(None, ...) drops falsy ids (False/None) returned by the mapping fnct.
4058 for id in filter(None, ids2):
4059 result[fncts[fnct][0]].setdefault(id, [])
4060 result[fncts[fnct][0]][id].append(fnct)
# Group record ids by identical sets of triggered functions, then bucket the
# work items by priority. NOTE: ``dict``, ``object`` and ``id`` shadow Python
# builtins here (see the FIXME above about variable names).
4062 for object in result:
4064 for id, fnct in result[object].items():
4065 k2.setdefault(tuple(fnct), [])
4066 k2[tuple(fnct)].append(id)
4067 for fnct, id in k2.items():
4068 dict.setdefault(fncts[fnct[0]][4], [])
4069 dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4], object, id, map(lambda x: fncts[x][1], fnct)))
# _store_set_values(): actually recompute stored function fields and persist
# them with direct UPDATEs, honoring 'multi' grouping and skipping values that
# were directly written more recently than the store delay allows.
# NOTE(review): elided listing — e.g. lines 4080-4083, 4086-4088, 4098-4102,
# 4107-4109, 4113, 4116-4121, 4123, 4125-4126, 4129-4130, 4133-4135, 4139,
# 4142, 4145-4148 and the trailing return are missing from this view.
4077 def _store_set_values(self, cr, uid, ids, fields, context):
4078 """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
4079 respecting ``multi`` attributes), and stores the resulting values in the database directly."""
4084 if self._log_access:
4085 cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
# field_dict maps record id -> fields whose recomputation should be skipped
# because the record's write_date is within the registered delay (i[5], hours).
4089 field_dict.setdefault(r[0], [])
4090 res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
4091 write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
4092 for i in self.pool._store_function.get(self._name, []):
4094 up_write_date = write_date + datetime.timedelta(hours=i[5])
4095 if datetime.datetime.now() < up_write_date:
4097 field_dict[r[0]].append(i[1])
# Group the requested fields by their '_multi' key so multi-fields are
# computed with a single call per group.
4103 if self._columns[f]._multi not in keys:
4104 keys.append(self._columns[f]._multi)
4105 todo.setdefault(self._columns[f]._multi, [])
4106 todo[self._columns[f]._multi].append(f)
4110 # use admin user for accessing objects having rules defined on store fields
4111 result = self._columns[val[0]].get(cr, self, ids, val, ROOT_USER_ID, context=context)
4112 for id, value in result.items():
4114 for f in value.keys():
4115 if f in field_dict[id]:
# many2one/one2one function results arrive as (id, name) pairs; keep only the id.
4122 if self._columns[v]._type in ('many2one', 'one2one'):
4124 value[v] = value[v][0]
4127 upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
4128 upd1.append(self._columns[v]._symbol_set[1](value[v]))
4131 cr.execute('update "' + self._table + '" set ' + \
4132 ','.join(upd0) + ' where id = %s', upd1)
# Non-multi branch: compute and store one field at a time.
4136 # use admin user for accessing objects having rules defined on store fields
4137 result = self._columns[f].get(cr, self, ids, f, ROOT_USER_ID, context=context)
4138 for r in result.keys():
4140 if r in field_dict.keys():
4141 if f in field_dict[r]:
4143 for id, value in result.items():
4144 if self._columns[f]._type in ('many2one', 'one2one'):
4149 cr.execute('update "' + self._table + '" set ' + \
4150 '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
# Removed API kept only to give callers an explicit error instead of an
# AttributeError; always raises.
4156 def perm_write(self, cr, user, ids, fields, context=None):
4157 raise NotImplementedError(_('This method does not exist anymore'))
4159 # TODO: improve handling of NULL values (original comment: "ameliorer avec NULL")
# _where_calc(): translate an OpenERP domain into a Query object (tables,
# where clauses, params). NOTE(review): elided listing — e.g. lines 4168-4171,
# 4175, 4177, 4182, 4184-4186, 4192, 4194 are missing from this view (including
# the loop header iterating the domain as ``a``).
4160 def _where_calc(self, cr, user, domain, active_test=True, context=None):
4161 """Computes the WHERE clause needed to implement an OpenERP domain.
4162 :param domain: the domain to compute
4164 :param active_test: whether the default filtering of records with ``active``
4165 field set to ``False`` should be applied.
4166 :return: the query expressing the given domain as provided in domain
4167 :rtype: osv.query.Query
4172 # if the object has a field named 'active', filter out all inactive
4173 # records unless they were explicitly asked for
4174 if 'active' in self._columns and (active_test and context.get('active_test', True)):
# Only prepend ('active','=',1) when the caller's domain does not already
# mention the 'active' field.
4176 active_in_args = False
4178 if a[0] == 'active':
4179 active_in_args = True
4180 if not active_in_args:
4181 domain.insert(0, ('active', '=', 1))
4183 domain = [('active', '=', 1)]
# Non-empty domain: delegate parsing and SQL generation to the expression module.
4187 e = expression.expression(domain)
4188 e.parse(cr, user, self, context)
4189 tables = e.get_tables()
4190 where_clause, where_params = e.to_sql()
4191 where_clause = where_clause and [where_clause] or []
# Empty domain: no WHERE clause, just this model's table.
4193 where_clause, where_params, tables = [], [], ['"%s"' % self._table]
4195 return Query(tables, where_clause, where_params)
# Validate an ORDER BY specification against regex_order (see file header) to
# prevent SQL injection through the 'order' parameter; raises on mismatch.
# NOTE(review): the trailing line (presumably ``return True``) is elided here.
4197 def _check_qorder(self, word):
4198 if not regex_order.match(word):
4199 raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
# _apply_ir_rules(): extend ``query`` in place with the record rules (ir.rule)
# applicable to this model and its _inherits parents for the given mode.
4202 def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
4203 """Add what's missing in ``query`` to implement all appropriate ir.rules
4204 (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)
4206 :param query: the current query object
# Local helper: merge one rule's (clause, params, tables) into ``query``.
4208 def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
4210 if parent_model and child_object:
4211 # as inherited rules are being applied, we need to add the missing JOIN
4212 # to reach the parent table (if it was not JOINed yet in the query)
4213 child_object._inherits_join_add(parent_model, query)
4214 query.where_clause += added_clause
4215 query.where_clause_params += added_params
4216 for table in added_tables:
4217 if table not in query.tables:
4218 query.tables.append(table)
4222 # apply main rules on the object
4223 rule_obj = self.pool.get('ir.rule')
4224 apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))
4226 # apply ir.rules from the parents (through _inherits)
4227 for inherited_model in self._inherits:
# Python 2.5 cannot mix *args unpacking with keyword args in one call,
# hence the kwargs dict workaround noted below.
4228 kwargs = dict(parent_model=inherited_model, child_object=self) #workaround for python2.5
4229 apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
# _generate_m2o_order_by(): produce the ORDER BY expression(s) for sorting by a
# many2one column, joining the destination table and ordering by its _order
# (or _rec_name when _order is too complex). NOTE(review): elided listing —
# e.g. the early return after the debug log (around line 4252) is not visible.
4231 def _generate_m2o_order_by(self, order_field, query):
4233 Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
4234 either native m2o fields or function/related fields that are stored, including
4235 intermediate JOINs for inheritance if required.
4237 :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
4239 if order_field not in self._columns and order_field in self._inherit_fields:
4240 # also add missing joins for reaching the table containing the m2o field
4241 qualified_field = self._inherits_join_calc(order_field, query)
4242 order_field_column = self._inherit_fields[order_field][2]
4244 qualified_field = '"%s"."%s"' % (self._table, order_field)
4245 order_field_column = self._columns[order_field]
4247 assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
# Unstored function/related m2o fields have no DB column to sort on: log and bail.
4248 if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
4249 logging.getLogger('orm.search').debug("Many2one function/related fields must be stored " \
4250 "to be used as ordering fields! Ignoring sorting for %s.%s",
4251 self._name, order_field)
4254 # figure out the applicable order_by for the m2o
4255 dest_model = self.pool.get(order_field_column._obj)
4256 m2o_order = dest_model._order
4257 if not regex_order.match(m2o_order):
4258 # _order is complex, can't use it here, so we default to _rec_name
4259 m2o_order = dest_model._rec_name
4261 # extract the field names, to be able to qualify them and add desc/asc
4263 for order_part in m2o_order.split(","):
4264 m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
4265 m2o_order = m2o_order_list
4267 # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
4268 # as we don't want to exclude results that have NULL values for the m2o
4269 src_table, src_field = qualified_field.replace('"','').split('.', 1)
4270 query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
4271 qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
# Returns a list of qualified fields (multi-field _order) or a single string.
4272 return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
# _generate_order_by(): build the full " ORDER BY ..." SQL fragment from a
# user-supplied order_spec, falling back to self._order when order_spec is
# empty. NOTE(review): elided listing — a few lines (e.g. around 4283, 4310)
# are not visible here.
4275 def _generate_order_by(self, order_spec, query):
4277 Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
4278 a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.
4280 :raise except_orm: in case order_spec is malformed
4282 order_by_clause = self._order
4284 order_by_elements = []
# Validates order_spec against regex_order (raises on injection attempts).
4285 self._check_qorder(order_spec)
4286 for order_part in order_spec.split(','):
4287 order_split = order_part.strip().split(' ')
4288 order_field = order_split[0].strip()
4289 order_direction = order_split[1].strip() if len(order_split) == 2 else ''
4291 if order_field == 'id':
4292 order_by_clause = '"%s"."%s"' % (self._table, order_field)
4293 elif order_field in self._columns:
4294 order_column = self._columns[order_field]
4295 if order_column._classic_read:
4296 inner_clause = '"%s"."%s"' % (self._table, order_field)
4297 elif order_column._type == 'many2one':
4298 inner_clause = self._generate_m2o_order_by(order_field, query)
4300 continue # ignore non-readable or "non-joinable" fields
4301 elif order_field in self._inherit_fields:
# Field lives on an _inherits parent: qualify via the join helper.
4302 parent_obj = self.pool.get(self._inherit_fields[order_field][0])
4303 order_column = parent_obj._columns[order_field]
4304 if order_column._classic_read:
4305 inner_clause = self._inherits_join_calc(order_field, query)
4306 elif order_column._type == 'many2one':
4307 inner_clause = self._generate_m2o_order_by(order_field, query)
4309 continue # ignore non-readable or "non-joinable" fields
# m2o ordering may expand into several qualified columns (list), each of which
# gets the same asc/desc direction.
4311 if isinstance(inner_clause, list):
4312 for clause in inner_clause:
4313 order_by_elements.append("%s %s" % (clause, order_direction))
4315 order_by_elements.append("%s %s" % (inner_clause, order_direction))
4316 if order_by_elements:
4317 order_by_clause = ",".join(order_by_elements)
4319 return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
# _search(): core search implementation — access check, domain to SQL, ir.rules,
# order/limit/offset, then either a COUNT or an id-list query.
# NOTE(review): elided listing — lines such as 4330-4332, 4334, 4339, 4343-4344,
# 4346-4347, 4349 (fetch into ``res``) are not visible here.
4321 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
4323 Private implementation of search() method, allowing specifying the uid to use for the access right check.
4324 This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
4325 by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
4326 This is ok at the security level because this method is private and not callable through XML-RPC.
4328 :param access_rights_uid: optional user ID to use when checking access rights
4329 (not for ir.rules, this is only for ir.model.access)
4333 self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)
4335 query = self._where_calc(cr, user, args, context=context)
# ir.rules are always applied with the real user id, never access_rights_uid.
4336 self._apply_ir_rules(cr, user, query, 'read', context=context)
4337 order_by = self._generate_order_by(order, query)
4338 from_clause, where_clause, where_clause_params = query.get_sql()
# limit/offset are integers formatted with %d, so no injection risk here.
4340 limit_str = limit and ' limit %d' % limit or ''
4341 offset_str = offset and ' offset %d' % offset or ''
4342 where_str = where_clause and (" WHERE %s" % where_clause) or ''
4345 cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
4348 cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
4350 return [x[0] for x in res]
4352 # returns the different values ever entered for one field
4353 # this is used, for example, in the client when the user hits enter on
# Delegates to the _inherits parent model when the field is inherited;
# otherwise defers to the column object's own search() implementation.
4355 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
4358 if field in self._inherit_fields:
4359 return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
4361 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
# copy_data(): read one record and massage its values into a dict suitable for
# create(), recursively duplicating one2many/one2one children and re-linking
# many2many as (6,0,ids). NOTE(review): elided listing — e.g. lines 4375-4379,
# 4383, 4385-4387, 4399-4401, 4406 (field loop header), 4408-4412, 4415, 4417,
# 4419-4420, 4422, 4424, 4428, 4434, 4436, 4439-4441, 4446 and the final
# ``return data`` are not visible here.
4363 def copy_data(self, cr, uid, id, default=None, context=None):
4365 Copy given record's data with all its fields values
4367 :param cr: database cursor
4368 :param uid: current user id
4369 :param id: id of the record to copy
4370 :param default: field values to override in the original values of the copied record
4371 :type default: dictionary
4372 :param context: context arguments, like lang, time zone
4373 :type context: dictionary
4374 :return: dictionary containing all the field values
4380 # avoid recursion through already copied records in case of circular relationship
4381 seen_map = context.setdefault('__copy_data_seen',{})
4382 if id in seen_map.setdefault(self._name,[]):
4384 seen_map[self._name].append(id)
# 'state' is reset to its default rather than copied from the source record.
4388 if 'state' not in default:
4389 if 'state' in self._defaults:
4390 if callable(self._defaults['state']):
4391 default['state'] = self._defaults['state'](self, cr, uid, context)
4393 default['state'] = self._defaults['state']
# Read raw (untranslated) values; translations are re-applied separately by
# copy_translations().
4395 context_wo_lang = context.copy()
4396 if 'lang' in context:
4397 del context_wo_lang['lang']
4398 data = self.read(cr, uid, [id,], context=context_wo_lang)
4402 raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
4404 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
4405 fields = self.fields_get(cr, uid, context=context)
4407 ftype = fields[f]['type']
# Audit-trail columns are never copied to the new record.
4409 if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
4413 data[f] = default[f]
4414 elif 'function' in fields[f]:
4416 elif ftype == 'many2one':
# read() returns (id, name) for m2o; keep only the id for create().
4418 data[f] = data[f] and data[f][0]
4421 elif ftype in ('one2many', 'one2one'):
4423 rel = self.pool.get(fields[f]['relation'])
4425 # duplicate following the order of the ids
4426 # because we'll rely on it later for copying
4427 # translations in copy_translation()!
4429 for rel_id in data[f]:
4430 # the lines are first duplicated using the wrong (old)
4431 # parent but then are reassigned to the correct one thanks
4432 # to the (0, 0, ...)
4433 d = rel.copy_data(cr, uid, rel_id, context=context)
4435 res.append((0, 0, d))
4437 elif ftype == 'many2many':
4438 data[f] = [(6, 0, data[f])]
4442 # make sure we don't break the current parent_store structure and
4443 # force a clean recompute!
4444 for parent_column in ['parent_left', 'parent_right']:
4445 data.pop(parent_column, None)
# _inherits foreign keys are dropped so create() makes fresh parent records.
4447 for v in self._inherits:
4448 del data[self._inherits[v]]
# copy_translations(): duplicate the ir.translation rows of old_id onto new_id,
# recursing into one2one/one2many children (matched by sorted id order, as
# arranged by copy_data()). NOTE(review): elided listing — e.g. lines 4452-4454,
# 4458, 4460, 4464, 4479, 4484, 4488, 4490, 4492 are not visible here.
4451 def copy_translations(self, cr, uid, old_id, new_id, context=None):
4455 # avoid recursion through already copied records in case of circular relationship
4456 seen_map = context.setdefault('__copy_translations_seen',{})
4457 if old_id in seen_map.setdefault(self._name,[]):
4459 seen_map[self._name].append(old_id)
4461 trans_obj = self.pool.get('ir.translation')
4462 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
4463 fields = self.fields_get(cr, uid, context=context)
4465 translation_records = []
4466 for field_name, field_def in fields.items():
4467 # we must recursively copy the translations for o2o and o2m
4468 if field_def['type'] in ('one2one', 'one2many'):
4469 target_obj = self.pool.get(field_def['relation'])
4470 old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
4471 # here we rely on the order of the ids to match the translations
4472 # as foreseen in copy_data()
4473 old_children = sorted(old_record[field_name])
4474 new_children = sorted(new_record[field_name])
4475 for (old_child, new_child) in zip(old_children, new_children):
4476 target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
4477 # and for translatable fields we keep them for copy
4478 elif field_def.get('translate'):
# Inherited fields are translated under their parent model's name.
4480 if field_name in self._columns:
4481 trans_name = self._name + "," + field_name
4482 elif field_name in self._inherit_fields:
4483 trans_name = self._inherit_fields[field_name][0] + "," + field_name
4485 trans_ids = trans_obj.search(cr, uid, [
4486 ('name', '=', trans_name),
4487 ('res_id', '=', old_id)
4489 translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))
# Re-create each collected translation row, re-pointed at the new record.
4491 for record in translation_records:
4493 record['res_id'] = new_id
4494 trans_obj.create(cr, uid, record, context=context)
# copy(): duplicate one record by chaining copy_data() -> create() ->
# copy_translations(). NOTE(review): elided listing — the trailing
# ``return new_id`` (and default/context normalization lines) are not visible.
4497 def copy(self, cr, uid, id, default=None, context=None):
4499 Duplicate record with given id updating it with default values
4501 :param cr: database cursor
4502 :param uid: current user id
4503 :param id: id of the record to copy
4504 :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overridden_value, ...}``
4505 :type default: dictionary
4506 :param context: context arguments, like lang, time zone
4507 :type context: dictionary
# context is copied because copy_data()/copy_translations() store recursion
# guards ('__copy_data_seen', '__copy_translations_seen') inside it.
4513 context = context.copy()
4514 data = self.copy_data(cr, uid, id, default, context)
4515 new_id = self.create(cr, uid, data, context)
4516 self.copy_translations(cr, uid, id, new_id, context)
# exists(): True iff every id in ``ids`` is present in this model's table.
# NOTE(review): the line normalizing a scalar id into a list (original 4521)
# is elided here. Also note the concatenation below produces
# ...FROM "table"WHERE... with no separating space — looks like a SQL syntax
# bug; confirm against the full source.
4519 def exists(self, cr, uid, ids, context=None):
4520 if type(ids) in (int, long):
4522 query = 'SELECT count(1) FROM "%s"' % (self._table)
4523 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
4524 return cr.fetchone()[0] == len(ids)
# Deprecated public wrapper around _check_recursion(); warns (stacklevel=3
# points the warning at the external caller) and delegates.
4526 def check_recursion(self, cr, uid, ids, context=None, parent=None):
4527 warnings.warn("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
4528 self._name, DeprecationWarning, stacklevel=3)
4529 assert parent is None or parent in self._columns or parent in self._inherit_fields,\
4530 "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
4531 return self._check_recursion(cr, uid, ids, context, parent)
4533 def _check_recursion(self, cr, uid, ids, context=None, parent=None):
4535 Verifies that there is no loop in a hierarchical structure of records,
4536 by following the parent relationship using the **parent** field until a loop
4537 is detected or until a top-level record is found.
4539 :param cr: database cursor
4540 :param uid: current user id
4541 :param ids: list of ids of records to check
4542 :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
4543 :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
4547 parent = self._parent_name
4549 query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
4552 for i in range(0, len(ids), cr.IN_MAX):
4553 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
4554 cr.execute(query, (tuple(sub_ids_parent),))
4555 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
4556 ids_parent = ids_parent2
4557 for i in ids_parent:
4562 def _get_xml_ids(self, cr, uid, ids, *args, **kwargs):
4563 """Find out the XML ID(s) of any database record.
4565 **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
4567 :return: map of ids to the list of their fully qualified XML IDs
4568 (empty list when there's none).
4570 model_data_obj = self.pool.get('ir.model.data')
4571 data_ids = model_data_obj.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
4572 data_results = model_data_obj.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
4575 # can't use dict.fromkeys() as the list would be shared!
4577 for record in data_results:
4578 result[record['res_id']].append('%(module)s.%(name)s' % record)
def get_xml_id(self, cr, uid, ids, *args, **kwargs):
    """Find out the XML ID of any database record, if there
    is one. This method works as a possible implementation
    for a function field, to be able to add it to any
    model object easily, referencing it as ``osv.osv.get_xml_id``.

    When multiple XML IDs exist for a record, only one
    of them is returned (randomly).

    **Synopsis**: ``get_xml_id(cr, uid, ids) -> { 'id': 'module.xml_id' }``

    :return: map of ids to their fully qualified XML ID,
             defaulting to an empty string when there's none
             (to be usable as a function field).
    """
    results = self._get_xml_ids(cr, uid, ids)
    # collapse each list of XML IDs to a single one (or '' when absent)
    for res_id, xml_ids in results.items():
        results[res_id] = xml_ids[0] if xml_ids else ''
    return results
4604 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: