1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
# Object relational mapping to PostgreSQL module
24 # . Hierarchical structure
25 # . Constraints consistency, validations
26 # . Object meta Data depends on its status
27 # . Optimised processing by complex query (multiple actions at once)
28 # . Default fields value
29 # . Permissions optimisation
#    . Persistent object: DB postgresql
32 # . Multi-level caching system
#    . 2 different inheritance mechanisms
35 # - classicals (varchar, integer, boolean, ...)
36 # - relations (one2many, many2one, many2many)
52 import openerp.netsvc as netsvc
53 from lxml import etree
54 from openerp.tools.config import config
55 from openerp.tools.translate import _
58 from query import Query
59 import openerp.tools as tools
60 from openerp.tools.safe_eval import safe_eval as eval
62 # List of etree._Element subclasses that we choose to ignore when parsing XML.
63 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Sort specifications accepted here: comma-separated column names (optionally
# double-quoted), each optionally followed by "asc" or "desc" (case-insensitive).
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model names: lowercase letters, digits, underscores and dots only.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')

# Mapping between openerp module names and their osv classes.
module_class_list = {}
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

    The _name attribute in osv and osv_memory object is subject to
    some restrictions. This function returns True or False whether
    the given name is allowed or not.

    TODO: this is an approximation. The goal in this approximation
    is to disallow uppercase characters (in some places, we quote
    table/column names and in other not, which leads to this kind
    of errors:

        psycopg2.ProgrammingError: relation "xxx" does not exist).

    The same restriction should apply to both osv and osv_memory
    objects for consistency.

    """
    # The visible original fell through after the regex test (the return
    # statements were missing); restore the boolean result explicitly.
    if regex_object_name.match(name) is None:
        return False
    return True
def raise_on_invalid_object_name(name):
    """Log and abort with an except_orm if `name` is not a valid model name."""
    if check_object_name(name):
        return
    msg = "The _name attribute %s is not valid." % name
    netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
    raise except_orm('ValueError', msg)
# Presumably maps SQL ON DELETE action names to the one-letter codes used by
# PostgreSQL constraint metadata — NOTE(review): the dict body is not visible
# in this view; confirm against the full source.
POSTGRES_CONFDELTYPES = {
def last_day_of_current_month():
    """Return the last day of the current month formatted as 'YYYY-MM-DD'.

    The original built the strftime format string by concatenating the day
    number into it and re-read the clock via time.strftime; build the final
    date directly instead, which is both safer and clearer.
    """
    today = datetime.date.today()
    # monthrange returns (weekday_of_first_day, number_of_days_in_month)
    last_day = calendar.monthrange(today.year, today.month)[1]
    return today.replace(day=last_day).strftime('%Y-%m-%d')
def intersect(la, lb):
    """Return the members of `la` also present in `lb`, keeping `la`'s order
    and duplicates."""
    keep = lambda item: item in lb
    return filter(keep, la)
class except_orm(Exception):
    """ORM-level error carrying a (name, value) pair in Exception.args."""

    def __init__(self, name, value):
        # NOTE(review): lines between the signature and this assignment are
        # missing from this view (upstream also stores name/value attributes
        # here) — confirm against the full source.
        self.args = (name, value)
class BrowseRecordError(Exception):
    """Error raised by browse_record (e.g. for a non-integer id); the class
    body is not visible in this view."""
# Readonly python database object browser
class browse_null(object):
    """Null placeholder used for empty relational fields; attribute access
    returns None.  NOTE(review): several method bodies are missing from this
    view."""

    def __getitem__(self, name):
        """Item access placeholder (body not visible in this view)."""

    def __getattr__(self, name):
        return None # XXX: return self ?

    def __nonzero__(self):
        """Truthiness hook (body not visible in this view)."""

    def __unicode__(self):
        """Unicode conversion hook (body not visible in this view)."""
# TODO: execute an object method on browse_record_list

class browse_record_list(list):
    """A list of browse_record objects that carries the browsing context."""

    def __init__(self, lst, context=None):
        # NOTE(review): upstream normalises a falsy context to {} on lines
        # missing from this view; as shown, context may remain None.
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """Lazy proxy over a single database record, caching read values in a
    per-model cache shared with sibling records.

    NOTE(review): many lines of this class are missing from this view; the
    comments below only describe the code that is actually visible, and the
    indentation is a best-effort reconstruction.
    """
    logger = netsvc.Logger()

    def __init__(self, cr, uid, id, table, cache, context=None, list_class=None, fields_process=None):
        """
        table : the object (inherited from orm)
        context : dictionary with an optional context
        """
        if fields_process is None:
        self._list_class = list_class or browse_record_list
        # NOTE(review): self._table is presumably assigned from `table` on a
        # line missing from this view.
        self._table_name = self._table._name
        self.__logger = logging.getLogger(
            'osv.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        # one cache bucket per model name, shared by all records of the model
        cache.setdefault(table._name, {})
        self._data = cache[table._name]

        # only true (truthy) integer/long ids are accepted
        if not (id and isinstance(id, (int, long,))):
            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # Lazily fetch the requested field into the shared cache, then serve
        # the value from the cache.
        if name not in self._data[self._id]:
            # build the list of fields we will fetch

            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                # not a column: expose model callables bound to this record
                attr = getattr(self._table, name)

                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    return lambda *args, **argv: attr(self._cr, self._uid, [self._id], *args, **argv)
                # unknown field: warn (with traceback) then raise KeyError
                self.logger.notifyChannel("browse_record", netsvc.LOG_WARNING,
                    "Field '%s' does not exist in object '%s': \n%s" % (
                    name, self, ''.join(traceback.format_exc())))
                raise KeyError("Field '%s' does not exist in object '%s'" % (

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
                # gen the list of "local" (ie not inherited) fields which are classic or many2one
                fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
                # gen the list of inherited fields
                inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                # complete the field list with the inherited fields which are classic or many2one
                fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
                fields_to_fetch = [(name, col)]
            # restrict the read to cached ids that do not hold `name` yet
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            field_names = map(lambda x: x[0], fields_to_fetch)
            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                    raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

                # Where did those ids come from? Perhaps old entries in ir_model_data?
                self.__logger.warn("No field_values found for ids %s in %s", ids, self)
                raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                for field_name, field_column in fields_to_fetch:
                    if field_column._type in ('many2one', 'one2one'):
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                                value = result_line[field_name]
                            # FIXME: this happen when a _inherits object
                            #        overwrite a field of it parent. Need
                            #        testing to be sure we got the right
                            #        object and not the parent one.
                            if not isinstance(value, browse_record):
                                # In some cases the target model is not available yet, so we must ignore it,
                                # which is safe in most cases, this value will just be loaded later when needed.
                                # This situation can be caused by custom fields that connect objects with m2o without
                                # respecting module dependencies, causing relationships to be connected to soon when
                                # the target is not loaded yet.
                                new_data[field_name] = browse_record(self._cr,
                                    self._uid, value, obj, self._cache,
                                    context=self._context,
                                    list_class=self._list_class,
                                    fields_process=self._fields_process)
                                new_data[field_name] = value
                            new_data[field_name] = browse_null()
                        new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type in ('reference'):
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                                # reference values are stored as 'model,id' strings
                                ref_obj, ref_id = result_line[field_name].split(',')
                                ref_id = long(ref_id)
                                obj = self._table.pool.get(ref_obj)
                                new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                            new_data[field_name] = browse_null()
                        new_data[field_name] = browse_null()
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

        if not name in self._data[self._id]:
            # How did this happen? Could be a missing model due to custom fields used too soon, see above.
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                    "Fields to fetch: %s, Field values: %s"%(field_names, field_values))
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                    "Cached: %s, Table: %s"%(self._data[self._id], self._table))
            raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # attribute access delegates to __getitem__ (lines missing here)
            raise AttributeError(e)

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

    def __hasattr__(self, name):

        # NOTE(review): the enclosing def (presumably __repr__/__str__) is
        # missing from this view.
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # records compare equal when model name and id both match
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

        # NOTE(review): the enclosing def (presumably __hash__) is missing
        # from this view.
        return hash((self._table_name, self._id))
    # NOTE(review): the enclosing function header (presumably
    # `def get_pg_type(f)`), the opening of its docstring and of the
    # type_dict literal, and several branch lines are missing from this view;
    # indentation is a best-effort reconstruction.
    (type returned by postgres when the column was created, type expression to create the column)
        fields.boolean: 'bool',
        fields.integer: 'int4',
        fields.integer_big: 'int8',
        fields.datetime: 'timestamp',
        fields.binary: 'bytea',
        fields.many2one: 'int4',
    # direct mapping for simple column types
    if type(f) in type_dict:
        f_type = (type_dict[type(f)], type_dict[type(f)])
    elif isinstance(f, fields.float):
        # floats map to NUMERIC or DOUBLE PRECISION (branch line missing)
        f_type = ('numeric', 'NUMERIC')
        f_type = ('float8', 'DOUBLE PRECISION')
    elif isinstance(f, (fields.char, fields.reference)):
        f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
    elif isinstance(f, fields.selection):
        # string-keyed selections are sized after their longest key
        if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
            f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
        elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
            f_size = getattr(f, 'size', None) or 16
            f_type = ('int4', 'INTEGER')
            f_type = ('varchar', 'VARCHAR(%d)' % f_size)
    elif isinstance(f, fields.function) and eval('fields.'+(f._type), globals()) in type_dict:
        # function fields: resolve their underlying _type through type_dict
        t = eval('fields.'+(f._type), globals())
        f_type = (type_dict[t], type_dict[t])
    elif isinstance(f, fields.function) and f._type == 'float':
        f_type = ('numeric', 'NUMERIC')
        f_type = ('float8', 'DOUBLE PRECISION')
    elif isinstance(f, fields.function) and f._type == 'selection':
        f_type = ('text', 'text')
    elif isinstance(f, fields.function) and f._type == 'char':
        f_type = ('varchar', 'VARCHAR(%d)' % (f.size))
        # unsupported column type: only warn
        logger = netsvc.Logger()
        logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.
    """

    # module name -> list of model classes declared by that module
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        # NOTE(review): some lines are missing between the statements below
        # in this view; upstream returns early after the super() call.
        if not self._register:
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)

        # derive the addon module name from the class' module path
        module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
class orm_template(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class (although
    not directly; more specifically by inheriting from osv or
    osv_memory). The constructor is called once, usually directly
    after the class definition, e.g.:

    The system will later instanciate the class once per database (on
    which the class' module is installed).
    """

    # hierarchical-model configuration defaults
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # Mapping from inherits'd field name to triple (m, r, f)
    # where m is the model from which it is inherits'd,
    # r is the (local) field towards m,
    # and f is the _column object itself.

    # pseudo-field read by clients for optimistic concurrency checks
    CONCURRENCY_CHECK_FIELD = '__last_update'
    def log(self, cr, uid, id, message, secondary=False, context=None):
        # Record an entry in the res.log audit model for this record.
        # NOTE(review): the opening of the values dict and several of its
        # entries are missing from this view.
        return self.pool.get('res.log').create(cr, uid,
                    'res_model': self._name,
                    'secondary': secondary,
    def view_init(self, cr, uid, fields_list, context=None):
        """Override this method to do specific things when a view on the object is opened."""
        # Default implementation does nothing; addons override this hook.
500 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
501 raise NotImplementedError(_('The read_group method is not implemented on this object !'))
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).

        NOTE(review): several control-flow lines of this method are missing
        from this view; indentation is a best-effort reconstruction.
        """
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
            # no ir_model row yet: allocate an id and insert one
            cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
            model_id = cr.fetchone()[0]
            cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
            model_id = cr.fetchone()[0]
        if 'module' in context:
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                    (name_id, context['module'], 'ir.model', model_id)

        # index existing ir_model_fields rows by field name
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        for (k, f) in self._columns.items():
            # values to store for this column; NOTE(review): the dict opening
            # line and some entries are missing from this view
                'model_id': model_id,
                'field_description': f.string.replace("'", " "),
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': (f._type=='one2many' and isinstance(f, fields.one2many)) and f._fields_id or '',
            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                    vals['select_level'] = cols[k]['select_level']

                # new field: allocate an id and insert the full row
                cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
                id = cr.fetchone()[0]
                cr.execute("""INSERT INTO ir_model_fields (
                    id, model_id, model, name, field_description, ttype,
                    relation,view_load,state,select_level,relation_field, translate
                    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                    id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                    vals['relation'], bool(vals['view_load']), 'base',
                    vals['select_level'], vals['relation_field'], bool(vals['translate'])
                if 'module' in context:
                    name1 = 'field_' + self._table + '_' + k
                    cr.execute("select name from ir_model_data where name=%s", (name1,))
                        # xml-id collision: disambiguate with the row id
                        name1 = name1 + "_" + str(id)
                    cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                        (name1, context['module'], 'ir.model.fields', id)
                # existing field: update rows whose stored values differ
                for key, val in vals.items():
                    if cols[k][key] != vals[key]:
                        cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                        cr.execute("""UPDATE ir_model_fields SET
                            model_id=%s, field_description=%s, ttype=%s, relation=%s,
                            view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s
                            model=%s AND name=%s""", (
                            vals['model_id'], vals['field_description'], vals['ttype'],
                            vals['relation'], bool(vals['view_load']),
                            vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['model'], vals['name']
    def _auto_init(self, cr, context=None):
        # Validate the model's _name, then sync its columns into ir_model_fields.
        raise_on_invalid_object_name(self._name)
        self._field_create(cr, context=context)
    def _auto_end(self, cr, context=None):
        """Schema-initialisation completion hook (body not visible in this view)."""
    # Goal: try to apply inheritance at the instantiation level and
    # put objects in the pool var
    def makeInstance(cls, pool, cr, attributes):
        """ Instanciate a given model.

        This class method instanciates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        The ``attributes`` argument specifies which parent class attributes

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.

        NOTE(review): the decorator line and several control-flow lines are
        missing from this view; indentation is a best-effort reconstruction.
        """
        parent_names = getattr(cls, '_inherit', None)
            if isinstance(parent_names, (str, unicode)):
                # single _inherit: the model keeps the parent name by default
                name = cls._name or parent_names
                parent_names = [parent_names]
                raise TypeError('_name is mandatory in case of multiple inheritance')

            for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
                # NOTE(review): parent_class dereferences pool.get() before
                # the None check below, so a missing parent raises
                # AttributeError before the friendly TypeError — confirm
                # against the full source before changing.
                parent_class = pool.get(parent_name).__class__
                if not pool.get(parent_name):
                    raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                        'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
                    # copy the parent attribute, then merge the child's own
                    new = copy.copy(getattr(pool.get(parent_name), s))
                        # Don't _inherit custom fields.
                    if hasattr(new, 'update'):
                        new.update(cls.__dict__.get(s, {}))
                    elif s=='_constraints':
                        for c in cls.__dict__.get(s, []):
                            for c2 in range(len(new)):
                                #For _constraints, we should check field and methods as well
                                if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                        or getattr(new[c2][0],'__name__', True) == \
                                            getattr(c[0],'__name__', False)):
                                    # If new class defines a constraint with
                                    # same function name, we let it override
                        new.extend(cls.__dict__.get(s, []))
                # build the combined class, preventing re-registration
                cls = type(name, (cls, parent_class), dict(nattr, _register=False))
        obj = object.__new__(cls)
        obj.__init__(pool, cr)
        # NOTE(review): the enclosing method header (presumably __new__) is
        # missing from this view.
        """ Register this model.

        This doesn't create an instance but simply register the model
        as being part of the module where it is defined.

        TODO make it possible to not even have to call the constructor
        """
        # Set the module name (e.g. base, sale, accounting, ...) on the class.
        module = cls.__module__.split('.')[0]
        if not hasattr(cls, '_module'):

        # Remember which models to instanciate for this module.
        module_class_list.setdefault(cls._module, []).append(cls)

        # Since we don't return an instance here, the __init__
        # method won't be called.
    def __init__(self, pool, cr):
        """ Initialize a model and make it part of the given registry."""
        pool.add(self._name, self)

        # a model must carry a _name unless it only extends another via _inherit
        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name

            logger = netsvc.Logger()
            logger.notifyChannel('orm', netsvc.LOG_ERROR, msg)
            raise except_orm('ValueError', msg)

        # default the human description from _name when unset
        if not self._description:
            self._description = self._name
            # NOTE(review): the guard for the line below (likely
            # "if not self._table:") is missing from this view.
            self._table = self._name.replace('.', '_')
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
        """Fetch records as objects allowing to use dot notation to browse fields and relations

        :param cr: database cursor
        :param user: current user id
        :param select: id or list of ids
        :param context: context arguments, like lang, time zone
        :rtype: object or list of objects requested
        """
        self._list_class = list_class or browse_record_list
        # NOTE(review): the local `cache` dict used below is initialised on a
        # line missing from this view.
        # need to accepts ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
    def __export_row(self, cr, uid, row, fields, context=None):
        # Serialise one browse_record `row` into a list of export cells,
        # recursing into sub-records for relational field paths.
        # NOTE(review): many control-flow lines of this method are missing
        # from this view; indentation is a best-effort reconstruction.

        def check_type(field_type):
            # placeholder value for an empty cell of the given type
            if field_type == 'float':
            elif field_type == 'integer':
            elif field_type == 'boolean':

        def selection_field(in_field):
            # look up the selection column through the _inherits chain
            col_obj = self.pool.get(in_field.keys()[0])
            if f[i] in col_obj._columns.keys():
                return col_obj._columns[f[i]]
            elif f[i] in col_obj._inherits.keys():
                selection_field(col_obj._inherits)

        # one (initially empty) cell per requested field
        data = map(lambda x: '', range(len(fields)))
        for fpos in range(len(fields)):
                        # 'id' path element: export the record's XML id
                        model_data = self.pool.get('ir.model.data')
                        data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
                            d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
                            r = '%s.%s' % (d['module'], d['name'])

                        # To display external name of selection field when its exported
                        if f[i] in self._columns.keys():
                            cols = self._columns[f[i]]
                        elif f[i] in self._inherit_fields.keys():
                            cols = selection_field(self._inherits)
                        if cols and cols._type == 'selection':
                            sel_list = cols.selection
                            if r and type(sel_list) == type([]):
                                # replace the stored key by its display label
                                r = [x[1] for x in sel_list if r==x[0]]
                                r = r and r[0] or False

                    if f[i] in self._columns:
                        r = check_type(self._columns[f[i]]._type)
                    elif f[i] in self._inherit_fields:
                        r = check_type(self._inherit_fields[f[i]][2]._type)
                    data[fpos] = r or False

                    if isinstance(r, (browse_record_list, list)):
                        # sub-records: recurse with the remaining field path
                        fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                        if [x for x in fields2 if x]:
                            lines2 = self.__export_row(cr, uid, row2, fields2,
                            for fpos2 in range(len(fields)):
                                if lines2 and lines2[0][fpos2]:
                                    data[fpos2] = lines2[0][fpos2]
                            # otherwise join the related records' display names
                            name_relation = self.pool.get(rr._table_name)._rec_name
                            if isinstance(rr[name_relation], browse_record):
                                rr = rr[name_relation]
                            rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
                            rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                            dt += tools.ustr(rr_name or '') + ','

                    if isinstance(r, browse_record):
                        # many2one cells export the record's display name
                        r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
                        r = r and r[0] and r[0][1] or ''

            data[fpos] = tools.ustr(r or '')
        return [data] + lines
    def export_data(self, cr, uid, ids, fields_to_export, context=None):
        """
        Export fields for selected objects

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids
        :param fields_to_export: list of fields
        :param context: context arguments, like lang, time zone
        :rtype: dictionary with a *datas* matrix

        This method is used when exporting data via client menu
        """
        # merge own and inherited column definitions
        cols = self._columns.copy()
        for f in self._inherit_fields:
            cols.update({f: self._inherit_fields[f][2]})
        # NOTE(review): the local splitter header (likely `def fsplit(x):`) is
        # missing from this view.
            if x=='.id': return [x]
            return x.replace(':id','/id').replace('.id','/.id').split('/')
        fields_to_export = map(fsplit, fields_to_export)
        # NOTE(review): `datas` is initialised on a line missing from this view.
        for row in self.browse(cr, uid, ids, context):
            datas += self.__export_row(cr, uid, row, fields_to_export, context)
        return {'datas': datas}
    def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
        """
        Import given data in given module

        :param cr: database cursor
        :param uid: current user id
        :param fields: list of fields
        :param data: data to import
        :param mode: 'init' or 'update' for record creation
        :param current_module: module name
        :param noupdate: flag for record creation
        :param context: context arguments, like lang, time zone,
        :param filename: optional file to store partial import state for recovery

        This method is used when importing data via client menu.

        Example of fields to import for a sale.order::

            partner_id, (=name_search)
            order_line/.id, (=database_id)
            order_line/product_id/id, (=xml id)
            order_line/price_unit,
            order_line/product_uom_qty,
            order_line/product_uom/id (=xml_id)

        NOTE(review): numerous control-flow lines of this method are missing
        from this view; indentation is a best-effort reconstruction.
        """
        def _replace_field(x):
            # normalise a trailing ".id" into the "/.id" path form
            x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
            return x.replace(':id','/id').split('/')
        fields = map(_replace_field, fields)
        logger = netsvc.Logger()
        ir_model_data_obj = self.pool.get('ir.model.data')

        # mode: id (XML id) or .id (database id) or False for name_get
        def _get_id(model_name, id, current_module=False, mode='id'):
                # database id: verify it actually exists
                obj_model = self.pool.get(model_name)
                ids = obj_model.search(cr, uid, [('id', '=', int(id))])
                    raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, id))
                # XML id, optionally qualified with a module name
                module, xml_id = id.rsplit('.', 1)
                module, xml_id = current_module, id
            record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
            ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id'])
            if not ir_model_data:
                raise ValueError('No references to %s.%s' % (module, xml_id))
            id = ir_model_data[0]['res_id']
            # fallback: resolve the value through name_search
            obj_model = self.pool.get(model_name)
            ids = obj_model.name_search(cr, uid, id, operator='=', context=context)
                raise ValueError('No record found for %s' % (id,))

        # datas: a list of records, each record is defined by a list of values
        # prefix: a list of prefix fields ['line_ids']
        # position: the line to process, skip is False if it's the first line of the current record
        # (res, position, warning, res_id) with
        # res: the record for the next line to process (including it's one2many)
        # position: the new position for the next line
        # res_id: the ID of the record if it's a modification
        def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0, skip=0):
            line = datas[position]
            for i in range(len(fields)):
                    raise Exception(_('Please check that all your lines have %d columns.'
                        'Stopped around line %d having %d columns.') % \
                        (len(fields), position+2, len(line)))
                # skip columns outside the current prefix
                if field[:len(prefix)] <> prefix:
                # ID of the record using a XML ID
                if field[len(prefix)]=='id':
                    data_res_id = _get_id(model_name, line[i], current_module, 'id')
                # ID of the record using a database ID
                elif field[len(prefix)]=='.id':
                    data_res_id = _get_id(model_name, line[i], current_module, '.id')
                # recursive call for getting children and returning [(0,0,{})] or [(1,ID,{})]
                if fields_def[field[len(prefix)]]['type']=='one2many':
                    if field[len(prefix)] in done:
                    done[field[len(prefix)]] = True
                    relation_obj = self.pool.get(fields_def[field[len(prefix)]]['relation'])
                    newfd = relation_obj.fields_get( cr, uid, context=context )
                    while pos < len(datas):
                        res2 = process_liness(self, datas, prefix + [field[len(prefix)]], current_module, relation_obj._name, newfd, pos, first)
                        (newrow, pos, w2, data_res_id2, xml_id2) = res2
                        nbrmax = max(nbrmax, pos)
                        # only keep sub-rows that carry at least one value
                        if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
                        res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )
                elif fields_def[field[len(prefix)]]['type']=='many2one':
                    relation = fields_def[field[len(prefix)]]['relation']
                    if len(field) == len(prefix)+1:
                        mode = field[len(prefix)+1]
                    res = _get_id(relation, line[i], current_module, mode)
                elif fields_def[field[len(prefix)]]['type']=='many2many':
                    relation = fields_def[field[len(prefix)]]['relation']
                    if len(field) == len(prefix)+1:
                        mode = field[len(prefix)+1]
                    # TODO: improve this by using csv.csv_reader
                    for db_id in line[i].split(config.get('csv_internal_sep')):
                        res.append( _get_id(relation, db_id, current_module, mode) )
                elif fields_def[field[len(prefix)]]['type'] == 'integer':
                    res = line[i] and int(line[i]) or 0
                elif fields_def[field[len(prefix)]]['type'] == 'boolean':
                    res = line[i].lower() not in ('0', 'false', 'off')
                elif fields_def[field[len(prefix)]]['type'] == 'float':
                    res = line[i] and float(line[i]) or 0.0
                elif fields_def[field[len(prefix)]]['type'] == 'selection':
                    # accept either the internal key or the display value
                    for key, val in fields_def[field[len(prefix)]]['selection']:
                        if tools.ustr(line[i]) in [tools.ustr(key), tools.ustr(val)]:
                    if line[i] and not res:
                        logger.notifyChannel("import", netsvc.LOG_WARNING,
                                _("key '%s' not found in selection field '%s'") % \
                                        (tools.ustr(line[i]), tools.ustr(field[len(prefix)])))
                        warning += [_("Key/value '%s' not found in selection field '%s'") % (tools.ustr(line[i]), tools.ustr(field[len(prefix)]))]
                row[field[len(prefix)]] = res or False
            result = (row, nbrmax, warning, data_res_id, xml_id)

        fields_def = self.fields_get(cr, uid, context=context)

        # resume a previously interrupted import if requested
        if config.get('import_partial', False) and filename:
            data = pickle.load(file(config.get('import_partial')))

        while position<len(datas):
            (res, position, warning, res_id, xml_id) = \
                    process_liness(self, datas, [], current_module, self._name, fields_def, position=position)
                return (-1, res, 'Line ' + str(position) +' : ' + '!\n'.join(warning), '')
                ir_model_data_obj._update(cr, uid, self._name,
                        current_module, res, mode=mode, xml_id=xml_id,
                        noupdate=noupdate, res_id=res_id, context=context)
            except Exception, e:
                return (-1, res, 'Line ' + str(position) +' : ' + str(e), '')

            # periodically checkpoint progress for partial-import recovery
            if config.get('import_partial', False) and filename and (not (position%100)):
                data = pickle.load(file(config.get('import_partial')))
                data[filename] = position
                pickle.dump(data, file(config.get('import_partial'), 'wb'))
                if context.get('defer_parent_store_computation'):
                    self._parent_store_compute(cr)

        if context.get('defer_parent_store_computation'):
            self._parent_store_compute(cr)
        return (position, 0, 0, 0)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """Read the requested fields of the records with the given ids.

        :param cr: database cursor
        :param user: current user id
        :param ids: id or list of the ids of the records to read
        :param fields: optional list of field names to return (default: all fields)
        :type fields: list (example ['field_name_1', ...])
        :param context: optional context dictionary - it may contain keys for
            specifying certain options like ``context_lang``, ``context_tz``
            to alter the results of the call.
            A special ``bin_size`` boolean flag may also be passed in the
            context to request the value of all fields.binary columns to be
            returned as the size of the binary instead of its contents. This
            can also be selectively overridden by passing a field-specific
            flag in the form ``bin_size_XXX: True/False`` where ``XXX`` is the
            name of the field. Note: the ``bin_size_XXX`` form is new in
            OpenERP v6.0.
        :return: list of dictionaries (one dictionary per record asked) with
            the requested field values
        :rtype: [{'name_of_the_field': value, ...}, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object

        Abstract stub: concrete ORM classes override this method.
        """
        raise NotImplementedError(_('The read method is not implemented on this object !'))
1111 def get_invalid_fields(self, cr, uid):
1112 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run every constraint in ``self._constraints`` against the given
        record ids and raise a ``ValidateError`` listing every violation.

        A constraint is a ``(fun, msg, fields)`` triple; ``msg`` may be a
        plain string (translated through ir.translation as a 'constraint'
        source) or a callable returning the message, optionally as a
        ``(message, params)`` tuple to be %-formatted.  Fields of failed
        constraints accumulate in ``self._invalids`` (cleared when everything
        validates).

        NOTE(review): scaffolding lines (``error_msgs`` initialisation,
        ``else:`` branches, the ``error_msgs.append(`` wrapper and the final
        ``if error_msgs:`` guard) are elided in this excerpt.
        """
        context = context or {}
        # language used to translate static constraint messages
        lng = context.get('lang', False) or 'en_US'
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # Check presence of __call__ directly instead of using
                # callable() because it will be deprecated as of Python 3.0
                if hasattr(msg, '__call__'):
                    # dynamic message: the callable builds it from the records
                    tmp_msg = msg(self, cr, uid, ids, context=context)
                    if isinstance(tmp_msg, tuple):
                        tmp_msg, params = tmp_msg
                        translated_msg = tmp_msg % params
                        translated_msg = tmp_msg
                    # static message: look up its translation, fall back to source
                    translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
                        _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                # remember which fields are involved in the failure
                self._invalids.update(fields)
            raise except_orm('ValidateError', '\n'.join(error_msgs))
            self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """Return default values for the fields in ``fields_list``.

        Defaults are gathered, each layer overriding the previous one, from:

        1. the ``_inherits`` parent models,
        2. ``self._defaults`` (static values or callables) and ir.property
           values for property fields,
        3. user-defined defaults stored in ir.values (with references to
           records that no longer exist filtered out),
        4. ``default_XXX`` keys in the context.

        :param fields_list: list of fields to get the default values for
            (example ['field1', 'field2'])
        :type fields_list: list
        :param context: optional context dictionary - it may contain keys
            like ``context_lang`` / ``context_tz``, and keys in the form
            ``default_XXX`` (where XXX is a field name) to set or override a
            default value for a field.
        :return: dictionary of the default values

        NOTE(review): several scaffolding lines (``defaults = {}`` init,
        continuation/closing parens, ``else:`` branches and some guards) are
        elided in this excerpt.
        """
        # trigger view init hook
        self.view_init(cr, uid, fields_list, context)

        # get the default values for the inherited fields
        for t in self._inherits.keys():
            defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,

        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                if callable(self._defaults[f]):
                    defaults[f] = self._defaults[f](self, cr, uid, context)
                    defaults[f] = self._defaults[f]
            # resolve the column definition (own, or inherited via _inherits)
            fld_def = ((f in self._columns) and self._columns[f]) \
                or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            # property fields take their default from ir.property instead
            if isinstance(fld_def, fields.property):
                property_obj = self.pool.get('ir.property')
                prop_value = property_obj.get(cr, uid, f, self._name, context=context)
                if isinstance(prop_value, (browse_record, browse_null)):
                    defaults[f] = prop_value.id
                    defaults[f] = prop_value
            if f not in defaults:

        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type in ('many2one', 'one2one'):
                    obj = self.pool.get(fld_def._obj)
                    # drop the default if the referenced record no longer exists
                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                if fld_def._type in ('many2many'):
                    obj = self.pool.get(fld_def._obj)
                    # keep only the ids that still exist
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type in ('one2many'):
                    obj = self.pool.get(fld_def._obj)
                    # sanitize each sub-record dict: drop dangling many2one values
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                defaults[field] = field_value

        # get the default values from the context
        for key in context or {}:
            if key.startswith('default_') and (key[8:] in fields_list):
                defaults[key[8:]] = context[key]
    def perm_read(self, cr, user, ids, context=None, details=True):
        """Return metadata (create/write info) for the given record ids.
        Abstract stub; concrete ORM classes override this method."""
        raise NotImplementedError(_('The perm_read method is not implemented on this object !'))
    def unlink(self, cr, uid, ids, context=None):
        """Delete the records with the given ids.
        Abstract stub; concrete ORM classes override this method."""
        raise NotImplementedError(_('The unlink method is not implemented on this object !'))
    def write(self, cr, user, ids, vals, context=None):
        """Update the records with the given ids with the values in ``vals``.
        Abstract stub; concrete ORM classes override this method."""
        raise NotImplementedError(_('The write method is not implemented on this object !'))
    def create(self, cr, user, vals, context=None):
        """Create a new record with the values in ``vals`` and return its id.
        Abstract stub; concrete ORM classes override this method."""
        raise NotImplementedError(_('The create method is not implemented on this object !'))
    def fields_get_keys(self, cr, user, context=None):
        """Return the names of all fields of the model, including the fields
        inherited through the ``_inherits`` parent models.

        NOTE(review): the final ``return res`` is elided in this excerpt.
        """
        res = self._columns.keys()
        # TODO I believe this loop can be replace by
        # res.extend(self._inherit_fields.key())
        for parent in self._inherits:
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """ Returns the definition of each field.

        The returned value is a dictionary (indiced by field name) of
        dictionaries. The _inherits'd fields are included. The string,
        help, and selection (if present) attributes are translated.

        :param allfields: optional list restricting which fields to describe
        :param write_access: when False, every field is forced readonly

        NOTE(review): scaffolding lines (``res = {}`` init, ``continue``
        guards, ``if res_trans:``-style guards, ``sel2 = []``) are elided in
        this excerpt.
        """
        translation_obj = self.pool.get('ir.translation')
        # start from the _inherits parents' definitions, then overlay our own
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

        for f, field in self._columns.iteritems():
            if allfields and f not in allfields:
            res[f] = fields.field_to_dict(self, cr, user, context, field)

            if not write_access:
                res[f]['readonly'] = True
                res[f]['states'] = {}

            # NOTE(review): res[f] is a dict, so hasattr(res[f], 'string') is
            # always False — these three checks look like they should read
            # "'string' in res[f]" (idem 'help' / 'selection'); confirm
            # against the full source before relying on the translations below.
            if hasattr(res[f], 'string'):
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
                    res[f]['string'] = res_trans
            if hasattr(res[f], 'help'):
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
                    res[f]['help'] = help_trans
            if hasattr(res[f], 'selection'):
                if isinstance(field.selection, (tuple, list)):
                    sel = field.selection
                    # translate each selection label, keeping the source on miss
                    for key, val in sel:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                        sel2.append((key, val2 or val))
                    res[f]['selection'] = sel2
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Hook returning a context-dependent window title for the requested
        view, or a false value to keep the view's own title.

        NOTE(review): the body is elided in this excerpt — the base
        implementation presumably just returns False; confirm in full source.
        """
    def __view_look_dom(self, cr, user, node, view_id, context=None):
        """Recursively post-process one node of a view architecture: resolve
        field/column metadata, process embedded sub-views on related models,
        hide group-restricted nodes, and translate user-visible strings.

        :return: dict mapping field names to their view attributes
            (e.g. embedded ``views``, cached ``selection`` lists)

        NOTE(review): many scaffolding lines (locals initialisation,
        ``def encode(s):`` header, ``else:`` branches, the final return) are
        elided in this excerpt; indentation below is reconstructed.
        """
            if isinstance(s, unicode):
                return s.encode('utf8')

        # return True if node can be displayed to current user
        def check_group(node):
            if node.get('groups'):
                groups = node.get('groups').split(',')
                access_pool = self.pool.get('ir.model.access')
                can_see = any(access_pool.check_groups(cr, user, group) for group in groups)
                    # user is in none of the groups: hide the node entirely
                    node.set('invisible', '1')
                    if 'attrs' in node.attrib:
                        del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # embedded view on another model: collect the <field> children
                # into a temporary <form> and process it on that model
                    if f.tag in ('field'):
                        xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}
            if node.get('name'):
                # resolve the column: own column first, inherited otherwise
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                    column = self._inherit_fields[node.get('name')][2]
                    relation = self.pool.get(column._obj)
                    # process embedded sub-views (form/tree/graph) on the relation
                        if f.tag in ('form', 'tree', 'graph'):
                            ctx = context.copy()
                            ctx['base_model_name'] = self._name
                            xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                            views[str(f.tag)] = {
                    attrs = {'views': views}
                    if node.get('widget') and node.get('widget') == 'selection':
                        # Prepare the cached selection list for the client. This needs to be
                        # done even when the field is invisible to the current user, because
                        # other events could need to change its value to any of the selectable ones
                        # (such as on_change events, refreshes, etc.)

                        # If domain and context are strings, we keep them for client-side, otherwise
                        # we evaluate them server-side to consider them when generating the list of
                        # TODO: find a way to remove this hack, by allow dynamic domains
                        if column._domain and not isinstance(column._domain, basestring):
                            dom = column._domain
                        # note: eval here is tools.safe_eval (see module imports)
                        dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                        search_context = dict(context)
                        if column._context and not isinstance(column._context, basestring):
                            search_context.update(column._context)
                        attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                        if (node.get('required') and not int(node.get('required'))) or not column.required:
                            # optional field: offer an empty choice too
                            attrs['selection'].append((False, ''))
            fields[node.get('name')] = attrs

        elif node.tag in ('form', 'tree'):
            result = self.view_header_get(cr, user, False, node.tag, context)
                node.set('string', result)

        elif node.tag == 'calendar':
            # calendar attributes reference extra fields the client must read
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if 'groups' in node.attrib:

        # translate user-visible attributes when a lang is in the context
        if ('lang' in context) and not result:
            if node.get('string'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                    node.set('string', trans)
            if node.get('confirm'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm'))
                    node.set('confirm', trans)
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
                    node.set('sum', trans)
            if node.get('help'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('help'))
                    node.set('help', trans)

        # recurse into the children and merge their field dicts
            if children or (node.tag == 'field' and f.tag in ('filter','separator')):
                fields.update(self.__view_look_dom(cr, user, f, view_id, context))
    def _disable_workflow_buttons(self, cr, user, node):
        """Mark workflow <button> nodes readonly when the user belongs to none
        of the groups allowed to fire the corresponding workflow transition.

        NOTE(review): the admin short-circuit hinted at by the comment below
        and parts of the SQL (the FROM and WHERE clauses) are elided in this
        excerpt.
        """
        # admin user can always activate workflow buttons
        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        # object-type buttons are plain method calls, not workflow signals
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            cr.execute("""SELECT DISTINCT t.group_id
                INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                INNER JOIN wkf_transition t ON (t.act_to = a.id)
                AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # clickable iff the transition has no group restriction, or the
            # user belongs to at least one of the allowed groups
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            button.set('readonly', str(int(not can_click)))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        """Process a complete view tree: run :meth:`__view_look_dom`, disable
        workflow buttons the user may not click, and combine the arch with the
        definitions of every field the view references.

        NOTE(review): several lines (``if field == 'id':`` / ``else:``
        branches, the final return) are elided in this excerpt.
        """
        fields_def = self.__view_look_dom(cr, user, node, view_id, context=context)
        node = self._disable_workflow_buttons(cr, user, node)
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        if node.tag == 'diagram':
            # diagram views describe two sub-models: the node and the arrow
            if node.getchildren()[0].tag == 'node':
                node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, fields_def.keys(), context)
            if node.getchildren()[1].tag == 'arrow':
                arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, fields_def.keys(), context)
            for key, value in node_fields.items():
            for key, value in arrow_fields.items():
            fields = self.fields_get(cr, user, fields_def.keys(), context)
        for field in fields_def:
                # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
                fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
                # field referenced by the view but unknown on the model:
                # build a detailed error message from the view definitions
                cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
                res = cr.fetchall()[:]
                res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
                msg = "\n * ".join([r[0] for r in res])
                msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
                netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
                raise except_orm('View error', msg)
    def __get_default_calendar_view(self):
        """Generate a default calendar view (For internal use only).

        Guesses sensible columns for ``date_start``, ``color`` and
        ``date_stop`` (falling back to a ``date_delay`` duration field) from
        conventional field names, then embeds ``_rec_name``.

        :raise except_orm: if no usable start-date field exists

        NOTE(review): a few scaffolding lines (``date_found``/``break``,
        ``dt_stop_flag`` updates, the ``arch +=`` opener of the closing
        markup and the final return) are elided in this excerpt.
        """
        arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
                '<calendar string="%s"') % (self._description)
        if (self._date_name not in self._columns):
            # configured _date_name is not a real column: probe usual candidates
            for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                if dt in self._columns:
                    self._date_name = dt
                raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
        arch += ' date_start="%s"' % (self._date_name)
        # the first matching conventional field becomes the color dimension
        for color in ["user_id", "partner_id", "x_user_id", "x_partner_id"]:
            if color in self._columns:
                arch += ' color="' + color + '"'
        dt_stop_flag = False
        for dt_stop in ["date_stop", "date_end", "x_date_stop", "x_date_end"]:
            if dt_stop in self._columns:
                arch += ' date_stop="' + dt_stop + '"'
        if not dt_stop_flag:
            # no stop date available: fall back to a duration field
            for dt_delay in ["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"]:
                if dt_delay in self._columns:
                    arch += ' date_delay="' + dt_delay + '"'
                ' <field name="%s"/>\n'
                '</calendar>') % (self._rec_name)
    def __get_default_search_view(self, cr, uid, context=None):
        """Build a default <search> view from the model's form and tree views.

        Collects every field flagged ``select`` in the field definitions plus
        every ``select=1`` field found in the default form/tree archs, and
        lays them out in a single <group>.

        :return: the search view arch as an utf-8 XML string
        """
        form_view = self.fields_view_get(cr, uid, False, 'form', context=context)
        tree_view = self.fields_view_get(cr, uid, False, 'tree', context=context)
        # fields flagged as searchable on the model itself
        fields_to_search = set()
        fields = self.fields_get(cr, uid, context=context)
        for field in fields:
            if fields[field].get('select'):
                fields_to_search.add(field)
        for view in (form_view, tree_view):
            view_root = etree.fromstring(view['arch'])
            # Only care about select=1 in xpath below, because select=2 is covered
            # by the custom advanced search in clients
            fields_to_search = fields_to_search.union(view_root.xpath("//field[@select=1]/@name"))
        # reuse the tree view's title for the generated search view
        tree_view_root = view_root # as provided by loop above
        search_view = etree.Element("search", attrib={'string': tree_view_root.get("string", "")})
        field_group = etree.Element("group")
        search_view.append(field_group)
        for field_name in fields_to_search:
            field_group.append(etree.Element("field", attrib={'name': field_name}))
        return etree.tostring(search_view, encoding="utf-8").replace('\t', '')
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """Get the detailed composition of the requested view: fields, model
        and view architecture, with all inherited views applied.

        :param cr: database cursor
        :param user: current user id
        :param view_id: id of the view, or None to pick the default view of ``view_type``
        :param view_type: type of the view to return if view_id is None ('form', 'tree', ...)
        :param context: context arguments, like lang, time zone
        :param toolbar: true to include contextual actions (print/action/relate)
        :param submenu: example (portal_project module)
        :return: dictionary describing the composition of the requested view
            (including inherited views and extensions)
        :raise AttributeError:
            * if the inherited view has unknown position to work with other
              than 'before', 'after', 'inside', 'replace'
            * if some tag other than 'position' is found in parent view
        :raise Invalid ArchitectureError: if there is a view type other than
            form, tree, calendar, search etc. defined on the structure

        NOTE(review): this excerpt is elided in many places (guard lines,
        ``else:`` branches, several ``return`` statements and loop headers);
        indentation below is reconstructed and comments describe only what
        the visible lines establish.
        """
        # NOTE(review): the enclosing "def encode(s):" line is elided here
            if isinstance(s, unicode):
                return s.encode('utf8')

        def raise_view_error(error_msg, child_view_id):
            # browse both views so the error can mention the child's xml_id
            view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
            raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
                                 %  (child_view.xml_id, self._name, error_msg))

        # apply one inheritance spec (an <xpath>/<field>/... tree) onto `src`
        def _inherit_apply(src, inherit, inherit_id=None):
            # locate in `node` the element targeted by spec element `node2`
            def _find(node, node2):
                if node2.tag == 'xpath':
                    res = node.xpath(node2.get('expr'))
                    for n in node.getiterator(node2.tag):
                        if node2.tag == 'field':
                            # only compare field names, a field can be only once in a given view
                            # at a given level (and for multilevel expressions, we should use xpath
                            # inheritance spec anyway)
                            if node2.get('name') == n.get('name'):
                        # otherwise match on every attribute except 'position'
                        for attr in node2.attrib:
                            if attr == 'position':
                                if n.get(attr) == node2.get(attr):
            # End: _find(node, node2)

            doc_dest = etree.fromstring(encode(inherit))
            toparse = [doc_dest]
                node2 = toparse.pop(0)
                if isinstance(node2, SKIPPED_ELEMENT_TYPES):
                if node2.tag == 'data':
                    # a <data> wrapper holds multiple specs: queue its children
                    toparse += [ c for c in doc_dest ]
                node = _find(src, node2)
                if node is not None:
                    if node2.get('position'):
                        pos = node2.get('position')
                    if pos == 'replace':
                        parent = node.getparent()
                            # replacing the root: the spec's first child becomes the tree
                            src = copy.deepcopy(node2[0])
                                node.addprevious(child)
                            node.getparent().remove(node)
                    elif pos == 'attributes':
                        # attribute-only change: set or delete attributes in place
                        for child in node2.getiterator('attribute'):
                            attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                                node.set(attribute[0], attribute[1])
                                del(node.attrib[attribute[0]])
                        sib = node.getnext()
                            elif pos == 'after':
                                    sib.addprevious(child)
                            elif pos == 'before':
                                node.addprevious(child)
                                raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
                    # spec target not found: render the spec tag into the error
                        ' %s="%s"' % (attr, node2.get(attr))
                        for attr in node2.attrib
                        if attr != 'position'
                    tag = "<%s%s>" % (node2.tag, attrs)
                    raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
        # End: _inherit_apply(src, inherit)

        result = {'type': view_type, 'model': self._name}
        parent_view_model = None

        # an optional "<view_type>_view_ref" context key overrides the view choice
        view_ref = context.get(view_type + '_view_ref', False)
        if view_ref and not view_id:
                module, view_ref = view_ref.split('.', 1)
                cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
                view_ref_res = cr.fetchone()
                    view_id = view_ref_res[0]

            cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                          WHERE id=%s""", (view_id,))
            cr.execute('''SELECT
                arch,name,field_parent,id,type,inherit_id,model
                ORDER BY priority''', (self._name, view_type))
        sql_res = cr.fetchone()
            view_id = ok or sql_res[3]
            parent_view_model = sql_res[6]

        # if a view was found
            result['type'] = sql_res[4]
            result['view_id'] = sql_res[3]
            result['arch'] = sql_res[0]

            def _inherit_apply_rec(result, inherit_id):
                # get all views which inherit from (ie modify) this view
                cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
                sql_inherit = cr.fetchall()
                for (inherit, id) in sql_inherit:
                    result = _inherit_apply(result, inherit, id)
                    result = _inherit_apply_rec(result, id)

            inherit_result = etree.fromstring(encode(result['arch']))
            result['arch'] = _inherit_apply_rec(inherit_result, sql_res[3])
            result['name'] = sql_res[1]
            result['field_parent'] = sql_res[2] or False
        # otherwise, build some kind of default view
            if view_type == 'form':
                res = self.fields_get(cr, user, context=context)
                xml = '<?xml version="1.0" encoding="utf-8"?> ' \
                      '<form string="%s">' % (self._description,)
                    if res[x]['type'] not in ('one2many', 'many2many'):
                        xml += '<field name="%s"/>' % (x,)
                        if res[x]['type'] == 'text':
            elif view_type == 'tree':
                _rec_name = self._rec_name
                if _rec_name not in self._columns:
                    _rec_name = self._columns.keys()[0]
                xml = '<?xml version="1.0" encoding="utf-8"?>' \
                      '<tree string="%s"><field name="%s"/></tree>' \
                      % (self._description, _rec_name)
            elif view_type == 'calendar':
                xml = self.__get_default_calendar_view()
            elif view_type == 'search':
                xml = self.__get_default_search_view(cr, user, context)
                xml = '<?xml version="1.0"?>' # what happens here, graph case?
                raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
            result['arch'] = etree.fromstring(encode(xml))
            result['name'] = 'default'
            result['field_parent'] = False
            result['view_id'] = 0

        # post-process the arch on the model that actually owns the view
        if parent_view_model != self._name:
            ctx = context.copy()
            ctx['base_model_name'] = parent_view_model
        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
        result['arch'] = xarch
        result['fields'] = xfields

            if context and context.get('active_id', False):
                data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
                    act_id = data_menu.id
                        data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
                        result['submenu'] = getattr(data_action, 'menus', False)

                # strip the heavy report payload keys from each action dict
                for key in ('report_sxw_content', 'report_rml_content',
                        'report_sxw', 'report_rml',
                        'report_sxw_content_data', 'report_rml_content_data'):

            ir_values_obj = self.pool.get('ir.values')
            resprint = ir_values_obj.get(cr, user, 'action',
                    'client_print_multi', [(self._name, False)], False,
            resaction = ir_values_obj.get(cr, user, 'action',
                    'client_action_multi', [(self._name, False)], False,
            resrelate = ir_values_obj.get(cr, user, 'action',
                    'client_action_relate', [(self._name, False)], False,
            resprint = map(clean, resprint)
            resaction = map(clean, resaction)
            # "multi" actions are excluded from the single-record toolbar
            resaction = filter(lambda x: not x.get('multi', False), resaction)
            resprint = filter(lambda x: not x.get('multi', False), resprint)
            resrelate = map(lambda x: x[2], resrelate)

            for x in resprint + resaction + resrelate:
                x['string'] = x['name']

            result['toolbar'] = {
                'action': resaction,

    _view_look_dom_arch = __view_look_dom_arch
    def search_count(self, cr, user, args, context=None):
        """Count the records matching the search domain ``args`` by delegating
        to :meth:`search` with ``count=True``.

        NOTE(review): the tail of this method (normalising a list result and
        the final return) is elided in this excerpt.
        """
        res = self.search(cr, user, args, context=context, count=True)
        if isinstance(res, list):
1863 def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
1865 Search for records based on a search domain.
1867 :param cr: database cursor
1868 :param user: current user id
1869 :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
1870 :param offset: optional number of results to skip in the returned values (default: 0)
1871 :param limit: optional max number of records to return (default: **None**)
1872 :param order: optional columns to sort by (default: self._order=id )
1873 :param context: optional context arguments, like lang, time zone
1874 :type context: dictionary
1875 :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
1876 :return: id or list of ids of records matching the criteria
1877 :rtype: integer or list of integers
1878 :raise AccessError: * if user tries to bypass access rules for read on the requested object.
1880 **Expressing a search domain (args)**
1882 Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
1884 * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
1885 * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
1886 The semantics of most of these operators are obvious.
1887 The ``child_of`` operator will look for records who are children or grand-children of a given record,
1888 according to the semantics of this model (i.e following the relationship field named by
1889 ``self._parent_name``, by default ``parent_id``.
1890 * **value** must be a valid value to compare with the values of **field_name**, depending on its type.
1892 Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
1893 These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
1894 Be very careful about this when you combine them the first time.
1896 Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
1898 [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))
1900 The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
1902 (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
1905 return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """Private implementation of search(), allowing the uid used for the
        access-right check to be specified separately.

        This is useful for example when filling in the selection list for a
        drop-down and avoiding access rights errors, by specifying
        ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!

        :param access_rights_uid: optional user ID to use when checking access
            rights (not for ir.rules, this is only for ir.model.access)

        Abstract stub; concrete ORM classes override this method.
        """
        raise NotImplementedError(_('The search method is not implemented on this object !'))
    def name_get(self, cr, user, ids, context=None):
        """Return the textual representation of the requested records.

        :param cr: database cursor
        :param user: current user id
        :param ids: list of ids (a single int/long is also handled, see below)
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: list of ``(id, text_repr)`` tuples, the text being the
            ``self._rec_name`` value of each record; used for to-many
            relationships

        NOTE(review): guard lines are elided in this excerpt — in particular
        the body of the ``isinstance`` check below (presumably wrapping a
        single id into a list); confirm against the full source.
        """
        if isinstance(ids, (int, long)):
        return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
            [self._rec_name], context, load='_classic_write')]
1939 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
1941 Search for records and their display names according to a search domain.
1943 :param cr: database cursor
1944 :param user: current user id
1945 :param name: object name to search
1946 :param args: list of tuples specifying search criteria [('field_name', 'operator', 'value'), ...]
1947 :param operator: operator for search criterion
1948 :param context: context arguments, like lang, time zone
1949 :type context: dictionary
1950 :param limit: optional max number of records to return
1951 :return: list of object names matching the search criteria, used to provide completion for to-many relationships
1953 This method is equivalent of :py:meth:`~osv.osv.osv.search` on **name** + :py:meth:`~osv.osv.osv.name_get` on the result.
1954 See :py:meth:`~osv.osv.osv.search` for an explanation of the possible values for the search domain specified in **args**.
1957 return self._name_search(cr, user, name, args, operator, context, limit)
# name_create: create a record supplying only its _rec_name value, then
# return its name_get() pair. Defaults fill in everything else, so this can
# raise if other required fields have no default (documented below).
1959 def name_create(self, cr, uid, name, context=None):
1961 Creates a new record by calling :py:meth:`~osv.osv.osv.create` with only one
1962 value provided: the name of the new record (``_rec_name`` field).
1963 The new record will also be initialized with any default values applicable
1964 to this model, or provided through the context. The usual behavior of
1965 :py:meth:`~osv.osv.osv.create` applies.
1966 Similarly, this method may raise an exception if the model has multiple
1967 required fields and some do not have default values.
1969 :param name: name of the record to create
1971 :return: the :py:meth:`~osv.osv.osv.name_get` value for the newly-created record.
# NOTE(review): stray trailing semicolon on the next line — harmless in
# Python, but un-idiomatic; worth removing in a code change.
1973 rec_id = self.create(cr, uid, {self._rec_name: name}, context);
1974 return self.name_get(cr, uid, [rec_id], context)[0]
1976 # private implementation of name_search, allows passing a dedicated user for the name_get part to
1977 # solve some access rights issues
# _name_search: appends (_rec_name, operator, name) to the search domain,
# runs _search as `user` while checking record access as `access_rights_uid`
# (name_get_uid when provided), then name_gets the ids as that same uid.
# NOTE(review): lines 1979-1984 (presumably the `args = args or []` / `name`
# guard) and the final `return res` are absent from this chunk — confirm
# against the full file before editing.
1978 def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
1985 args += [(self._rec_name, operator, name)]
1986 access_rights_uid = name_get_uid or user
1987 ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
1988 res = self.name_get(cr, access_rights_uid, ids, context)
# Abstract hook: duplication is implemented by concrete subclasses (e.g. the
# SQL-backed `orm` below); the template level only raises.
1991 def copy(self, cr, uid, id, default=None, context=None):
1992 raise NotImplementedError(_('The copy method is not implemented on this object !'))
# Abstract hook: existence test is provided by concrete subclasses
# (orm_memory implements it as `id in self.datas`, see below).
1994 def exists(self, cr, uid, id, context=None):
1995 raise NotImplementedError(_('The exists method is not implemented on this object !'))
# read_string: collect translated field labels (the `string` attribute of
# each column) for every requested language, building {lang: {field: label}}.
# Falls back to the untranslated column string, and recurses into _inherits
# parents for inherited fields. Requires read access on ir.translation.
# NOTE(review): chunk is truncated — the `res = {}` init, the per-lang loop
# header and the final return are among the missing lines; confirm before
# relying on the exact shape described above.
1997 def read_string(self, cr, uid, id, langs, fields=None, context=None):
2000 self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
2002 fields = self._columns.keys() + self._inherit_fields.keys()
2003 #FIXME: collect all calls to _get_source into one SQL call.
2005 res[lang] = {'code': lang}
2007 if f in self._columns:
# One ir.translation lookup per (field, lang) — this is the N+1 pattern the
# FIXME above refers to.
2008 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
2010 res[lang][f] = res_trans
2012 res[lang][f] = self._columns[f].string
# Merge translations coming from _inherits parent models.
2013 for table in self._inherits:
2014 cols = intersect(self._inherit_fields.keys(), fields)
2015 res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
2018 res[lang]['code'] = lang
2019 for f in res2[lang]:
2020 res[lang][f] = res2[lang][f]
# write_string: store translated field labels into ir.translation (res_id 0,
# type 'field') for each language, then propagate to _inherits parents.
# Requires write access on ir.translation.
# NOTE(review): the per-lang/per-field loop headers are missing from this
# truncated chunk, as is the return — confirm against the full file.
2023 def write_string(self, cr, uid, id, langs, vals, context=None):
2024 self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
2025 #FIXME: try to only call the translation in one SQL
2028 if field in self._columns:
# The untranslated column label serves as the translation source (`src`).
2029 src = self._columns[field].string
2030 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
2031 for table in self._inherits:
2032 cols = intersect(self._inherit_fields.keys(), vals)
2034 self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
# Abstract hook: schema cleanup for removed columns is meaningful only for
# DB-backed models; the SQL `orm` class overrides this, orm_memory no-ops it.
2037 def _check_removed_columns(self, cr, log=False):
2038 raise NotImplementedError()
# _add_missing_default_values: complete a `values` dict for create() with
# default values for every own/inherited column not supplied by the caller.
# Inherited fields are skipped when the parent link field itself is in
# `values` (the parent record supplies them). Defaults never override
# caller-provided values (defaults.update(values) at the end).
2040 def _add_missing_default_values(self, cr, uid, values, context=None):
2041 missing_defaults = []
2042 avoid_tables = [] # avoid overriding inherited values when parent is set
2043 for tables, parent_field in self._inherits.items():
2044 if parent_field in values:
2045 avoid_tables.append(tables)
2046 for field in self._columns.keys():
2047 if not field in values:
2048 missing_defaults.append(field)
2049 for field in self._inherit_fields.keys():
2050 if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
2051 missing_defaults.append(field)
2053 if len(missing_defaults):
2054 # override defaults with the provided values, never allow the other way around
2055 defaults = self.default_get(cr, uid, missing_defaults, context)
# Normalize relational defaults to the ORM command format:
# many2many: list of ids -> [(6, 0, ids)] (replace-all command).
2057 if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
2058 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
2059 and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
2060 defaults[dv] = [(6, 0, defaults[dv])]
# one2many: list of value dicts -> [(0, 0, vals), ...] (create commands).
2061 if (dv in self._columns and self._columns[dv]._type == 'one2many' \
2062 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
2063 and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
2064 defaults[dv] = [(0, 0, x) for x in defaults[dv]]
# NOTE(review): the loop header `for dv in defaults:` (line 2056) and the
# final return of the merged dict are missing from this truncated chunk.
2065 defaults.update(values)
# orm_memory: non-persistent ORM variant keeping records in an in-process
# dict (self.datas keyed by integer id) instead of PostgreSQL tables. Used
# for wizards/transient records; entries expire by age and/or count via
# vaccum(). Access control: every non-superuser may only touch records they
# created (see _check_access).
# NOTE(review): this whole class is truncated — embedded original line
# numbers skip throughout (loop headers, returns, try/except bodies absent);
# confirm all structural comments below against the full file.
2069 class orm_memory(orm_template):
2071 _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
2072 _inherit_fields = {}
# Factory used by the pool; only _columns/_defaults are inherited/merged.
2078 def createInstance(cls, pool, cr):
2079 return cls.makeInstance(pool, cr, ['_columns', '_defaults'])
2081 def __init__(self, pool, cr):
2082 super(orm_memory, self).__init__(pool, cr)
# Expiration limits come from server config (count limit / age in hours).
2086 self._max_count = config.get('osv_memory_count_limit')
2087 self._max_hours = config.get('osv_memory_age_limit')
# Stale workflow instances for this transient model are purged at init.
2088 cr.execute('delete from wkf_instance where res_type=%s', (self._name,))
# Raise AccessError unless uid is the superuser (1) or created the record.
2090 def _check_access(self, uid, object_id, mode):
2091 if uid != 1 and self.datas[object_id]['internal.create_uid'] != uid:
2092 raise except_orm(_('AccessError'), '%s access is only allowed on your own records for osv_memory objects except for the super-user' % mode.capitalize())
2094 def vaccum(self, cr, uid, force=False):
2095 """Run the vaccuum cleaning system, expiring and removing old records from the
2096 virtual osv_memory tables if the "max count" or "max age" conditions are enabled
2097 and have been reached. This method can be called very often (e.g. everytime a record
2098 is created), but will only actually trigger the cleanup process once out of
2099 "_check_time" times (by default once out of 20 calls)."""
# Throttle: skip cleanup unless forced or the call counter hits a multiple
# of _check_time.
2101 if (not force) and (self.check_id % self._check_time):
2105 # Age-based expiration
# NOTE(review): `max` shadows the builtin — acceptable here but fragile.
2107 max = time.time() - self._max_hours * 60 * 60
2108 for k,v in self.datas.iteritems():
2109 if v['internal.date_access'] < max:
2111 self.unlink(cr, 1, tounlink)
2113 # Count-based expiration
2114 if self._max_count and len(self.datas) > self._max_count:
2115 # sort by access time to remove only the first/oldest ones in LRU fashion
2116 records = self.datas.items()
2117 records.sort(key=lambda x:x[1]['internal.date_access'])
2118 self.unlink(cr, 1, [x[0] for x in records[:len(self.datas)-self._max_count]])
# read: dict-backed analogue of orm.read; enforces per-record access,
# refreshes the LRU timestamp, then post-processes non-classic fields via
# their get_memory() implementation.
2122 def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
2125 if not fields_to_read:
2126 fields_to_read = self._columns.keys()
2130 if isinstance(ids, (int, long)):
2134 for f in fields_to_read:
2135 record = self.datas.get(id)
2137 self._check_access(user, id, 'read')
2138 r[f] = record.get(f, False)
2139 if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
2142 if id in self.datas:
2143 self.datas[id]['internal.date_access'] = time.time()
2144 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
2145 for f in fields_post:
2146 res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
2147 for record in result:
2148 record[f] = res2[record['id']]
# Scalar-in, scalar-out: mirror the shape of the original `ids` argument.
2149 if isinstance(ids_orig, (int, long)):
# write: split vals into classic-write columns (stored directly in the dict)
# and function-style columns (delegated to set_memory); validates and pings
# the workflow engine per record.
2153 def write(self, cr, user, ids, vals, context=None):
2159 if self._columns[field]._classic_write:
2160 vals2[field] = vals[field]
2162 upd_todo.append(field)
2163 for object_id in ids:
2164 self._check_access(user, object_id, mode='write')
2165 self.datas[object_id].update(vals2)
2166 self.datas[object_id]['internal.date_access'] = time.time()
2167 for field in upd_todo:
2168 self._columns[field].set_memory(cr, self, object_id, field, vals[field], user, context)
2169 self._validate(cr, user, [object_id], context)
2170 wf_service = netsvc.LocalService("workflow")
2171 wf_service.trg_write(user, self._name, object_id, cr)
# create: allocate the next in-memory id, fill defaults, store classic
# columns, stamp LRU/creator metadata, run set_memory for the rest, then
# validate, optionally log, and trigger workflow creation.
2174 def create(self, cr, user, vals, context=None):
# Opportunistic cleanup on every create (throttled inside vaccum).
2175 self.vaccum(cr, user)
2177 id_new = self.next_id
2179 vals = self._add_missing_default_values(cr, user, vals, context)
2184 if self._columns[field]._classic_write:
2185 vals2[field] = vals[field]
2187 upd_todo.append(field)
2188 self.datas[id_new] = vals2
2189 self.datas[id_new]['internal.date_access'] = time.time()
2190 self.datas[id_new]['internal.create_uid'] = user
2192 for field in upd_todo:
2193 self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
2194 self._validate(cr, user, [id_new], context)
2195 if self._log_create and not (context and context.get('no_store_function', False)):
2196 message = self._description + \
2198 self.name_get(cr, user, [id_new], context=context)[0][1] + \
2200 self.log(cr, user, id_new, message, True, context=context)
2201 wf_service = netsvc.LocalService("workflow")
2202 wf_service.trg_create(user, self._name, id_new, cr)
# _where_calc: parse the domain into an expression object; injects an
# ('active','=',1) leaf when the model has an `active` column and the caller
# did not already filter on it (and active_test is on).
2205 def _where_calc(self, cr, user, args, active_test=True, context=None):
2210 # if the object has a field named 'active', filter out all inactive
2211 # records unless they were explicitely asked for
2212 if 'active' in self._columns and (active_test and context.get('active_test', True)):
2214 active_in_args = False
2216 if a[0] == 'active':
2217 active_in_args = True
2218 if not active_in_args:
2219 args.insert(0, ('active', '=', 1))
2221 args = [('active', '=', 1)]
2224 e = expression.expression(args)
2225 e.parse(cr, user, self, context)
# _search: evaluate the parsed domain against every in-memory record by
# hand, honouring limit/count; non-superusers are implicitly restricted to
# their own records via an injected internal.create_uid leaf.
2229 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
2233 # implicit filter on current user except for superuser
2237 args.insert(0, ('internal.create_uid', '=', user))
2239 result = self._where_calc(cr, user, args, context=context)
2241 return self.datas.keys()
2245 #Find the value of dict
2248 for id, data in self.datas.items():
2249 counter = counter + 1
2251 if limit and (counter > int(limit)):
# NOTE(review): building comparison strings and eval-ing them over record
# data is both slow and a code-injection hazard if operators/values are not
# sanitized upstream — flagged for a proper rewrite (even with safe_eval).
2256 val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
2257 elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
2258 val = eval('data[arg[0]]'+arg[1] +' arg[2]', locals())
2259 elif arg[1] in ['ilike']:
# 'ilike' is approximated by a plain case-sensitive substring test here.
2260 val = (str(data[arg[0]]).find(str(arg[2]))!=-1)
# unlink: drop each record (after access check) and purge any workflow
# instances attached to the deleted ids.
2270 def unlink(self, cr, uid, ids, context=None):
2272 self._check_access(uid, id, 'unlink')
2273 self.datas.pop(id, None)
2275 cr.execute('delete from wkf_instance where res_type=%s and res_id IN %s', (self._name, tuple(ids)))
# perm_read: synthesize audit metadata — in-memory records have no real
# create/write history, so the current user and current time are reported.
2278 def perm_read(self, cr, user, ids, context=None, details=True):
2280 credentials = self.pool.get('res.users').name_get(cr, user, [user])[0]
2281 create_date = time.strftime('%Y-%m-%d %H:%M:%S')
2283 self._check_access(user, id, 'read')
2285 'create_uid': credentials,
2286 'create_date': create_date,
2288 'write_date': False,
2294 def _check_removed_columns(self, cr, log=False):
2295 # nothing to check in memory...
# exists: membership test on the in-memory store.
2298 def exists(self, cr, uid, id, context=None):
2299 return id in self.datas
# orm: the PostgreSQL-backed object-relational mapping — the base class of
# every persistent OpenERP model. _protected lists methods that must not be
# overridden by workflow/rule machinery; two module loggers ('orm' for
# general messages, 'orm.schema' for DDL/schema changes) are kept as
# name-mangled class attributes.
2301 class orm(orm_template):
2302 _sql_constraints = []
2304 _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
2305 __logger = logging.getLogger('orm')
2306 __schema = logging.getLogger('orm.schema')
# read_group: aggregate records grouped by a single groupby field (only the
# first element of a groupby list is honoured). Builds one SQL aggregation
# query (sum/min/count... over numeric fields) plus a follow-up read() for
# the group labels, and attaches __domain/__context to each group for
# drill-down. date/datetime groupbys are bucketed by month via to_char.
# NOTE(review): heavily truncated chunk — loop headers, the flist assembly
# branches, and the final return are among the missing lines; structural
# comments below are best-effort and must be confirmed against the full file.
2307 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
2309 Get the list of records in list view grouped by the given ``groupby`` fields
2311 :param cr: database cursor
2312 :param uid: current user id
2313 :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
2314 :param list fields: list of fields present in the list view specified on the object
2315 :param list groupby: fields by which the records will be grouped
2316 :param int offset: optional number of records to skip
2317 :param int limit: optional max number of records to return
2318 :param dict context: context arguments, like lang, time zone
2319 :param order: optional ``order by`` specification, for overriding the natural
2320 sort ordering of the groups, see also :py:meth:`~osv.osv.osv.search`
2321 (supported only for many2one fields currently)
2322 :return: list of dictionaries(one dictionary for each record) containing:
2324 * the values of fields grouped by the fields in ``groupby`` argument
2325 * __domain: list of tuples specifying the search criteria
2326 * __context: dictionary with argument like ``groupby``
2327 :rtype: [{'field_name_1': value, ...]
2328 :raise AccessError: * if user has no read rights on the requested object
2329 * if user tries to bypass access rules for read on the requested object
2332 context = context or {}
# Both model-level ACL and record rules are applied before querying.
2333 self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
2335 fields = self._columns.keys()
2337 query = self._where_calc(cr, uid, domain, context=context)
2338 self._apply_ir_rules(cr, uid, query, 'read', context=context)
2340 # Take care of adding join(s) if groupby is an '_inherits'ed field
2341 groupby_list = groupby
2342 qualified_groupby_field = groupby
2344 if isinstance(groupby, list):
# Only single-level grouping is supported: take the first groupby field.
2345 groupby = groupby[0]
2346 qualified_groupby_field = self._inherits_join_calc(groupby, query)
2349 assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
2350 groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
2351 assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
2353 fget = self.fields_get(cr, uid, fields)
2354 float_int_fields = filter(lambda x: fget[x]['type'] in ('float', 'integer'), fields)
2356 group_count = group_by = groupby
2358 if fget.get(groupby):
2359 if fget[groupby]['type'] in ('date', 'datetime'):
# Month bucketing: group key becomes 'yyyy-mm'.
2360 flist = "to_char(%s,'yyyy-mm') as %s " % (qualified_groupby_field, groupby)
2361 groupby = "to_char(%s,'yyyy-mm')" % (qualified_groupby_field)
2362 qualified_groupby_field = groupby
2364 flist = qualified_groupby_field
2366 # Don't allow arbitrary values, as this would be a SQL injection vector!
2367 raise except_orm(_('Invalid group_by'),
2368 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
# Aggregate every numeric, DB-persisted field with its group_operator
# (default 'sum'); 'id' and 'sequence' are excluded.
2371 fields_pre = [f for f in float_int_fields if
2372 f == self.CONCURRENCY_CHECK_FIELD
2373 or (f in self._columns and getattr(self._columns[f], '_classic_write'))]
2374 for f in fields_pre:
2375 if f not in ['id', 'sequence']:
2376 group_operator = fget[f].get('group_operator', 'sum')
2379 qualified_field = '"%s"."%s"' % (self._table, f)
2380 flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
2382 gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
2384 from_clause, where_clause, where_clause_params = query.get_sql()
2385 where_clause = where_clause and ' WHERE ' + where_clause
2386 limit_str = limit and ' limit %d' % limit or ''
2387 offset_str = offset and ' offset %d' % offset or ''
2388 if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
2390 cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
2393 for r in cr.dictfetchall():
# SQL NULLs are mapped to False, the ORM's "empty" value convention.
2394 for fld, val in r.items():
2395 if val == None: r[fld] = False
2396 alldata[r['id']] = r
# Re-search the representative ids so groups come back in model order
# (or the caller-supplied orderby), then read the group label values.
2399 data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=orderby or groupby, context=context)
2400 # the IDS of records that have groupby field value = False or '' should be sorted too
2401 data_ids += filter(lambda x:x not in data_ids, alldata.keys())
2402 data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
2403 # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
2404 data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
2408 d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
2409 if not isinstance(groupby_list, (str, unicode)):
2410 if groupby or not context.get('group_by_no_leaf', False):
# Remaining groupby levels are handed to the client for lazy expansion.
2411 d['__context'] = {'group_by': groupby_list[1:]}
2412 if groupby and groupby in fget:
2413 if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
# Expand the month bucket back into a [first day, last day] domain and a
# human-readable "Month Year" label.
2414 dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
2415 days = calendar.monthrange(dt.year, dt.month)[1]
2417 d[groupby] = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d').strftime('%B %Y')
2418 d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
2419 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
2420 del alldata[d['id']][groupby]
2421 d.update(alldata[d['id']])
# _inherits_join_add: extend `query` with the parent table and the implicit
# join condition (child.<link_field> = parent.id) used by _inherits
# delegation inheritance. Idempotent: the quoted table name is only appended
# once.
2425 def _inherits_join_add(self, parent_model_name, query):
2427 Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
2429 :param parent_model_name: name of the parent model for which the clauses should be added
2430 :param query: query object on which the JOIN should be added
2432 inherits_field = self._inherits[parent_model_name]
2433 parent_model = self.pool.get(parent_model_name)
2434 parent_table_name = parent_model._table
2435 quoted_parent_table_name = '"%s"' % parent_table_name
2436 if quoted_parent_table_name not in query.tables:
2437 query.tables.append(quoted_parent_table_name)
# Join realized as a WHERE-clause equality (implicit join syntax).
2438 query.where_clause.append('("%s".%s = %s.id)' % (self._table, inherits_field, parent_table_name))
# _inherits_join_calc: walk up the _inherits chain until the model that
# actually owns `field` in its own _columns is found, adding a join for each
# hop, and return the fully-qualified '"table".field' name for use in a
# SELECT list.
2440 def _inherits_join_calc(self, field, query):
2442 Adds missing table select and join clause(s) to ``query`` for reaching
2443 the field coming from an '_inherits' parent table (no duplicates).
2445 :param field: name of inherited field to reach
2446 :param query: query object on which the JOIN should be added
2447 :return: qualified name of field, to be used in SELECT clause
2449 current_table = self
2450 while field in current_table._inherit_fields and not field in current_table._columns:
2451 parent_model_name = current_table._inherit_fields[field][0]
2452 parent_table = self.pool.get(parent_model_name)
# Joins are accumulated on self's query even while walking parents.
2453 self._inherits_join_add(parent_model_name, query)
2454 current_table = parent_table
2455 return '"%s".%s' % (current_table._table, field)
# _parent_store_compute: rebuild the parent_left/parent_right columns of the
# nested-set ("modified preorder tree traversal") representation for models
# with _parent_store enabled, by recursively walking each root's subtree.
# No-op when _parent_store is off.
# NOTE(review): truncated — the recursive position bookkeeping between the
# visible lines (e.g. pos2 initialization, the return of browse_rec) is
# missing from this chunk; confirm against the full file.
2457 def _parent_store_compute(self, cr):
2458 if not self._parent_store:
2460 logger = netsvc.Logger()
2461 logger.notifyChannel('data', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
# browse_rec: depth-first numbering — assigns parent_left=pos before
# descending and parent_right=pos2 after all children are numbered.
2462 def browse_rec(root, pos=0):
2464 where = self._parent_name+'='+str(root)
2466 where = self._parent_name+' IS NULL'
2467 if self._parent_order:
2468 where += ' order by '+self._parent_order
2469 cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
2471 for id in cr.fetchall():
2472 pos2 = browse_rec(id[0], pos2)
2473 cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
# Roots are the rows whose parent field is NULL, in _parent_order when set.
2475 query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
2476 if self._parent_order:
2477 query += ' order by ' + self._parent_order
2480 for (root,) in cr.fetchall():
2481 pos = browse_rec(root, pos)
# _update_store: recompute and persist the values of a stored
# fields.function column `k` (function object `f`) for every row, in batches
# of 40 ids to bound memory/SQL size. many2one function results arrive as
# (id, name) tuples, so only the id is written.
2484 def _update_store(self, cr, f, k):
2485 logger = netsvc.Logger()
2486 logger.notifyChannel('data', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
# _symbol_set = (sql_placeholder, python->sql conversion callable).
2487 ss = self._columns[k]._symbol_set
2488 update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
2489 cr.execute('select id from '+self._table)
2490 ids_lst = map(lambda x: x[0], cr.fetchall())
# NOTE(review): the batch slicing header (`while ids_lst:` / `iids =
# ids_lst[:40]`) is missing from this truncated chunk — confirm.
2493 ids_lst = ids_lst[40:]
2494 res = f.get(cr, self, iids, k, 1, {})
2495 for key, val in res.items():
2498 # if val is a many2one, just write the ID
2499 if type(val) == tuple:
# Skip writing only when val is the boolean False (empty value); any other
# falsy-but-typed value (0, '') is still persisted.
2501 if (val<>False) or (type(val)<>bool):
2502 cr.execute(update_query, (ss[1](val), key))
2504 def _check_selection_field_value(self, cr, uid, field, value, context=None):
2505 """Raise except_orm if value is not among the valid values for the selection field"""
# reference fields are stored as 'model,id': validate that the id part is a
# non-zero integer before checking the model part against the selection.
2506 if self._columns[field]._type == 'reference':
2507 val_model, val_id_str = value.split(',', 1)
2510 val_id = long(val_id_str)
2514 raise except_orm(_('ValidateError'),
2515 _('Invalid value for reference field "%s" (last part must be a non-zero integer): "%s"') % (field, value))
# Static selections are a list of (value, label) pairs; dynamic ones are a
# callable evaluated with the current cr/uid/context.
2519 if isinstance(self._columns[field].selection, (tuple, list)):
2520 if val in dict(self._columns[field].selection):
2522 elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
2524 raise except_orm(_('ValidateError'),
2525 _('The value "%s" for the field "%s" is not in the selection') % (value, field))
# _check_removed_columns (SQL implementation): scan pg_attribute for live
# table columns that no longer exist in _columns (ignoring non-stored
# function fields and the standard audit columns) and drop any lingering
# NOT NULL constraint so later inserts don't fail.
2527 def _check_removed_columns(self, cr, log=False):
2528 # iterate on the database columns to drop the NOT NULL constraints
2529 # of fields which were required but have been removed (or will be added by another module)
2530 columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
2531 columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
2532 cr.execute("SELECT a.attname, a.attnotnull"
2533 " FROM pg_class c, pg_attribute a"
2534 " WHERE c.relname=%s"
2535 " AND c.oid=a.attrelid"
2536 " AND a.attisdropped=%s"
2537 " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
2538 " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
2540 for column in cr.dictfetchall():
2542 self.__logger.debug("column %s is in the table %s but not in the corresponding object %s",
2543 column['attname'], self._table, self._name)
2544 if column['attnotnull']:
# DDL cannot take bound parameters, hence the %-interpolation; identifiers
# come from pg_catalog, not user input.
2545 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
2546 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2547 self._table, column['attname'])
# _auto_init: the schema-migration workhorse — reconcile the PostgreSQL
# table of this model with its _columns definition at module load/update
# time (create table/columns, alter types, manage NOT NULL, indexes and
# foreign keys, schedule stored-function recomputation).
# NOTE(review): this chunk is heavily truncated (embedded original line
# numbers skip throughout — many loop/if headers and else-branches are
# absent); structural comments below are best-effort.
2549 def _auto_init(self, cr, context=None):
2552 Call _field_create and, unless _auto is False:
2554 - create the corresponding table in database for the model,
2555 - possibly add the parent columns in database,
2556 - possibly add the columns 'create_uid', 'create_date', 'write_uid',
2557 'write_date' in database if _log_access is True (the default),
2558 - report on database columns no more existing in _columns,
2559 - remove no more existing not null constraints,
2560 - alter existing database columns to match _columns,
2561 - create database tables to match _columns,
2562 - add database indices to match _columns,
2563 - save in self._foreign_keys a list a foreign keys to create (see
2567 self._foreign_keys = []
2568 raise_on_invalid_object_name(self._name)
2571 store_compute = False
2573 update_custom_fields = context.get('update_custom_fields', False)
2574 self._field_create(cr, context=context)
2575 create = not self._table_exist(cr)
2577 if getattr(self, '_auto', True):
2580 self._create_table(cr)
# parent_left/parent_right columns for nested-set hierarchies; a full
# recompute is scheduled when they are first created.
2583 if self._parent_store:
2584 if not self._parent_columns_exist(cr):
2585 self._create_parent_columns(cr)
2586 store_compute = True
2588 # Create the create_uid, create_date, write_uid, write_date, columns if desired.
2589 if self._log_access:
2590 self._add_log_columns(cr)
2592 self._check_removed_columns(cr, log=False)
2594 # iterate on the "object columns"
2595 column_data = self._select_column_data(cr)
2597 for k, f in self._columns.iteritems():
2598 if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
2600 # Don't update custom (also called manual) fields
2601 if f.manual and not update_custom_fields:
# o2m/m2m have no column on this table: only their referenced side is
# checked/created.
2604 if isinstance(f, fields.one2many):
2605 self._o2m_raise_on_missing_reference(cr, f)
2607 elif isinstance(f, fields.many2many):
2608 self._m2m_raise_or_create_relation(cr, f)
2611 res = column_data.get(k)
2613 # The field is not found as-is in database, try if it
2614 # exists with an old name.
2615 if not res and hasattr(f, 'oldname'):
2616 res = column_data.get(f.oldname)
2618 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
2620 column_data[k] = res
2621 self.__schema.debug("Table '%s': renamed column '%s' to '%s'",
2622 self._table, f.oldname, k)
2624 # The field already exists in database. Possibly
2625 # change its type, rename it, drop it or change its
2628 f_pg_type = res['typname']
2629 f_pg_size = res['size']
2630 f_pg_notnull = res['attnotnull']
# A column that became a non-stored function field is dropped (unless the
# field opts out via `nodrop`).
2631 if isinstance(f, fields.function) and not f.store and\
2632 not getattr(f, 'nodrop', False):
2633 self.__logger.info('column %s (%s) in table %s removed: converted to a function !\n',
2634 k, f.string, self._table)
2635 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
2637 self.__schema.debug("Table '%s': dropped column '%s' with cascade",
2641 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
# Whitelist of in-place type conversions: (pg_type_from, orm_type_to,
# new SQL type, cast suffix used to migrate the data).
2646 ('text', 'char', 'VARCHAR(%d)' % (f.size or 0,), '::VARCHAR(%d)'%(f.size or 0,)),
2647 ('varchar', 'text', 'TEXT', ''),
2648 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2649 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
2650 ('timestamp', 'date', 'date', '::date'),
2651 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2652 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
# Widening a varchar: rename aside, re-add at the new size, copy with a
# cast, drop the temp column.
2654 if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
2655 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2656 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
2657 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size))
2658 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2660 self.__schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
2661 self._table, k, f_pg_size, f.size)
# Whitelisted conversions use the same rename/add/cast/drop dance.
2663 if (f_pg_type==c[0]) and (f._type==c[1]):
2664 if f_pg_type != f_obj_type:
2666 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2667 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
2668 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
2669 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2671 self.__schema.debug("Table '%s': column '%s' changed type from %s to %s",
2672 self._table, k, c[0], c[1])
# Non-convertible type change: park the old data in k_moved<i> and create a
# fresh column of the right type.
2675 if f_pg_type != f_obj_type:
2679 newname = k + '_moved' + str(i)
2680 cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
2681 "WHERE c.relname=%s " \
2682 "AND a.attname=%s " \
2683 "AND c.oid=a.attrelid ", (self._table, newname))
2684 if not cr.fetchone()[0]:
2688 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2689 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
2690 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2691 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2692 self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
2693 self._table, k, f_pg_type, f._type, newname)
2695 # if the field is required and hasn't got a NOT NULL constraint
2696 if f.required and f_pg_notnull == 0:
2697 # set the field to the default value if any
2698 if k in self._defaults:
2699 if callable(self._defaults[k]):
2700 default = self._defaults[k](self, cr, 1, context)
2702 default = self._defaults[k]
2704 if (default is not None):
2705 ss = self._columns[k]._symbol_set
2706 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
2707 cr.execute(query, (ss[1](default),))
2708 # add the NOT NULL constraint
# log_exceptions=False: failure (remaining NULLs) is expected and handled
# by the warning below instead of an error trace.
2711 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2713 self.__schema.debug("Table '%s': column '%s': added NOT NULL constraint",
2716 msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
2717 "If you want to have it, you should update the records and execute manually:\n"\
2718 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2719 self.__schema.warn(msg, self._table, k, self._table, k)
2721 elif not f.required and f_pg_notnull == 1:
2722 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2724 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
# Keep single-column indexes in sync with f.select.
2727 indexname = '%s_%s_index' % (self._table, k)
2728 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
2729 res2 = cr.dictfetchall()
2730 if not res2 and f.select:
2731 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2733 if f._type == 'text':
2734 # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
2735 msg = "Table '%s': Adding (b-tree) index for text column '%s'."\
2736 "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
2737 " because there is a length limit for indexable btree values!\n"\
2738 "Use a search view instead if you simply want to make the field searchable."
2739 self.__schema.warn(msg, self._table, k, f._type)
2740 if res2 and not f.select:
2741 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
2743 msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
2744 self.__schema.debug(msg, self._table, k, f._type)
# Existing m2o column: verify the FK's ON DELETE action in pg_constraint
# matches f.ondelete; if not, drop and re-record it for _auto_end.
2746 if isinstance(f, fields.many2one):
2747 ref = self.pool.get(f._obj)._table
2748 if ref != 'ir_actions':
2749 cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
2750 'pg_attribute as att1, pg_attribute as att2 '
2751 'WHERE con.conrelid = cl1.oid '
2752 'AND cl1.relname = %s '
2753 'AND con.confrelid = cl2.oid '
2754 'AND cl2.relname = %s '
2755 'AND array_lower(con.conkey, 1) = 1 '
2756 'AND con.conkey[1] = att1.attnum '
2757 'AND att1.attrelid = cl1.oid '
2758 'AND att1.attname = %s '
2759 'AND array_lower(con.confkey, 1) = 1 '
2760 'AND con.confkey[1] = att2.attnum '
2761 'AND att2.attrelid = cl2.oid '
2762 'AND att2.attname = %s '
2763 "AND con.contype = 'f'", (self._table, ref, k, 'id'))
2764 res2 = cr.dictfetchall()
2766 if res2[0]['confdeltype'] != POSTGRES_CONFDELTYPES.get(f.ondelete.upper(), 'a'):
2767 cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"')
2768 self._foreign_keys.append((self._table, k, ref, f.ondelete))
2770 self.__schema.debug("Table '%s': column '%s': XXX",
2773 # The field doesn't exist in database. Create it if necessary.
2775 if not isinstance(f, fields.function) or f.store:
2776 # add the missing field
2777 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2778 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2779 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2780 self._table, k, get_pg_type(f)[1])
# Backfill the new column with its default on existing rows (skip on a
# freshly-created table where there are no rows yet).
2783 if not create and k in self._defaults:
2784 if callable(self._defaults[k]):
2785 default = self._defaults[k](self, cr, 1, context)
2787 default = self._defaults[k]
2789 ss = self._columns[k]._symbol_set
2790 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
2791 cr.execute(query, (ss[1](default),))
2793 netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
2795 # remember the functions to call for the stored fields
2796 if isinstance(f, fields.function):
2798 if f.store is not True: # i.e. if f.store is a dict
2799 order = f.store[f.store.keys()[0]][2]
2800 todo_end.append((order, self._update_store, (f, k)))
2802 # and add constraints if needed
2803 if isinstance(f, fields.many2one):
2804 if not self.pool.get(f._obj):
2805 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2806 ref = self.pool.get(f._obj)._table
2807 # ir_actions is inherited so foreign key doesn't work on it
2808 if ref != 'ir_actions':
# FK creation is deferred to _auto_end so referenced tables exist first.
2809 self._foreign_keys.append((self._table, k, ref, f.ondelete))
2810 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2811 self._table, k, ref, f.ondelete)
2813 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2817 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2818 self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
2821 msg = "WARNING: unable to set column %s of table %s not null !\n"\
2822 "Try to re-run: openerp-server --update=module\n"\
2823 "If it doesn't work, update records and execute manually:\n"\
2824 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2825 self.__logger.warn(msg, k, self._table, self._table, k)
# _auto is False (e.g. SQL-view-backed model): only check the relation
# exists; `create` reflects whether it does.
2829 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2830 create = not bool(cr.fetchone())
2832 cr.commit() # start a new transaction
2834 self._add_sql_constraints(cr)
2837 self._execute_sql(cr)
2840 self._parent_store_compute(cr)
2846 def _auto_end(self, cr, context=None):
2847 """ Create the foreign keys recorded by _auto_init. """
2848 for t, k, r, d in self._foreign_keys:
2849 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
2851 del self._foreign_keys
    def _table_exist(self, cr):
        # Probe pg_catalog for a relation (table 'r' or view 'v') backing this model.
        # NOTE(review): the return statement is not visible in this chunk — presumably
        # it returns the query result (e.g. cr.rowcount); confirm against full source.
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
    def _create_table(self, cr):
        # Create the model's backing table with only the implicit 'id' primary key;
        # the actual columns are added later during _auto_init.
        cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
        # Record the model description as a SQL table comment (single quotes doubled for escaping).
        cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''")))
        self.__schema.debug("Table '%s': created", self._table)
    def _parent_columns_exist(self, cr):
        # Check whether the 'parent_left' column already exists on this model's
        # table; used to decide if the parent_left/parent_right pair must be created.
        # NOTE(review): the return statement is not visible in this chunk.
        cr.execute("""SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, 'parent_left'))
    def _create_parent_columns(self, cr):
        # Add the parent_left/parent_right columns used by the parent_store
        # (nested-set) hierarchy, and warn about missing/unindexed field definitions.
        # NOTE(review): several continuation lines of the logger calls are not
        # visible in this chunk; code kept byte-identical.
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
        if 'parent_left' not in self._columns:
            self.__logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
            self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                                self._table, 'parent_left', 'INTEGER')
        elif not self._columns['parent_left'].select:
            self.__logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
        if 'parent_right' not in self._columns:
            self.__logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
            self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                                self._table, 'parent_right', 'INTEGER')
        elif not self._columns['parent_right'].select:
            self.__logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
        if self._columns[self._parent_name].ondelete != 'cascade':
            self.__logger.error("The column %s on object %s must be set as ondelete='cascade'",
                                self._parent_name, self._name)
    def _add_log_columns(self, cr):
        # Add the audit-trail columns (create_uid/date, write_uid/date) when
        # missing; uids reference res_users with ON DELETE SET NULL.
        # NOTE(review): the dict literal opening, the per-column loop and the
        # existence-check execute are not visible in this chunk; code kept
        # byte-identical.
            'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
            'create_date': 'TIMESTAMP',
            'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
            'write_date': 'TIMESTAMP'
                FROM pg_class c, pg_attribute a
                WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                """, (self._table, k))
                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
                self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                                    self._table, k, logs[k])
2919 def _select_column_data(self, cr):
2920 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2921 "FROM pg_class c,pg_attribute a,pg_type t " \
2922 "WHERE c.relname=%s " \
2923 "AND c.oid=a.attrelid " \
2924 "AND a.atttypid=t.oid", (self._table,))
2925 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
2928 def _o2m_raise_on_missing_reference(self, cr, f):
2929 # TODO this check should be a method on fields.one2many.
2930 other = self.pool.get(f._obj)
2932 # TODO the condition could use fields_get_keys().
2933 if f._fields_id not in other._columns.keys():
2934 if f._fields_id not in other._inherit_fields.keys():
2935 raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
    def _m2m_raise_or_create_relation(self, cr, f):
        # Ensure the many2many relation table for field `f` exists; create it,
        # its two indexes and a descriptive COMMENT when missing.
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (f._rel,))
        if not cr.dictfetchall():
            # The comodel must be registered before we can point a FK at its table.
            if not self.pool.get(f._obj):
                raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
            ref = self.pool.get(f._obj)._table
            cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s")) WITH OIDS' % (f._rel, f._id1, f._id2, f._id1, f._id2))
            # FK creation is deferred to _auto_end, like ordinary many2one columns.
            self._foreign_keys.append((f._rel, f._id1, self._table, 'CASCADE'))
            self._foreign_keys.append((f._rel, f._id2, ref, 'CASCADE'))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
            cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref))
            self.__schema.debug("Create table '%s': relation between '%s' and '%s'",
                                f._rel, self._table, ref)
    def _add_sql_constraints(self, cr):
        # Synchronize the table's SQL constraints with self._sql_constraints:
        # missing constraints are added; constraints whose definition changed
        # are dropped then re-added (drop runs before add via the 'order' key).
        # NOTE(review): the docstring close and the sql_actions dict literal
        # opening lines are not visible in this chunk; code kept byte-identical.
        Modify this model's database table constraints so they match the one in
        for (key, con, _) in self._sql_constraints:
            conname = '%s_%s' % (self._table, key)
            cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
            existing_constraints = cr.dictfetchall()
                    'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
                    'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                        self._table, conname, con),
                    'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
                    'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
                    'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
                    'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
            if not existing_constraints:
                # constraint does not exists:
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            elif con.lower() not in [item['condef'].lower() for item in existing_constraints]:
                # constraint exists but its definition has changed:
                sql_actions['drop']['execute'] = True
                sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            # we need to add the constraint:
            sql_actions = [item for item in sql_actions.values()]
            sql_actions.sort(key=lambda x: x['order'])
            for sql_action in [action for action in sql_actions if action['execute']]:
                cr.execute(sql_action['query'])
                self.__schema.debug(sql_action['msg_ok'])
                self.__schema.warn(sql_action['msg_err'])
    def _execute_sql(self, cr):
        """ Execute the SQL code from the _sql attribute (if any)."""
        # Split the optional raw SQL on ';' and strip newlines/whitespace
        # from each statement before execution.
        if hasattr(self, "_sql"):
            for line in self._sql.split(';'):
                line2 = line.replace('\n', '').strip()
                # NOTE(review): the statement actually executing `line2` is not
                # visible in this chunk.
    def createInstance(cls, pool, cr):
        # Factory hook: delegate to makeInstance, which copies the listed class
        # attributes so each model instance gets its own mutable containers.
        # NOTE(review): a @classmethod decorator is expected above this line but
        # is not visible in this chunk.
        return cls.makeInstance(pool, cr, ['_columns', '_defaults',
            '_inherits', '_constraints', '_sql_constraints'])
    def __init__(self, pool, cr):
        # NOTE(review): the opening docstring quotes and several other lines of
        # this constructor are not visible in this chunk; code kept byte-identical.
        - copy the stored fields' functions in the osv_pool,
        - update the _columns with the fields found in ir_model_fields,
        - ensure there is a many2one for each _inherits'd parent,
        - update the children's _columns,
        - give a chance to each field to initialize itself.
        super(orm, self).__init__(pool, cr)
        if not hasattr(self, '_log_access'):
            # if _log_access is not specified, it defaults to the value of _auto
            self._log_access = getattr(self, "_auto", True)
        # Own copy of _columns so later per-model mutations (manual fields below)
        # do not leak into the class-level dict shared by the registry.
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if hasattr(f, 'digits_change'):
            if not isinstance(f, fields.function):
            if self._columns[store_field].store is True:
                # store=True: recompute triggers on this model itself, priority 10.
                sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
                sm = self._columns[store_field].store
            for object, aa in sm.items():
                    (fnct, fields2, order, length) = aa
                    (fnct, fields2, order) = aa
                    raise except_orm('Error',
                        ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                self.pool._store_function.setdefault(object, [])
                for x, y, z, e, f, l in self.pool._store_function[object]:
                    if (x==self._name) and (y==store_field) and (e==fields2):
                self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order, length))
                # Keep triggers sorted by priority (index 4 of each tuple).
                self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
        for (key, _, msg) in self._sql_constraints:
            self.pool._sql_error[self._table+'_'+key] = msg
        # Load manual fields
        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
            cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
            for field in cr.dictfetchall():
                if field['name'] in self._columns:
                    'string': field['field_description'],
                    'required': bool(field['required']),
                    'readonly': bool(field['readonly']),
                    # 'eval' is openerp.tools.safe_eval (aliased in the module imports).
                    'domain': eval(field['domain']) if field['domain'] else None,
                    'size': field['size'],
                    'ondelete': field['on_delete'],
                    'translate': (field['translate']),
                    #'select': int(field['select_level'])
                if field['ttype'] == 'selection':
                    self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
                elif field['ttype'] == 'reference':
                    self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
                elif field['ttype'] == 'many2one':
                    self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
                elif field['ttype'] == 'one2many':
                    self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
                elif field['ttype'] == 'many2many':
                    # Synthesize a relation table name for manual m2m fields.
                    _rel1 = field['relation'].replace('.', '_')
                    _rel2 = field['model'].replace('.', '_')
                    _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
                    self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                    self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
        self._inherits_check()
        self._inherits_reload()
        if not self._sequence:
            self._sequence = self._table + '_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        for f in self._columns:
            self._columns[f].restart()
    __init__.__doc__ = orm_template.__init__.__doc__ + __init__.__doc__
3124 # Update objects that uses this one to update their _inherits fields
3127 def _inherits_reload_src(self):
3128 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3129 for obj in self.pool.models.values():
3130 if self._name in obj._inherits:
3131 obj._inherits_reload()
    def _inherits_reload(self):
        """ Recompute the _inherit_fields mapping.
        This will also call itself on each inherits'd child model.
        # NOTE(review): the docstring close and the initialization of `res`
        # (presumably an empty dict) are not visible in this chunk; code kept
        # byte-identical.
        for table in self._inherits:
            other = self.pool.get(table)
            # Map every parent column (own and inherited) to
            # (parent_model, link_field, column_object).
            res.update(other._inherit_fields)
            for col in other._columns.keys():
                res[col] = (table, self._inherits[table], other._columns[col])
            for col in other._inherit_fields.keys():
                res[col] = (table, self._inherits[table], other._inherit_fields[col][2])
        self._inherit_fields = res
        # Propagate the recomputation down to our own _inherits children.
        self._inherits_reload_src()
3150 def _inherits_check(self):
3151 for table, field_name in self._inherits.items():
3152 if field_name not in self._columns:
3153 logging.getLogger('init').info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.' % (field_name, self._name))
3154 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3155 required=True, ondelete="cascade")
3156 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() != "cascade":
3157 logging.getLogger('init').warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.' % (field_name, self._name))
3158 self._columns[field_name].required = True
3159 self._columns[field_name].ondelete = "cascade"
3161 #def __getattr__(self, name):
3163 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3164 # (though inherits doesn't use Python inheritance).
3165 # Handles translating between local ids and remote ids.
    # Known issue: this does not work correctly when combined with python's own super();
    # avoid mixing _inherit-based inheritance with _inherits when relying on it.
3169 # for model, field in self._inherits.iteritems():
3170 # proxy = self.pool.get(model)
3171 # if hasattr(proxy, name):
3172 # attribute = getattr(proxy, name)
3173 # if not hasattr(attribute, '__call__'):
3177 # return super(orm, self).__getattr__(name)
3179 # def _proxy(cr, uid, ids, *args, **kwargs):
3180 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3181 # lst = [obj[field].id for obj in objects if obj[field]]
3182 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
    def fields_get(self, cr, user, fields=None, context=None):
        Get the description of list of fields
        :param cr: database cursor
        :param user: current user id
        :param fields: list of fields
        :param context: context arguments, like lang, time zone
        :return: dictionary of field dictionaries, each one describing a field of the business object
        :raise AccessError: * if user has no create/write rights on the requested object
        # NOTE(review): the docstring delimiters are not visible in this chunk;
        # code kept byte-identical.
        # Fields are presented as read-only unless the user has write OR create
        # access on the model; that flag is forwarded to the template's fields_get.
        ira = self.pool.get('ir.model.access')
        write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
                       ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
        return super(orm, self).fields_get(cr, user, fields, context, write_access)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        # Model-level read access check, then delegation to _read_flat.
        # NOTE(review): several lines (docstring, the scalar-id wrapping and the
        # result post-processing loop) are not visible in this chunk; code kept
        # byte-identical.
        self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
            # Default: read every own and inherited field.
            fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
        if isinstance(ids, (int, long)):
        # Accept browse_record-style dicts in `ids` by reducing them to their id.
        select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
        result = self._read_flat(cr, user, select, fields, context, load)
        for key, v in r.items():
        # A scalar id in means a single record dict out (or False when empty).
        if isinstance(ids, (int, long, dict)):
            return result and result[0] or False
    def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
        # Low-level read: fetches classic-storage columns via SQL (with ir.rule
        # filtering), then resolves translations, _inherits parents, symbol_get
        # post-processing, function fields, and finally blanks out fields the
        # user's groups may not read.
        # NOTE(review): numerous lines of this method are not visible in this
        # chunk (context default, `res`/`todo` initialization, several loop
        # headers and else-branches); code kept byte-identical.
        if fields_to_read == None:
            fields_to_read = self._columns.keys()

        # Construct a clause for the security rules.
        # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
        # or will at least contain self._table.
        rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

        # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
        fields_pre = [f for f in fields_to_read if
                           f == self.CONCURRENCY_CHECK_FIELD
                        or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                     ] + self._inherits.values()

        def convert_field(f):
            f_qual = "%s.%s" % (self._table, f) # need fully-qualified references in case len(tables) > 1
            if f in ('create_date', 'write_date'):
                return "date_trunc('second', %s) as %s" % (f_qual, f)
            if f == self.CONCURRENCY_CHECK_FIELD:
                if self._log_access:
                    return "COALESCE(%s.write_date, %s.create_date, now())::timestamp AS %s" % (self._table, self._table, f,)
                return "now()::timestamp AS %s" % (f,)
            if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                return 'length(%s) as "%s"' % (f_qual, f)

        fields_pre2 = map(convert_field, fields_pre)
        order_by = self._parent_order or self._order
        select_fields = ','.join(fields_pre2 + [self._table + '.id'])
        query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
            query += " AND " + (' OR '.join(rule_clause))
        query += " ORDER BY " + order_by
        for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute(query, [tuple(sub_ids)] + rule_params)
                # A rowcount mismatch means some ids were filtered by ir.rule
                # or no longer exist.
                if cr.rowcount != len(sub_ids):
                    raise except_orm(_('AccessError'),
                                     _('Operation prohibited by access rules, or performed on an already deleted document (Operation: read, Document type: %s).')
                                     % (self._description,))
                cr.execute(query, (tuple(sub_ids),))
            res.extend(cr.dictfetchall())
            res = map(lambda x: {'id': x}, ids)

        for f in fields_pre:
            if f == self.CONCURRENCY_CHECK_FIELD:
            if self._columns[f].translate:
                ids = [x['id'] for x in res]
                #TODO: optimize out of this loop
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
                    r[f] = res_trans.get(r['id'], False) or r[f]

        for table in self._inherits:
            col = self._inherits[table]
            cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
            # Delegate inherited fields to the parent model's own read().
            res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
                if not record[col]: # if the record is deleted from _inherits table?
                    record.update(res3[record[col]])
                if col not in fields_to_read:

        # all fields which need to be post-processed by a simple function (symbol_get)
        fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
            for f in fields_post:
                r[f] = self._columns[f]._symbol_get(r[f])
        ids = [x['id'] for x in res]

        # all non inherited fields for which the attribute whose name is in load is False
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

        # Compute POST fields
        for f in fields_post:
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key, val in todo.items():
                # multi-fields: one .get() call computes a dict of values per record.
                res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
                assert res2 is not None, \
                    'The function field "%s" on the "%s" model returned None\n' \
                    '(a dictionary was expected).' % (val[0], self._name)
                    if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
                    multi_fields = res2.get(record['id'],{})
                        record[pos] = multi_fields.get(pos,[])
                res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
                    record[f] = res2[record['id']]
        # Blank out fields whose 'groups' attribute excludes the current user.
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
            for group in groups:
                module = group.split(".")[0]
                grp = group.split(".")[1]
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                elif readonly[0][0] == 0:
                # Replace forbidden values with a type-appropriate placeholder.
                if type(vals[field]) == type([]):
                elif type(vals[field]) == type(0.0):
                elif type(vals[field]) == type(''):
                    vals[field] = '=No Permission='
    def perm_read(self, cr, user, ids, context=None, details=True):
        Returns some metadata about the given records.
        :param details: if True, \*_uid fields are replaced with the name of the user
        :return: list of ownership dictionaries for each requested record
        :rtype: list of dictionaries with the following keys:
            * create_uid: user who created the record
            * create_date: date when the record was created
            * write_uid: last user who changed the record
            * write_date: date of the last change to the record
            * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
        # NOTE(review): docstring delimiters, the `fields` initialization and
        # several loop/try lines are not visible in this chunk; code kept
        # byte-identical.
        uniq = isinstance(ids, (int, long))
        if self._log_access:
            fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
        quoted_table = '"%s"' % self._table
        fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
        # LEFT JOIN on ir_model_data recovers the record's XML ID, if any.
        query = '''SELECT %s, __imd.module, __imd.name
                   FROM %s LEFT JOIN ir_model_data __imd
                       ON (__imd.model = %%s and __imd.res_id = %s.id)
                   WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
        cr.execute(query, (self._name, tuple(ids)))
        res = cr.dictfetchall()
            r[key] = r[key] or False
            if details and key in ('write_uid', 'create_uid') and r[key]:
                r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
                pass # Leave the numeric uid there
            r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
            del r['name'], r['module']
    def _check_concurrency(self, cr, ids, context):
        # Optimistic-locking check: compare the client-supplied timestamps
        # (stored under CONCURRENCY_CHECK_FIELD in the context) against the
        # records' write_date/create_date, and raise if any record is newer.
        # NOTE(review): several guard/loop lines are not visible in this chunk;
        # code kept byte-identical.
        if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
        for sub_ids in cr.split_for_in_conditions(ids):
                id_ref = "%s,%s" % (self._name, id)
                update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
                    ids_to_check.extend([id, update_date])
            if not ids_to_check:
            # ids_to_check alternates (id, timestamp) pairs, hence len/2 clauses.
            cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
                # mention the first one only to keep the error message readable
                raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """Verifies that the operation given by ``operation`` is allowed for the user
        according to ir.rules.
        :param operation: one of ``write``, ``unlink``
        :raise except_orm: * if current ir.rules do not permit this operation.
        :return: None if the operation is allowed
        # NOTE(review): the docstring close and the guard around the
        # where_clause concatenation are not visible in this chunk; code kept
        # byte-identical.
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
            where_clause = ' and ' + ' and '.join(where_clause)
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                       ' WHERE ' + self._table + '.id IN %s' + where_clause,
                       [sub_ids] + where_params)
            # Fewer rows than ids means some records were filtered out by the
            # rules (or already deleted).
            if cr.rowcount != len(sub_ids):
                raise except_orm(_('AccessError'),
                                 _('Operation prohibited by access rules, or performed on an already deleted document (Operation: %s, Document type: %s).')
                                 % (operation, self._description))
    def unlink(self, cr, uid, ids, context=None):
        Delete records with given ids
        :param cr: database cursor
        :param uid: current user id
        :param ids: id or list of ids
        :param context: (optional) context arguments, like lang, time zone
        :raise AccessError: * if user has no unlink rights on the requested object
            * if user tries to bypass access rules for unlink on the requested object
        :raise UserError: if the record is default property for other records
        # NOTE(review): docstring delimiters and several guard lines are not
        # visible in this chunk; code kept byte-identical.
        if isinstance(ids, (int, long)):
        # Capture stored-function dependencies before the rows disappear.
        result_store = self._store_get_values(cr, uid, ids, None, context)
        self._check_concurrency(cr, ids, context)
        self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
        # Refuse deletion when a record is referenced as a default ir.property.
        properties = self.pool.get('ir.property')
        domain = [('res_id', '=', False),
                  ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
        if properties.search(cr, uid, domain, context=context):
            raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
        wf_service = netsvc.LocalService("workflow")
            wf_service.trg_delete(uid, self._name, oid, cr)
        self.check_access_rule(cr, uid, ids, 'unlink', context=context)
        pool_model_data = self.pool.get('ir.model.data')
        ir_values_obj = self.pool.get('ir.values')
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('delete from ' + self._table + ' ' \
                       'where id IN %s', (sub_ids,))
            # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
            # as these are not connected with real database foreign keys, and would be dangling references.
            # Step 1. Calling unlink of ir_model_data only for the affected IDS.
            reference_ids = pool_model_data.search(cr, uid, [('res_id','in',list(sub_ids)),('model','=',self._name)], context=context)
            # Step 2. Marching towards the real deletion of referenced records
                pool_model_data.unlink(cr, uid, reference_ids, context=context)
            # For the same reason, removing the record relevant to ir_values
            ir_value_ids = ir_values_obj.search(cr, uid,
                ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
                ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)
        # Recompute stored function fields on other models that referenced
        # the deleted records.
        for order, object, store_ids, fields in result_store:
            if object != self._name:
                obj = self.pool.get(object)
                cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
                rids = map(lambda x: x[0], cr.fetchall())
                    obj._store_set_values(cr, uid, rids, fields, context)
3542 def write(self, cr, user, ids, vals, context=None):
3544 Update records with given ids with the given field values
3546 :param cr: database cursor
3547 :param user: current user id
3549 :param ids: object id or list of object ids to update according to **vals**
3550 :param vals: field values to update, e.g {'field_name': new_field_value, ...}
3551 :type vals: dictionary
3552 :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
3553 :type context: dictionary
3555 :raise AccessError: * if user has no write rights on the requested object
3556 * if user tries to bypass access rules for write on the requested object
3557 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
3558 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
3560 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:
3562 + For a many2many field, a list of tuples is expected.
3563 Here is the list of tuple that are accepted, with the corresponding semantics ::
3565 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
3566 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
3567 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
3568 (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
3569 (4, ID) link to existing record with id = ID (adds a relationship)
3570 (5) unlink all (like using (3,ID) for all linked records)
3571 (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)
3574 [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]
3576 + For a one2many field, a lits of tuples is expected.
3577 Here is the list of tuple that are accepted, with the corresponding semantics ::
3579 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
3580 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
3581 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
3584 [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]
3586 + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
3587 + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
3591 for field in vals.copy():
3593 if field in self._columns:
3594 fobj = self._columns[field]
3595 elif field in self._inherit_fields:
3596 fobj = self._inherit_fields[field][2]
3603 for group in groups:
3604 module = group.split(".")[0]
3605 grp = group.split(".")[1]
3606 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
3607 (grp, module, 'res.groups', user))
3608 readonly = cr.fetchall()
3609 if readonly[0][0] >= 1:
3612 elif readonly[0][0] == 0:
3624 if isinstance(ids, (int, long)):
3627 self._check_concurrency(cr, ids, context)
3628 self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
3630 result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
3632 # No direct update of parent_left/right
3633 vals.pop('parent_left', None)
3634 vals.pop('parent_right', None)
3636 parents_changed = []
3637 parent_order = self._parent_order or self._order
3638 if self._parent_store and (self._parent_name in vals):
3639 # The parent_left/right computation may take up to
3640 # 5 seconds. No need to recompute the values if the
3641 # parent is the same.
3642 # Note: to respect parent_order, nodes must be processed in
3643 # order, so ``parents_changed`` must be ordered properly.
3644 parent_val = vals[self._parent_name]
3646 query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
3647 (self._table, self._parent_name, self._parent_name, parent_order)
3648 cr.execute(query, (tuple(ids), parent_val))
3650 query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
3651 (self._table, self._parent_name, parent_order)
3652 cr.execute(query, (tuple(ids),))
3653 parents_changed = map(operator.itemgetter(0), cr.fetchall())
3660 totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
3662 if field in self._columns:
3663 if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
3664 if (not totranslate) or not self._columns[field].translate:
3665 upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
3666 upd1.append(self._columns[field]._symbol_set[1](vals[field]))
3667 direct.append(field)
3669 upd_todo.append(field)
3671 updend.append(field)
3672 if field in self._columns \
3673 and hasattr(self._columns[field], 'selection') \
3675 self._check_selection_field_value(cr, user, field, vals[field], context=context)
3677 if self._log_access:
3678 upd0.append('write_uid=%s')
3679 upd0.append('write_date=now()')
3683 self.check_access_rule(cr, user, ids, 'write', context=context)
3684 for sub_ids in cr.split_for_in_conditions(ids):
3685 cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
3686 'where id IN %s', upd1 + [sub_ids])
3687 if cr.rowcount != len(sub_ids):
3688 raise except_orm(_('AccessError'),
3689 _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
3694 if self._columns[f].translate:
3695 src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
3698 # Inserting value to DB
3699 self.write(cr, user, ids, {f: vals[f]})
3700 self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
3703 # call the 'set' method of fields which are not classic_write
3704 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
3706 # default element in context must be removed when call a one2many or many2many
3707 rel_context = context.copy()
3708 for c in context.items():
3709 if c[0].startswith('default_'):
3710 del rel_context[c[0]]
3712 for field in upd_todo:
3714 result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
3716 for table in self._inherits:
3717 col = self._inherits[table]
3719 for sub_ids in cr.split_for_in_conditions(ids):
3720 cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
3721 'where id IN %s', (sub_ids,))
3722 nids.extend([x[0] for x in cr.fetchall()])
3726 if self._inherit_fields[val][0] == table:
3729 self.pool.get(table).write(cr, user, nids, v, context)
3731 self._validate(cr, user, ids, context)
3733 # TODO: use _order to set dest at the right position and not first node of parent
3734 # We can't defer parent_store computation because the stored function
3735 # fields that are computer may refer (directly or indirectly) to
3736 # parent_left/right (via a child_of domain)
3739 self.pool._init_parent[self._name] = True
3741 order = self._parent_order or self._order
3742 parent_val = vals[self._parent_name]
3744 clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
3746 clause, params = '%s IS NULL' % (self._parent_name,), ()
3748 for id in parents_changed:
3749 cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
3750 pleft, pright = cr.fetchone()
3751 distance = pright - pleft + 1
3753 # Positions of current siblings, to locate proper insertion point;
3754 # this can _not_ be fetched outside the loop, as it needs to be refreshed
3755 # after each update, in case several nodes are sequentially inserted one
3756 # next to the other (i.e computed incrementally)
3757 cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
3758 parents = cr.fetchall()
3760 # Find Position of the element
3762 for (parent_pright, parent_id) in parents:
3765 position = parent_pright + 1
3767 # It's the first node of the parent
3772 cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
3773 position = cr.fetchone()[0] + 1
3775 if pleft < position <= pright:
3776 raise except_orm(_('UserError'), _('Recursivity Detected.'))
3778 if pleft < position:
3779 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
3780 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
3781 cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
3783 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
3784 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
3785 cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))
3787 result += self._store_get_values(cr, user, ids, vals.keys(), context)
3791 for order, object, ids_to_update, fields_to_recompute in result:
3792 key = (object, tuple(fields_to_recompute))
3793 done.setdefault(key, {})
3794 # avoid to do several times the same computation
3796 for id in ids_to_update:
3797 if id not in done[key]:
3798 done[key][id] = True
3800 self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)
3802 wf_service = netsvc.LocalService("workflow")
3804 wf_service.trg_write(user, self._name, id, cr)
3808 # TODO: Should set perm to user.xxx
# create(): inserts one new record for this model, cascading creation of
# parent records for every _inherits table, applying defaults, then running
# fields.function setters, validation, stored-function recomputation and the
# workflow 'create' trigger.  (NOTE: several original lines are not visible
# in this chunk; comments below describe only what the visible code shows.)
3810 def create(self, cr, user, vals, context=None):
3812 Create a new record for the model.
3814 The values for the new record are initialized using the ``vals``
3815 argument, and if necessary the result of ``default_get()``.
3817 :param cr: database cursor
3818 :param user: current user id
3820 :param vals: field values for new record, e.g {'field_name': field_value, ...}
3821 :type vals: dictionary
3822 :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
3823 :type context: dictionary
3824 :return: id of new record created
3825 :raise AccessError: * if user has no create rights on the requested object
3826 * if user tries to bypass access rules for create on the requested object
3827 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
3828 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
3830 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
3831 Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
# ACL check first: raises AccessError when the user has no 'create' right.
3837 self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)
3839 vals = self._add_missing_default_values(cr, user, vals, context)
# Split ``vals`` between this table and the _inherits parent tables:
# if the parent link field is already in vals, reuse that parent record.
3842 for v in self._inherits:
3843 if self._inherits[v] not in vals:
3846 tocreate[v] = {'id': vals[self._inherits[v]]}
3847 (upd0, upd1, upd2) = ('', '', [])
3849 for v in vals.keys():
3850 if v in self._inherit_fields:
3851 (table, col, col_detail) = self._inherit_fields[v]
3852 tocreate[table][v] = vals[v]
3855 if (v not in self._inherit_fields) and (v not in self._columns):
3858 # Try-except added to filter the creation of those records whose filds are readonly.
3859 # Example : any dashboard which has all the fields readonly.(due to Views(database views))
# Allocate the new id from the model's sequence; fails for read-only
# reporting objects backed by database views (no sequence/insert allowed).
3861 cr.execute("SELECT nextval('"+self._sequence+"')")
3863 raise except_orm(_('UserError'),
3864 _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
3866 id_new = cr.fetchone()[0]
# Create (or update) the _inherits parent records and store their ids
# in the link columns of the new record.
3867 for table in tocreate:
3868 if self._inherits[table] in vals:
3869 del vals[self._inherits[table]]
3871 record_id = tocreate[table].pop('id', None)
3873 if record_id is None or not record_id:
3874 record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
3876 self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)
3878 upd0 += ',' + self._inherits[table]
3880 upd2.append(record_id)
3882 #Start : Set bool fields to be False if they are not touched(to make search more powerful)
3883 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
3885 for bool_field in bool_fields:
3886 if bool_field not in vals:
3887 vals[bool_field] = False
3889 for field in vals.copy():
3891 if field in self._columns:
3892 fobj = self._columns[field]
3894 fobj = self._inherit_fields[field][2]
3900 for group in groups:
3901 module = group.split(".")[0]
3902 grp = group.split(".")[1]
# NOTE(review): this query is built with Python '%' string interpolation,
# unlike the equivalent group-membership check in write() which passes the
# values as query parameters.  That is a SQL injection risk if a field's
# ``groups`` attribute ever contains untrusted text — should be
# parameterized the same way as in write().  TODO confirm and fix.
3903 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
3904 (grp, module, 'res.groups', user))
3905 readonly = cr.fetchall()
3906 if readonly[0][0] >= 1:
3909 elif readonly[0][0] == 0:
# Classic (plain SQL column) fields go directly into the INSERT statement;
# non-classic fields (function/o2m/m2m...) are deferred to upd_todo below.
3917 if self._columns[field]._classic_write:
3918 upd0 = upd0 + ',"' + field + '"'
3919 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
3920 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
3922 if not isinstance(self._columns[field], fields.related):
3923 upd_todo.append(field)
3924 if field in self._columns \
3925 and hasattr(self._columns[field], 'selection') \
3927 self._check_selection_field_value(cr, user, field, vals[field], context=context)
3928 if self._log_access:
3929 upd0 += ',create_uid,create_date'
3932 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
3933 self.check_access_rule(cr, user, [id_new], 'create', context=context)
# Process deferred setters in field-priority order.
3934 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
# parent_left/parent_right maintenance for _parent_store models; during
# module init the full recomputation is just flagged for later.
3936 if self._parent_store and not context.get('defer_parent_store_computation'):
3938 self.pool._init_parent[self._name] = True
3940 parent = vals.get(self._parent_name, False)
3942 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
3944 result_p = cr.fetchall()
3945 for (pleft,) in result_p:
3950 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
3951 pleft_old = cr.fetchone()[0]
3954 cr.execute('select max(parent_right) from '+self._table)
3955 pleft = cr.fetchone()[0] or 0
# Shift the interval tree right by 2 and insert the new leaf node.
3956 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
3957 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
3958 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
3960 # default element in context must be remove when call a one2many or many2many
3961 rel_context = context.copy()
3962 for c in context.items():
3963 if c[0].startswith('default_'):
3964 del rel_context[c[0]]
3967 for field in upd_todo:
3968 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
3969 self._validate(cr, user, [id_new], context)
# Recompute stored function fields triggered by this creation (unless the
# caller explicitly disabled it via the context flag).
3971 if not context.get('no_store_function', False):
3972 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
3975 for order, object, ids, fields2 in result:
3976 if not (object, ids, fields2) in done:
3977 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
3978 done.append((object, ids, fields2))
# Audit log entry and workflow instantiation for the new record.
3980 if self._log_create and not (context and context.get('no_store_function', False)):
3981 message = self._description + \
3983 self.name_get(cr, user, [id_new], context=context)[0][1] + \
3984 "' " + _("created.")
3985 self.log(cr, user, id_new, message, True, context=context)
3986 wf_service = netsvc.LocalService("workflow")
3987 wf_service.trg_create(user, self._name, id_new, cr)
3990 def _store_get_values(self, cr, uid, ids, fields, context):
3991 """Returns an ordered list of fields.functions to call due to
3992 an update operation on ``fields`` of records with ``ids``,
3993 obtained by calling the 'store' functions of these fields,
3994 as setup by their 'store' attribute.
3996 :return: [(priority, model_name, [record_ids,], [function_fields,])]
3998 # FIXME: rewrite, cleanup, use real variable names
3999 # e.g.: http://pastie.org/1222060
# Each entry of _store_function appears (from the indexing below) to be a
# tuple of (target model, field name, ids-mapping function, trigger
# fields, priority, ...) — TODO confirm against _store_function setup.
4001 fncts = self.pool._store_function.get(self._name, [])
4002 for fnct in range(len(fncts)):
# Keep only the store-triggers whose watched fields intersect the
# modified ``fields``.
4007 for f in (fields or []):
4008 if f in fncts[fnct][3]:
4014 result.setdefault(fncts[fnct][0], {})
4016 # uid == 1 for accessing objects having rules defined on store fields
# Map the modified ids to the target-model ids that must be recomputed.
4017 ids2 = fncts[fnct][2](self, cr, 1, ids, context)
4018 for id in filter(None, ids2):
4019 result[fncts[fnct][0]].setdefault(id, [])
4020 result[fncts[fnct][0]][id].append(fnct)
# Regroup: ids sharing the same set of triggers are batched together,
# then bucketed by priority (fncts[...][4]).
4022 for object in result:
4024 for id, fnct in result[object].items():
4025 k2.setdefault(tuple(fnct), [])
4026 k2[tuple(fnct)].append(id)
4027 for fnct, id in k2.items():
# NOTE(review): the accumulator is named ``dict``, shadowing the builtin
# inside this function — worth renaming in a future cleanup.
4028 dict.setdefault(fncts[fnct[0]][4], [])
4029 dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4], object, id, map(lambda x: fncts[x][1], fnct)))
4037 def _store_set_values(self, cr, uid, ids, fields, context):
4038 """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
4039 respecting ``multi`` attributes), and stores the resulting values in the database directly."""
# When write-dates are tracked, skip recomputation of fields whose
# stored value is still "fresh" according to the trigger's time window
# (i[5], in hours) relative to the record's last write_date.
4044 if self._log_access:
4045 cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
4049 field_dict.setdefault(r[0], [])
4050 res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
4051 write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
4052 for i in self.pool._store_function.get(self._name, []):
4054 up_write_date = write_date + datetime.timedelta(hours=i[5])
4055 if datetime.datetime.now() < up_write_date:
# Field is still within its freshness window: mark it as to-skip
# for this record id.
4057 field_dict[r[0]].append(i[1])
# Group fields by their ``_multi`` key so a multi-field function is
# computed once for the whole group.
4063 if self._columns[f]._multi not in keys:
4064 keys.append(self._columns[f]._multi)
4065 todo.setdefault(self._columns[f]._multi, [])
4066 todo[self._columns[f]._multi].append(f)
4070 # uid == 1 for accessing objects having rules defined on store fields
# Multi-field branch: one .get() call returns a dict of values per id.
4071 result = self._columns[val[0]].get(cr, self, ids, val, 1, context=context)
4072 for id, value in result.items():
4074 for f in value.keys():
# Drop values whose recomputation was skipped above.
4075 if f in field_dict[id]:
4082 if self._columns[v]._type in ('many2one', 'one2one'):
# Normalize (id, name) tuples returned for relational values to
# the bare database id before writing.
4084 value[v] = value[v][0]
4087 upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
4088 upd1.append(self._columns[v]._symbol_set[1](value[v]))
4091 cr.execute('update "' + self._table + '" set ' + \
4092 ','.join(upd0) + ' where id = %s', upd1)
4096 # uid == 1 for accessing objects having rules defined on store fields
# Single-field branch: .get() returns one value per id for field ``f``.
4097 result = self._columns[f].get(cr, self, ids, f, 1, context=context)
4098 for r in result.keys():
4100 if r in field_dict.keys():
4101 if f in field_dict[r]:
4103 for id, value in result.items():
4104 if self._columns[f]._type in ('many2one', 'one2one'):
# Direct per-record UPDATE of the stored column.
4109 cr.execute('update "' + self._table + '" set ' + \
4110 '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
# Deprecated stub kept for API compatibility: always raises to signal that
# per-record permission writing was removed from the ORM.
4116 def perm_write(self, cr, user, ids, fields, context=None):
4117 raise NotImplementedError(_('This method does not exist anymore'))
4119 # TODO: ameliorer avec NULL
4119 # TODO: ameliorer avec NULL
4120 def _where_calc(self, cr, user, domain, active_test=True, context=None):
4121 """Computes the WHERE clause needed to implement an OpenERP domain.
4122 :param domain: the domain to compute
4124 :param active_test: whether the default filtering of records with ``active``
4125 field set to ``False`` should be applied.
4126 :return: the query expressing the given domain as provided in domain
4127 :rtype: osv.query.Query
4132 # if the object has a field named 'active', filter out all inactive
4133 # records unless they were explicitely asked for
4134 if 'active' in self._columns and (active_test and context.get('active_test', True)):
# Only prepend the implicit active filter when the caller's domain does
# not already constrain 'active' itself.
4136 active_in_args = False
4138 if a[0] == 'active':
4139 active_in_args = True
4140 if not active_in_args:
# NOTE(review): this mutates the caller-supplied ``domain`` list in
# place — callers reusing the same domain object will see the extra
# ('active', '=', 1) leaf.  Consider copying before insert.
4141 domain.insert(0, ('active', '=', 1))
4143 domain = [('active', '=', 1)]
# Delegate the actual domain-to-SQL translation to the expression parser;
# it also reports which tables must appear in the FROM clause.
4147 e = expression.expression(domain)
4148 e.parse(cr, user, self, context)
4149 tables = e.get_tables()
4150 where_clause, where_params = e.to_sql()
4151 where_clause = where_clause and [where_clause] or []
# Empty domain: no WHERE clause, FROM is just this model's table.
4153 where_clause, where_params, tables = [], [], ['"%s"' % self._table]
4155 return Query(tables, where_clause, where_params)
# Validate a user-supplied ORDER BY specification against the whitelist
# regex (comma-separated field names, optional asc/desc) before it is ever
# concatenated into SQL — this is the injection guard for order clauses.
4157 def _check_qorder(self, word):
4158 if not regex_order.match(word):
4159 raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
4162 def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
4163 """Add what's missing in ``query`` to implement all appropriate ir.rules
4164 (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)
4166 :param query: the current query object
# Helper: merge one rule's (clause, params, tables) triple into ``query``,
# adding the JOIN to an _inherits parent table when needed.
4168 def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
4170 if parent_model and child_object:
4171 # as inherited rules are being applied, we need to add the missing JOIN
4172 # to reach the parent table (if it was not JOINed yet in the query)
4173 child_object._inherits_join_add(parent_model, query)
4174 query.where_clause += added_clause
4175 query.where_clause_params += added_params
4176 for table in added_tables:
4177 if table not in query.tables:
4178 query.tables.append(table)
4182 # apply main rules on the object
4183 rule_obj = self.pool.get('ir.rule')
4184 apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))
4186 # apply ir.rules from the parents (through _inherits)
4187 for inherited_model in self._inherits:
4188 kwargs = dict(parent_model=inherited_model, child_object=self) #workaround for python2.5
4189 apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
4191 def _generate_m2o_order_by(self, order_field, query):
4193 Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
4194 either native m2o fields or function/related fields that are stored, including
4195 intermediate JOINs for inheritance if required.
4197 :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
# Resolve the column: inherited fields need the JOIN chain to the parent
# table first; native columns are qualified on this model's table.
4199 if order_field not in self._columns and order_field in self._inherit_fields:
4200 # also add missing joins for reaching the table containing the m2o field
4201 qualified_field = self._inherits_join_calc(order_field, query)
4202 order_field_column = self._inherit_fields[order_field][2]
4204 qualified_field = '"%s"."%s"' % (self._table, order_field)
4205 order_field_column = self._columns[order_field]
4207 assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
# Non-stored function/related m2o fields have no SQL column to sort on:
# log and silently skip sorting for them.
4208 if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
4209 logging.getLogger('orm.search').debug("Many2one function/related fields must be stored " \
4210 "to be used as ordering fields! Ignoring sorting for %s.%s",
4211 self._name, order_field)
4214 # figure out the applicable order_by for the m2o
4215 dest_model = self.pool.get(order_field_column._obj)
4216 m2o_order = dest_model._order
4217 if not regex_order.match(m2o_order):
4218 # _order is complex, can't use it here, so we default to _rec_name
4219 m2o_order = dest_model._rec_name
4221 # extract the field names, to be able to qualify them and add desc/asc
4223 for order_part in m2o_order.split(","):
# Drop any asc/desc suffix: the direction is applied by the caller.
4224 m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
4225 m2o_order = m2o_order_list
4227 # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
4228 # as we don't want to exclude results that have NULL values for the m2o
4229 src_table, src_field = qualified_field.replace('"','').split('.', 1)
4230 query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
# Return either one qualified name or a list of them, depending on
# whether the destination _order had one or several parts.
4231 qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
4232 return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
4235 def _generate_order_by(self, order_spec, query):
4237 Attempt to consruct an appropriate ORDER BY clause based on order_spec, which must be
4238 a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.
4240 :raise" except_orm in case order_spec is malformed
# Default to the model's _order when no usable spec is produced below.
4242 order_by_clause = self._order
4244 order_by_elements = []
# Reject malformed/unsafe specs before any SQL is built from them.
4245 self._check_qorder(order_spec)
4246 for order_part in order_spec.split(','):
4247 order_split = order_part.strip().split(' ')
4248 order_field = order_split[0].strip()
4249 order_direction = order_split[1].strip() if len(order_split) == 2 else ''
4251 if order_field == 'id':
4252 order_by_clause = '"%s"."%s"' % (self._table, order_field)
4253 elif order_field in self._columns:
4254 order_column = self._columns[order_field]
# Plain SQL columns are qualified directly; m2o columns need a JOIN
# to the destination table (may yield a list of clauses).
4255 if order_column._classic_read:
4256 inner_clause = '"%s"."%s"' % (self._table, order_field)
4257 elif order_column._type == 'many2one':
4258 inner_clause = self._generate_m2o_order_by(order_field, query)
4260 continue # ignore non-readable or "non-joinable" fields
4261 elif order_field in self._inherit_fields:
# Same logic for fields coming from an _inherits parent model.
4262 parent_obj = self.pool.get(self._inherit_fields[order_field][0])
4263 order_column = parent_obj._columns[order_field]
4264 if order_column._classic_read:
4265 inner_clause = self._inherits_join_calc(order_field, query)
4266 elif order_column._type == 'many2one':
4267 inner_clause = self._generate_m2o_order_by(order_field, query)
4269 continue # ignore non-readable or "non-joinable" fields
4271 if isinstance(inner_clause, list):
4272 for clause in inner_clause:
4273 order_by_elements.append("%s %s" % (clause, order_direction))
4275 order_by_elements.append("%s %s" % (inner_clause, order_direction))
4276 if order_by_elements:
4277 order_by_clause = ",".join(order_by_elements)
# Returns either '' or a clause with surrounding spaces, ready to be
# concatenated directly into the SELECT statement.
4279 return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
4281 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
4283 Private implementation of search() method, allowing specifying the uid to use for the access right check.
4284 This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
4285 by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
4286 This is ok at the security level because this method is private and not callable through XML-RPC.
4288 :param access_rights_uid: optional user ID to use when checking access rights
4289 (not for ir.rules, this is only for ir.model.access)
# Model-level ACL may be checked as a different uid, but ir.rules below
# are always applied for the real ``user``.
4293 self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)
4295 query = self._where_calc(cr, user, args, context=context)
4296 self._apply_ir_rules(cr, user, query, 'read', context=context)
4297 order_by = self._generate_order_by(order, query)
4298 from_clause, where_clause, where_clause_params = query.get_sql()
# limit/offset are formatted with %d (forced to int), the rest of the
# query comes from the validated Query object and order generator.
4300 limit_str = limit and ' limit %d' % limit or ''
4301 offset_str = offset and ' offset %d' % offset or ''
4302 where_str = where_clause and (" WHERE %s" % where_clause) or ''
# count=True short-circuits to a COUNT query instead of fetching ids.
4305 cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
4308 cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
4310 return [x[0] for x in res]
4312 # returns the different values ever entered for one field
4313 # this is used, for example, in the client when the user hits enter on
# a search field: the field object itself implements the actual lookup.
4315 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
# Inherited fields delegate to the parent model that really owns them.
4318 if field in self._inherit_fields:
4319 return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
4321 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
4323 def copy_data(self, cr, uid, id, default=None, context=None):
4325 Copy given record's data with all its fields values
4327 :param cr: database cursor
4328 :param user: current user id
4329 :param id: id of the record to copy
4330 :param default: field values to override in the original values of the copied record
4331 :type default: dictionary
4332 :param context: context arguments, like lang, time zone
4333 :type context: dictionary
4334 :return: dictionary containing all the field values
4340 # avoid recursion through already copied records in case of circular relationship
# The seen-map lives in the (shared, mutated) context so nested
# copy_data() calls across models all see it.
4341 seen_map = context.setdefault('__copy_data_seen',{})
4342 if id in seen_map.setdefault(self._name,[]):
4344 seen_map[self._name].append(id)
# 'state' is reset to its default rather than copied, so the duplicate
# starts its lifecycle from scratch.
4348 if 'state' not in default:
4349 if 'state' in self._defaults:
4350 if callable(self._defaults['state']):
4351 default['state'] = self._defaults['state'](self, cr, uid, context)
4353 default['state'] = self._defaults['state']
# Read raw values without translation so the copy gets the source-language
# terms; translations are copied separately by copy_translations().
4355 context_wo_lang = context.copy()
4356 if 'lang' in context:
4357 del context_wo_lang['lang']
4358 data = self.read(cr, uid, [id,], context=context_wo_lang)
4362 raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
4364 fields = self.fields_get(cr, uid, context=context)
4366 ftype = fields[f]['type']
# Audit columns are never copied; they will be set fresh on create.
4368 if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
4372 data[f] = default[f]
4373 elif 'function' in fields[f]:
4375 elif ftype == 'many2one':
# read() returns (id, name) for m2o: keep only the id for create().
4377 data[f] = data[f] and data[f][0]
4380 elif ftype in ('one2many', 'one2one'):
4382 rel = self.pool.get(fields[f]['relation'])
4384 # duplicate following the order of the ids
4385 # because we'll rely on it later for copying
4386 # translations in copy_translation()!
4388 for rel_id in data[f]:
4389 # the lines are first duplicated using the wrong (old)
4390 # parent but then are reassigned to the correct one thanks
4391 # to the (0, 0, ...)
4392 d = rel.copy_data(cr, uid, rel_id, context=context)
4394 res.append((0, 0, d))
4396 elif ftype == 'many2many':
# m2m links are re-created with the (6, 0, ids) "replace all" command.
4397 data[f] = [(6, 0, data[f])]
4401 # make sure we don't break the current parent_store structure and
4402 # force a clean recompute!
4403 for parent_column in ['parent_left', 'parent_right']:
4404 data.pop(parent_column, None)
# _inherits link fields are dropped so create() builds new parent records.
4406 for v in self._inherits:
4407 del data[self._inherits[v]]
# Duplicate the ir.translation rows of ``old_id`` onto ``new_id``,
# recursing into one2one/one2many children (matched by sorted id order,
# as guaranteed by copy_data()'s ordered duplication).
4410 def copy_translations(self, cr, uid, old_id, new_id, context=None):
4414 # avoid recursion through already copied records in case of circular relationship
4415 seen_map = context.setdefault('__copy_translations_seen',{})
4416 if old_id in seen_map.setdefault(self._name,[]):
4418 seen_map[self._name].append(old_id)
4420 trans_obj = self.pool.get('ir.translation')
4421 fields = self.fields_get(cr, uid, context=context)
4423 translation_records = []
4424 for field_name, field_def in fields.items():
4425 # we must recursively copy the translations for o2o and o2m
4426 if field_def['type'] in ('one2one', 'one2many'):
4427 target_obj = self.pool.get(field_def['relation'])
4428 old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
4429 # here we rely on the order of the ids to match the translations
4430 # as foreseen in copy_data()
4431 old_children = sorted(old_record[field_name])
4432 new_children = sorted(new_record[field_name])
4433 for (old_child, new_child) in zip(old_children, new_children):
4434 target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
4435 # and for translatable fields we keep them for copy
4436 elif field_def.get('translate'):
# Translation key: inherited fields are registered under the parent
# model's name, own columns under this model's name.
4438 if field_name in self._columns:
4439 trans_name = self._name + "," + field_name
4440 elif field_name in self._inherit_fields:
4441 trans_name = self._inherit_fields[field_name][0] + "," + field_name
4443 trans_ids = trans_obj.search(cr, uid, [
4444 ('name', '=', trans_name),
4445 ('res_id', '=', old_id)
4447 translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))
# Re-create every collected translation row pointing at the new record.
4449 for record in translation_records:
4451 record['res_id'] = new_id
4452 trans_obj.create(cr, uid, record, context=context)
4455 def copy(self, cr, uid, id, default=None, context=None):
4457 Duplicate record with given id updating it with default values
4459 :param cr: database cursor
4460 :param uid: current user id
4461 :param id: id of the record to copy
4462 :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
4463 :type default: dictionary
4464 :param context: context arguments, like lang, time zone
4465 :type context: dictionary
# Work on a copy of the context: copy_data()/copy_translations() store
# their recursion-guard maps in it.
4471 context = context.copy()
# Three-step duplication: extract values, create the clone, then clone
# the translations of all translatable fields.
4472 data = self.copy_data(cr, uid, id, default, context)
4473 new_id = self.create(cr, uid, data, context)
4474 self.copy_translations(cr, uid, id, new_id, context)
# Return True only when ALL the given ids exist in this model's table
# (compares the COUNT against len(ids)).
# NOTE(review): duplicate ids in ``ids`` would make len(ids) exceed the
# distinct row count and yield a false negative — TODO confirm callers
# never pass duplicates.
4477 def exists(self, cr, uid, ids, context=None):
4478 if type(ids) in (int, long):
# NOTE(review): the concatenation below has no space before WHERE; it
# only parses because the closing double-quote of the table name ends
# the identifier token. A leading space in "WHERE..." would be clearer.
4480 query = 'SELECT count(1) FROM "%s"' % (self._table)
4481 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
4482 return cr.fetchone()[0] == len(ids)
# Deprecated public wrapper around _check_recursion(); emits a
# DeprecationWarning pointing callers to the private method.
4484 def check_recursion(self, cr, uid, ids, context=None, parent=None):
4485 warnings.warn("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
4486 self._name, DeprecationWarning, stacklevel=3)
4487 assert parent is None or parent in self._columns or parent in self._inherit_fields,\
4488 "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
4489 return self._check_recursion(cr, uid, ids, context, parent)
4491 def _check_recursion(self, cr, uid, ids, context=None, parent=None):
4493 Verifies that there is no loop in a hierarchical structure of records,
4494 by following the parent relationship using the **parent** field until a loop
4495 is detected or until a top-level record is found.
4497 :param cr: database cursor
4498 :param uid: current user id
4499 :param ids: list of ids of records to check
4500 :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
4501 :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
4505 parent = self._parent_name
4507 query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
# Breadth-first walk up the hierarchy: replace the current frontier of
# ids by the distinct set of their parents, chunked by cr.IN_MAX to keep
# each IN clause within the cursor's limit.
4510 for i in range(0, len(ids), cr.IN_MAX):
4511 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
4512 cr.execute(query, (tuple(sub_ids_parent),))
# filter(None, ...) drops NULL parents (top-level records).
4513 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
4514 ids_parent = ids_parent2
4515 for i in ids_parent:
4520 def _get_xml_ids(self, cr, uid, ids, *args, **kwargs):
4521 """Find out the XML ID(s) of any database record.
4523 **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
4525 :return: map of ids to the list of their fully qualified XML IDs
4526 (empty list when there's none).
# Look up all ir.model.data rows referencing these records, then format
# each as the fully-qualified 'module.name' external identifier.
4528 model_data_obj = self.pool.get('ir.model.data')
4529 data_ids = model_data_obj.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
4530 data_results = model_data_obj.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
4533 # can't use dict.fromkeys() as the list would be shared!
4535 for record in data_results:
4536 result[record['res_id']].append('%(module)s.%(name)s' % record)
4539 def get_xml_id(self, cr, uid, ids, *args, **kwargs):
4540 """Find out the XML ID of any database record, if there
4541 is one. This method works as a possible implementation
4542 for a function field, to be able to add it to any
4543 model object easily, referencing it as ``osv.osv.get_xml_id``.
4545 When multiple XML IDs exist for a record, only one
4546 of them is returned (randomly).
4548 **Synopsis**: ``get_xml_id(cr, uid, ids) -> { 'id': 'module.xml_id' }``
4550 :return: map of ids to their fully qualified XML ID,
4551 defaulting to an empty string when there's none
4552 (to be usable as a function field).
# Flatten the per-id XML-ID lists from _get_xml_ids() down to a single
# value per id (first entry, or '' when none).
4554 results = self._get_xml_ids(cr, uid, ids)
4555 for k, v in results.items():
4562 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: