1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
# Object relational mapping to postgresql module
24 # . Hierarchical structure
25 # . Constraints consistency, validations
26 # . Object meta Data depends on its status
27 # . Optimised processing by complex query (multiple actions at once)
28 # . Default fields value
29 # . Permissions optimisation
#     . Persistent object: DB postgresql
32 # . Multi-level caching system
#     . 2 different inheritance mechanisms
35 # - classicals (varchar, integer, boolean, ...)
36 # - relations (one2many, many2one, many2many)
52 import openerp.netsvc as netsvc
53 from lxml import etree
54 from openerp.tools.config import config
55 from openerp.tools.translate import _
58 from query import Query
59 import openerp.tools as tools
60 from openerp.tools.safe_eval import safe_eval as eval
62 # List of etree._Element subclasses that we choose to ignore when parsing XML.
63 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Accepts an ORDER BY spec: comma-separated (optionally double-quoted)
# lowercase identifiers, each with an optional "asc"/"desc" (case-insensitive).
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model names: lowercase alphanumerics plus '_' and '.' only.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
# Mapping between openerp module names and their osv classes.
module_class_list = {}
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

        The _name attribute in osv and osv_memory object is subject to
        some restrictions. This function returns True or False whether
        the given name is allowed or not.

        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in other not, which leads to this kind
        of errors: psycopg2.ProgrammingError: relation "xxx" does not exist).

        The same restriction should apply to both osv and osv_memory
        objects for consistency.

    """
    # A single expression replaces the truncated if/return pair: the
    # docstring contract is "True or False whether the name is allowed".
    return regex_object_name.match(name) is not None
def raise_on_invalid_object_name(name):
    """Validate *name* as a model name; log and raise except_orm when invalid."""
    if check_object_name(name):
        return
    msg = "The _name attribute %s is not valid." % name
    log = netsvc.Logger()
    log.notifyChannel('orm', netsvc.LOG_ERROR, msg)
    raise except_orm('ValueError', msg)
# Presumably maps pg_constraint.confdeltype single-letter codes to their
# ON DELETE action names — TODO confirm; the dict body is not visible here.
POSTGRES_CONFDELTYPES = {
def last_day_of_current_month():
    """Return the current year and month with the month's final day, as 'YYYY-MM-DD'."""
    now = datetime.date.today()
    month_length = calendar.monthrange(now.year, now.month)[1]
    # strftime fills in the zero-padded current year and month.
    return time.strftime('%Y-%m-' + str(month_length))
def intersect(la, lb):
    """Return the elements of *la* that also occur in *lb*.

    Order and duplicates of *la* are preserved. A list comprehension is
    used instead of ``filter``+lambda: it is the idiomatic form and always
    yields a real list (``filter`` is lazy under Python 3), which callers
    that index or re-iterate the result rely on.
    """
    return [x for x in la if x in lb]
class except_orm(Exception):
    """ORM-level error carrying a short name/category and a message value."""
    def __init__(self, name, value):
        # args holds (name, value) so str(exc) renders both parts.
        self.args = (name, value)
class BrowseRecordError(Exception):
    """Raised when a browse_record is constructed with an invalid id (see browse_record.__init__)."""
# Readonly python database object browser
class browse_null(object):
    """Null object returned when browsing an unset relational field."""
    def __getitem__(self, name):
        """Item access; body elided in this excerpt."""
    def __getattr__(self, name):
        return None # XXX: return self ?
    def __nonzero__(self):
        """Truthiness hook; body elided in this excerpt — presumably returns False."""
    def __unicode__(self):
        """Unicode conversion; body elided in this excerpt."""
# TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """A list of browse_record objects that also carries the browse context."""
    def __init__(self, lst, context=None):
        # NOTE(review): lines elided here likely defaulted a falsy context
        # to {} — as visible, context is stored as passed. Confirm upstream.
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """Lazy, cached proxy over a single database record.

    Field access goes through ``table.read`` on first use and the result
    is memoized in the shared ``cache`` dict, keyed by model name then id.
    NOTE(review): several lines of this class are elided in this excerpt
    (else-branches, early returns); hedged comments mark the gaps.
    """
    logger = netsvc.Logger()

    def __init__(self, cr, uid, id, table, cache, context=None, list_class=None, fields_process=None):
        """
        table : the object (inherited from orm)
        context : dictionary with an optional context
        """
        if fields_process is None:
        self._list_class = list_class or browse_record_list
        self._table_name = self._table._name
        self.__logger = logging.getLogger(
            'osv.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        # One cache sub-dict per model name, shared by all records of it.
        cache.setdefault(table._name, {})
        self._data = cache[table._name]

        if not (id and isinstance(id, (int, long,))):
            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
        # if not table.exists(cr, uid, id, context):
        #     raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        if name not in self._data[self._id]:
            # build the list of fields we will fetch

            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                attr = getattr(self._table, name)
                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    # Model methods are curried with (cr, uid, [id]).
                    return lambda *args, **argv: attr(self._cr, self._uid, [self._id], *args, **argv)
                self.logger.notifyChannel("browse_record", netsvc.LOG_WARNING,
                    "Field '%s' does not exist in object '%s': \n%s" % (
                    name, self, ''.join(traceback.format_exc())))
                raise KeyError("Field '%s' does not exist in object '%s'" % (

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
                # gen the list of "local" (ie not inherited) fields which are classic or many2one
                fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
                # gen the list of inherited fields
                inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                # complete the field list with the inherited fields which are classic or many2one
                fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
                fields_to_fetch = [(name, col)]
            # Fetch for every cached sibling record still missing this field.
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            field_names = map(lambda x: x[0], fields_to_fetch)
            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                    raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

                # Where did those ids come from? Perhaps old entries in ir_model_data?
                self.__logger.warn("No field_values found for ids %s in %s", ids, self)
                raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                for field_name, field_column in fields_to_fetch:
                    if field_column._type in ('many2one', 'one2one'):
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                                value = result_line[field_name]
                            # FIXME: this happen when a _inherits object
                            # overwrite a field of it parent. Need
                            # testing to be sure we got the right
                            # object and not the parent one.
                            if not isinstance(value, browse_record):
                                # In some cases the target model is not available yet, so we must ignore it,
                                # which is safe in most cases, this value will just be loaded later when needed.
                                # This situation can be caused by custom fields that connect objects with m2o without
                                # respecting module dependencies, causing relationships to be connected too soon when
                                # the target is not loaded yet.
                                new_data[field_name] = browse_record(self._cr,
                                    self._uid, value, obj, self._cache,
                                    context=self._context,
                                    list_class=self._list_class,
                                    fields_process=self._fields_process)
                                new_data[field_name] = value
                            new_data[field_name] = browse_null()
                        new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type in ('reference'):
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                                # Reference values are stored as 'model,id' strings.
                                ref_obj, ref_id = result_line[field_name].split(',')
                                ref_id = long(ref_id)
                                obj = self._table.pool.get(ref_obj)
                                new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                                new_data[field_name] = browse_null()
                            new_data[field_name] = browse_null()
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

        if not name in self._data[self._id]:
            # How did this happen? Could be a missing model due to custom fields used too soon, see above.
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                    "Fields to fetch: %s, Field values: %s"%(field_names, field_values))
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                    "Cached: %s, Table: %s"%(self._data[self._id], self._table))
            raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
            # Delegates to __getitem__ (call elided); a KeyError surfaces
            # to callers as an AttributeError.
            raise AttributeError(e)

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

    def __hasattr__(self, name):

        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # Records are equal when model name and id match.
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

        # Hash on (model name, id), consistent with __eq__.
        return hash((self._table_name, self._id))
    # NOTE(review): fragment — the enclosing function header (presumably
    # ``def get_pg_type(f):``) and the ``type_dict = {`` opener are not
    # visible in this excerpt.
    # (type returned by postgres when the column was created, type expression to create the column)
        fields.boolean: 'bool',
        fields.integer: 'int4',
        fields.integer_big: 'int8',
        fields.datetime: 'timestamp',
        fields.binary: 'bytea',
        fields.many2one: 'int4',
    if type(f) in type_dict:
        f_type = (type_dict[type(f)], type_dict[type(f)])
    elif isinstance(f, fields.float):
            # NUMERIC when fixed digits are configured, float8 otherwise
            # (the branch conditions are elided here).
            f_type = ('numeric', 'NUMERIC')
            f_type = ('float8', 'DOUBLE PRECISION')
    elif isinstance(f, (fields.char, fields.reference)):
        f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
    elif isinstance(f, fields.selection):
        if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
            # Column must be wide enough for the longest selection key.
            f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
        elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
            f_size = getattr(f, 'size', None) or 16
            f_type = ('int4', 'INTEGER')
            f_type = ('varchar', 'VARCHAR(%d)' % f_size)
    elif isinstance(f, fields.function) and eval('fields.'+(f._type), globals()) in type_dict:
        t = eval('fields.'+(f._type), globals())
        f_type = (type_dict[t], type_dict[t])
    elif isinstance(f, fields.function) and f._type == 'float':
            f_type = ('numeric', 'NUMERIC')
            f_type = ('float8', 'DOUBLE PRECISION')
    elif isinstance(f, fields.function) and f._type == 'selection':
        f_type = ('text', 'text')
    elif isinstance(f, fields.function) and f._type == 'char':
        f_type = ('varchar', 'VARCHAR(%d)' % (f.size))
        # Unknown column type: warn and fall through (f_type left unset).
        logger = netsvc.Logger()
        logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.
    """

    # Maps addon module name -> list of model classes declared by it.
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            # NOTE(review): an early return after this bookkeeping appears
            # to be elided from this excerpt — confirm against full source.

        # First component of the dotted module path is the addon name.
        module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
class orm_template(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class (although
    not directly; more specifically by inheriting from osv or
    osv_memory). The constructor is called once, usually directly
    after the class definition, e.g.:

    The system will later instanciate the class once per database (on
    which the class' module is installed).
    """

    # Defaults for hierarchical (parent/child) record storage.
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # Mapping from inherits'd field name to triple (m, r, f)
    # where m is the model from which it is inherits'd,
    # r is the (local) field towards m,
    # and f is the _column object itself.

    # Pseudo-field consulted for optimistic concurrency checks.
    CONCURRENCY_CHECK_FIELD = '__last_update'
    def log(self, cr, uid, id, message, secondary=False, context=None):
        # Creates a res.log entry pointing back at this model/record;
        # the values dict literal is truncated in this excerpt.
        return self.pool.get('res.log').create(cr, uid,
                'res_model': self._name,
                'secondary': secondary,
    # Extension hook; the base implementation does nothing.
    def view_init(self, cr, uid, fields_list, context=None):
        """Override this method to do specific things when a view on the object is opened."""
500 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
501 raise NotImplementedError(_('The read_group method is not implemented on this object !'))
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).
        """
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
            # No ir_model row yet: allocate a fresh id and insert one.
            cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
            model_id = cr.fetchone()[0]
            cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
            model_id = cr.fetchone()[0]
        if 'module' in context:
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
            cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                (name_id, context['module'], 'ir.model', model_id)

        # Snapshot existing ir_model_fields rows, keyed by field name.
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        for (k, f) in self._columns.items():
                'model_id': model_id,
                'field_description': f.string.replace("'", " "),
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': (f._type=='one2many' and isinstance(f, fields.one2many)) and f._fields_id or '',
            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                    vals['select_level'] = cols[k]['select_level']

                # New field: allocate an id and insert the full row.
                cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
                id = cr.fetchone()[0]
                cr.execute("""INSERT INTO ir_model_fields (
                    id, model_id, model, name, field_description, ttype,
                    relation,view_load,state,select_level,relation_field, translate
                    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                    id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                    vals['relation'], bool(vals['view_load']), 'base',
                    vals['select_level'], vals['relation_field'], bool(vals['translate'])
                if 'module' in context:
                    name1 = 'field_' + self._table + '_' + k
                    cr.execute("select name from ir_model_data where name=%s", (name1,))
                        # Disambiguate a clashing xml id by suffixing the row id.
                        name1 = name1 + "_" + str(id)
                    cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                        (name1, context['module'], 'ir.model.fields', id)
                # Existing field: update only when some value changed.
                for key, val in vals.items():
                    if cols[k][key] != vals[key]:
                        cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                        cr.execute("""UPDATE ir_model_fields SET
                            model_id=%s, field_description=%s, ttype=%s, relation=%s,
                            view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s
                            model=%s AND name=%s""", (
                            vals['model_id'], vals['field_description'], vals['ttype'],
                            vals['relation'], bool(vals['view_load']),
                            vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['model'], vals['name']
    def _auto_init(self, cr, context=None):
        """Validate the model name, then sync its fields into ir_model_fields."""
        raise_on_invalid_object_name(self._name)
        self._field_create(cr, context=context)
    def _auto_end(self, cr, context=None):
        """Counterpart hook to _auto_init; implementation not visible in this excerpt."""
    # Goal: try to apply inheritance at the instanciation level and
    # put objects in the pool var
    def makeInstance(cls, pool, cr, attributes):
        """ Instanciate a given model.

        This class method instanciates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        The ``attributes`` argument specifies which parent class attributes

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.
        """
        parent_names = getattr(cls, '_inherit', None)
            if isinstance(parent_names, (str, unicode)):
                name = cls._name or parent_names
                parent_names = [parent_names]
                    raise TypeError('_name is mandatory in case of multiple inheritance')

            for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
                # NOTE(review): parent_class is dereferenced BEFORE the
                # existence check below — a missing parent raises
                # AttributeError here instead of the intended TypeError.
                parent_class = pool.get(parent_name).__class__
                if not pool.get(parent_name):
                    raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                        'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
                    # Merge each inheritable attribute from the parent class.
                    new = copy.copy(getattr(pool.get(parent_name), s))
                    # Don't _inherit custom fields.
                    if hasattr(new, 'update'):
                        new.update(cls.__dict__.get(s, {}))
                    elif s=='_constraints':
                        for c in cls.__dict__.get(s, []):
                            for c2 in range(len(new)):
                                #For _constraints, we should check field and methods as well
                                if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                        or getattr(new[c2][0],'__name__', True) == \
                                        getattr(c[0],'__name__', False)):
                                    # If new class defines a constraint with
                                    # same function name, we let it override
                        new.extend(cls.__dict__.get(s, []))
            # Build the combined class; _register=False keeps the metaclass
            # from re-registering this synthetic type.
            cls = type(name, (cls, parent_class), dict(nattr, _register=False))
        obj = object.__new__(cls)
        obj.__init__(pool, cr)
683 """ Register this model.
685 This doesn't create an instance but simply register the model
686 as being part of the module where it is defined.
688 TODO make it possible to not even have to call the constructor
693 # Set the module name (e.g. base, sale, accounting, ...) on the class.
694 module = cls.__module__.split('.')[0]
695 if not hasattr(cls, '_module'):
698 # Remember which models to instanciate for this module.
699 module_class_list.setdefault(cls._module, []).append(cls)
701 # Since we don't return an instance here, the __init__
702 # method won't be called.
    def __init__(self, pool, cr):
        """ Initialize a model and make it part of the given registry."""
        pool.add(self._name, self)

        # A model must declare _name unless it extends one via _inherit.
        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name

            logger = netsvc.Logger()
            logger.notifyChannel('orm', netsvc.LOG_ERROR, msg)
            raise except_orm('ValueError', msg)

        if not self._description:
            self._description = self._name

        # Default SQL table name: model name with dots replaced.
        self._table = self._name.replace('.', '_')
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
        """Fetch records as objects allowing to use dot notation to browse fields and relations

        :param cr: database cursor
        :param user: current user id
        :param select: id or list of ids
        :param context: context arguments, like lang, time zone
        :rtype: object or list of objects requested
        """
        self._list_class = list_class or browse_record_list
        # NOTE(review): the `cache` dict used below is initialised on a line
        # elided from this excerpt.
        # need to accepts ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
    def __export_row(self, cr, uid, row, fields, context=None):
        """Render one browse_record into export rows following the (already
        split) field paths; sub-records produce extra rows in ``lines``."""
        def check_type(field_type):
            # Default export placeholder per type (returns elided here).
            if field_type == 'float':
            elif field_type == 'integer':
            elif field_type == 'boolean':

        def selection_field(in_field):
            # Resolve the selection column through the _inherits chain.
            col_obj = self.pool.get(in_field.keys()[0])
            if f[i] in col_obj._columns.keys():
                return col_obj._columns[f[i]]
            elif f[i] in col_obj._inherits.keys():
                selection_field(col_obj._inherits)

        data = map(lambda x: '', range(len(fields)))
        for fpos in range(len(fields)):
                        # 'id' path component: export the xml id from ir.model.data.
                        model_data = self.pool.get('ir.model.data')
                        data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
                            d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
                                r = '%s.%s' % (d['module'], d['name'])

                            # To display external name of selection field when its exported
                            if f[i] in self._columns.keys():
                                cols = self._columns[f[i]]
                            elif f[i] in self._inherit_fields.keys():
                                cols = selection_field(self._inherits)
                            if cols and cols._type == 'selection':
                                sel_list = cols.selection
                                if r and type(sel_list) == type([]):
                                    r = [x[1] for x in sel_list if r==x[0]]
                                    r = r and r[0] or False

                            if f[i] in self._columns:
                                r = check_type(self._columns[f[i]]._type)
                            elif f[i] in self._inherit_fields:
                                r = check_type(self._inherit_fields[f[i]][2]._type)
                            data[fpos] = r or False
                        if isinstance(r, (browse_record_list, list)):
                            # *2many value: recurse with the sub-field paths.
                            fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                            if [x for x in fields2 if x]:
                                    lines2 = self.__export_row(cr, uid, row2, fields2,
                                        for fpos2 in range(len(fields)):
                                            if lines2 and lines2[0][fpos2]:
                                                data[fpos2] = lines2[0][fpos2]
                                # No sub-fields requested: join display names.
                                name_relation = self.pool.get(rr._table_name)._rec_name
                                if isinstance(rr[name_relation], browse_record):
                                    rr = rr[name_relation]
                                rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
                                rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                                dt += tools.ustr(rr_name or '') + ','
                        if isinstance(r, browse_record):
                            r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
                            r = r and r[0] and r[0][1] or ''
                        data[fpos] = tools.ustr(r or '')
        return [data] + lines
    def export_data(self, cr, uid, ids, fields_to_export, context=None):
        """
        Export fields for selected objects

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids
        :param fields_to_export: list of fields
        :param context: context arguments, like lang, time zone
        :rtype: dictionary with a *datas* matrix

        This method is used when exporting data via client menu
        """
        cols = self._columns.copy()
        for f in self._inherit_fields:
            cols.update({f: self._inherit_fields[f][2]})
        # Split a path like 'line_ids:id' / 'x.id' into components; the
        # enclosing helper's def line is elided in this excerpt.
            if x=='.id': return [x]
            return x.replace(':id','/id').replace('.id','/.id').split('/')
        fields_to_export = map(fsplit, fields_to_export)
        for row in self.browse(cr, uid, ids, context):
            datas += self.__export_row(cr, uid, row, fields_to_export, context)
        return {'datas': datas}
    def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
        """
        Import given data in given module

        :param cr: database cursor
        :param uid: current user id
        :param fields: list of fields
        :param data: data to import
        :param mode: 'init' or 'update' for record creation
        :param current_module: module name
        :param noupdate: flag for record creation
        :param context: context arguments, like lang, time zone,
        :param filename: optional file to store partial import state for recovery

        This method is used when importing data via client menu.

        Example of fields to import for a sale.order::

            partner_id, (=name_search)
            order_line/.id, (=database_id)
            order_line/product_id/id, (=xml id)
            order_line/price_unit,
            order_line/product_uom_qty,
            order_line/product_uom/id (=xml_id)
        """
        # Normalise each path: 'x.id' -> 'x/.id', ':id' -> '/id', then split.
        def _replace_field(x):
            x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
            return x.replace(':id','/id').split('/')
        fields = map(_replace_field, fields)
        logger = netsvc.Logger()
        ir_model_data_obj = self.pool.get('ir.model.data')

        # mode: id (XML id) or .id (database id) or False for name_get
        def _get_id(model_name, id, current_module=False, mode='id'):
                obj_model = self.pool.get(model_name)
                ids = obj_model.search(cr, uid, [('id', '=', int(id))])
                    raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, id))
                    # Fully-qualified xml id 'module.name'.
                    module, xml_id = id.rsplit('.', 1)
                    module, xml_id = current_module, id
                record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
                ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id'])
                if not ir_model_data:
                    raise ValueError('No references to %s.%s' % (module, xml_id))
                id = ir_model_data[0]['res_id']
                # Fallback: resolve the value by name_search.
                obj_model = self.pool.get(model_name)
                ids = obj_model.name_search(cr, uid, id, operator='=', context=context)
                    raise ValueError('No record found for %s' % (id,))

        # datas: a list of records, each record is defined by a list of values
        # prefix: a list of prefix fields ['line_ids']
        # position: the line to process, skip is False if it's the first line of the current record
        # (res, position, warning, res_id) with
        # res: the record for the next line to process (including it's one2many)
        # position: the new position for the next line
        # res_id: the ID of the record if it's a modification
        def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0, skip=0):
            line = datas[position]
            for i in range(len(fields)):
                    raise Exception(_('Please check that all your lines have %d columns.'
                        'Stopped around line %d having %d columns.') % \
                        (len(fields), position+2, len(line)))
                # Skip columns that do not belong to the current prefix level.
                if field[:len(prefix)] <> prefix:

                # ID of the record using a XML ID
                if field[len(prefix)]=='id':
                        data_res_id = _get_id(model_name, line[i], current_module, 'id')
                    except ValueError, e:

                # ID of the record using a database ID
                elif field[len(prefix)]=='.id':
                    data_res_id = _get_id(model_name, line[i], current_module, '.id')

                # recursive call for getting children and returning [(0,0,{})] or [(1,ID,{})]
                if fields_def[field[len(prefix)]]['type']=='one2many':
                    if field[len(prefix)] in done:
                    done[field[len(prefix)]] = True
                    relation_obj = self.pool.get(fields_def[field[len(prefix)]]['relation'])
                    newfd = relation_obj.fields_get( cr, uid, context=context )
                    while pos < len(datas):
                        res2 = process_liness(self, datas, prefix + [field[len(prefix)]], current_module, relation_obj._name, newfd, pos, first)
                        (newrow, pos, w2, data_res_id2, xml_id2) = res2
                        nbrmax = max(nbrmax, pos)
                        # Stop when the sub-record row carries no values at all.
                        if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
                        res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )

                elif fields_def[field[len(prefix)]]['type']=='many2one':
                    relation = fields_def[field[len(prefix)]]['relation']
                    if len(field) == len(prefix)+1:
                        mode = field[len(prefix)+1]
                    res = _get_id(relation, line[i], current_module, mode)

                elif fields_def[field[len(prefix)]]['type']=='many2many':
                    relation = fields_def[field[len(prefix)]]['relation']
                    if len(field) == len(prefix)+1:
                        mode = field[len(prefix)+1]

                    # TODO: improve this by using csv.csv_reader
                    for db_id in line[i].split(config.get('csv_internal_sep')):
                        res.append( _get_id(relation, db_id, current_module, mode) )

                elif fields_def[field[len(prefix)]]['type'] == 'integer':
                    res = line[i] and int(line[i]) or 0
                elif fields_def[field[len(prefix)]]['type'] == 'boolean':
                    res = line[i].lower() not in ('0', 'false', 'off')
                elif fields_def[field[len(prefix)]]['type'] == 'float':
                    res = line[i] and float(line[i]) or 0.0
                elif fields_def[field[len(prefix)]]['type'] == 'selection':
                    # Accept either the selection key or its display value.
                    for key, val in fields_def[field[len(prefix)]]['selection']:
                        if tools.ustr(line[i]) in [tools.ustr(key), tools.ustr(val)]:
                    if line[i] and not res:
                        logger.notifyChannel("import", netsvc.LOG_WARNING,
                                _("key '%s' not found in selection field '%s'") % \
                                        (tools.ustr(line[i]), tools.ustr(field[len(prefix)])))
                        warning += [_("Key/value '%s' not found in selection field '%s'") % (tools.ustr(line[i]), tools.ustr(field[len(prefix)]))]

                row[field[len(prefix)]] = res or False

            result = (row, nbrmax, warning, data_res_id, xml_id)

        fields_def = self.fields_get(cr, uid, context=context)

        # Resume a previously interrupted import if partial state exists.
        if config.get('import_partial', False) and filename:
            data = pickle.load(file(config.get('import_partial')))

        while position<len(datas):
                (res, position, warning, res_id, xml_id) = \
                        process_liness(self, datas, [], current_module, self._name, fields_def, position=position)
                return (-1, res, 'Line ' + str(position) +' : ' + '!\n'.join(warning), '')

                ir_model_data_obj._update(cr, uid, self._name,
                     current_module, res, mode=mode, xml_id=xml_id,
                     noupdate=noupdate, res_id=res_id, context=context)
            except Exception, e:
                return (-1, res, 'Line ' + str(position) +' : ' + str(e), '')

            # Checkpoint partial-import state every 100 records.
            if config.get('import_partial', False) and filename and (not (position%100)):
                data = pickle.load(file(config.get('import_partial')))
                data[filename] = position
                pickle.dump(data, file(config.get('import_partial'), 'wb'))
                if context.get('defer_parent_store_computation'):
                    self._parent_store_compute(cr)

        if context.get('defer_parent_store_computation'):
            self._parent_store_compute(cr)
        return (position, 0, 0, 0)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """
        Read records with given ids with the given fields

        :param cr: database cursor
        :param user: current user id
        :param ids: id or list of the ids of the records to read
        :param fields: optional list of field names to return (default: all fields would be returned)
        :type fields: list (example ['field_name_1', ...])
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang``, ``context_tz`` to alter the results of the call.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: list of dictionaries((dictionary per record asked)) with requested field values
        :rtype: [{'name_of_the_field': value, ...}, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        # Abstract here: the SQL-backed ORM subclass provides the real implementation.
        raise NotImplementedError(_('The read method is not implemented on this object !'))
1111 def get_invalid_fields(self, cr, uid):
1112 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run every ``_constraints`` entry against the given records.

        Each constraint is a ``(checker, message, field_names)`` triple; on
        failure the offending field names are accumulated in
        ``self._invalids`` and a ValidateError is raised.
        """
        context = context or {}
        lng = context.get('lang', False) or 'en_US'
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # Check presence of __call__ directly instead of using
                # callable() because it will be deprecated as of Python 3.0
                if hasattr(msg, '__call__'):
                    # dynamic message: the callable may return a plain string
                    # or a (format_string, params) tuple
                    tmp_msg = msg(self, cr, uid, ids, context=context)
                    if isinstance(tmp_msg, tuple):
                        tmp_msg, params = tmp_msg
                        translated_msg = tmp_msg % params
                        translated_msg = tmp_msg
                    # static message: translate via ir.translation, falling
                    # back to the untranslated source text
                    translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
                    _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                    self._invalids.update(fields)
            raise except_orm('ValidateError', '\n'.join(error_msgs))
        # all constraints passed: reset the invalid-field bookkeeping
        self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """
        Returns default values for the fields in fields_list.

        :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
        :type fields_list: list
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                        It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                        or override a default value for a field.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
        """
        # trigger view init hook
        self.view_init(cr, uid, fields_list, context)
        # get the default values for the inherited fields
        for t in self._inherits.keys():
            defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                # a default may be a callable (model, cr, uid, context) or a
                # plain constant value
                if callable(self._defaults[f]):
                    defaults[f] = self._defaults[f](self, cr, uid, context)
                    defaults[f] = self._defaults[f]
            # resolve the field definition locally or through _inherits
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            if isinstance(fld_def, fields.property):
                # property fields: fetch the company-dependent default value
                property_obj = self.pool.get('ir.property')
                prop_value = property_obj.get(cr, uid, f, self._name, context=context)
                if isinstance(prop_value, (browse_record, browse_null)):
                    defaults[f] = prop_value.id
                    defaults[f] = prop_value
            if f not in defaults:
        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type in ('many2one', 'one2one'):
                    # drop the stored default if the target record vanished
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                if fld_def._type in ('many2many'):
                    # keep only the ids that still exist in the target model
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                        field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type in ('one2many'):
                    # sanitize each sub-record: keep only values whose
                    # many2one/one2one targets still exist
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                defaults[field] = field_value
        # get the default values from the context
        for key in context or {}:
            if key.startswith('default_') and (key[8:] in fields_list):
                defaults[key[8:]] = context[key]
1244 def perm_read(self, cr, user, ids, context=None, details=True):
1245 raise NotImplementedError(_('The perm_read method is not implemented on this object !'))
1247 def unlink(self, cr, uid, ids, context=None):
1248 raise NotImplementedError(_('The unlink method is not implemented on this object !'))
1250 def write(self, cr, user, ids, vals, context=None):
1251 raise NotImplementedError(_('The write method is not implemented on this object !'))
1253 def create(self, cr, user, vals, context=None):
1254 raise NotImplementedError(_('The create method is not implemented on this object !'))
    def fields_get_keys(self, cr, user, context=None):
        """Return the names of all fields, including _inherits'd ones."""
        res = self._columns.keys()
        # TODO I believe this loop can be replace by
        # res.extend(self._inherit_fields.key())
        for parent in self._inherits:
            # recurse into each parent model to pick up its field names
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """ Returns the definition of each field.

        The returned value is a dictionary (indiced by field name) of
        dictionaries. The _inherits'd fields are included. The string,
        help, and selection (if present) attributes are translated.
        """
        translation_obj = self.pool.get('ir.translation')
        # inherited fields first, so local columns processed below win
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

        for f, field in self._columns.iteritems():
            if allfields and f not in allfields:
            res[f] = fields.field_to_dict(self, cr, user, context, field)

            if not write_access:
                # user has no write access: force read-only and drop any
                # state-dependent attribute overrides
                res[f]['readonly'] = True
                res[f]['states'] = {}

            # translate the user-visible attributes (string/help/selection)
            if hasattr(res[f], 'string'):
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
                res[f]['string'] = res_trans
            if hasattr(res[f], 'help'):
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
                res[f]['help'] = help_trans
            if hasattr(res[f], 'selection'):
                if isinstance(field.selection, (tuple, list)):
                    sel = field.selection
                    for key, val in sel:
                        val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                        sel2.append((key, val2 or val))
                    res[f]['selection'] = sel2
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Hook for a context-dependent view title; base version provides none."""
    def __view_look_dom(self, cr, user, node, view_id, context=None):
        """Walk one node of a view architecture, collecting field definitions.

        Returns a dict mapping field names to extra attributes (sub-views,
        cached selection lists, ...) gathered during the traversal; also
        translates user-visible attributes and applies group restrictions.
        """
            # helper: lxml wants byte strings when serializing
            if isinstance(s, unicode):
                return s.encode('utf8')

        # return True if node can be displayed to current user
        def check_group(node):
            if node.get('groups'):
                groups = node.get('groups').split(',')
                access_pool = self.pool.get('ir.model.access')
                # visible if the user belongs to at least one listed group
                can_see = any(access_pool.check_groups(cr, user, group) for group in groups)
                    node.set('invisible', '1')
                    if 'attrs' in node.attrib:
                        del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # diagram node/arrow targeting another model: rebuild an XML
                # fragment from the child <field> elements and process it
                # against that model
                if f.tag in ('field'):
                    xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}
            if node.get('name'):
                # resolve the column locally or through _inherits
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                    column = self._inherit_fields[node.get('name')][2]
                    relation = self.pool.get(column._obj)
                        # inline sub-views are processed recursively against
                        # the related model
                        if f.tag in ('form', 'tree', 'graph'):
                            ctx = context.copy()
                            ctx['base_model_name'] = self._name
                            xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                            views[str(f.tag)] = {
                    attrs = {'views': views}
                    if node.get('widget') and node.get('widget') == 'selection':
                        # Prepare the cached selection list for the client. This needs to be
                        # done even when the field is invisible to the current user, because
                        # other events could need to change its value to any of the selectable ones
                        # (such as on_change events, refreshes, etc.)

                        # If domain and context are strings, we keep them for client-side, otherwise
                        # we evaluate them server-side to consider them when generating the list of
                        # TODO: find a way to remove this hack, by allow dynamic domains
                        if column._domain and not isinstance(column._domain, basestring):
                            dom = column._domain
                        # NOTE: eval here is tools.safe_eval (see module imports)
                        dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                        search_context = dict(context)
                        if column._context and not isinstance(column._context, basestring):
                            search_context.update(column._context)
                        attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                        if (node.get('required') and not int(node.get('required'))) or not column.required:
                            # non-required selection: allow clearing the value
                            attrs['selection'].append((False, ''))
            fields[node.get('name')] = attrs

        elif node.tag in ('form', 'tree'):
            # give the model a chance to supply a context-dependent title
            result = self.view_header_get(cr, user, False, node.tag, context)
                node.set('string', result)

        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if 'groups' in node.attrib:

        # translate user-visible attributes when a language is requested
        if ('lang' in context) and not result:
            if node.get('string'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                    node.set('string', trans)
            if node.get('confirm'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm'))
                node.set('confirm', trans)
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
                node.set('sum', trans)
            if node.get('help'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('help'))
                node.set('help', trans)

        # recurse into child nodes, merging their field definitions
        if children or (node.tag == 'field' and f.tag in ('filter','separator')):
            fields.update(self.__view_look_dom(cr, user, f, view_id, context))
    def _disable_workflow_buttons(self, cr, user, node):
        """Set workflow buttons readonly when the user is not in any group
        allowed to fire the corresponding workflow transition."""
        # admin user can always activate workflow buttons

        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        # only workflow buttons (type != 'object') are subject to this check
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            # groups attached to the transitions this button can trigger
            cr.execute("""SELECT DISTINCT t.group_id
                        INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                        INNER JOIN wkf_transition t ON (t.act_to = a.id)
                        AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # clickable if no group restriction, or user shares a group
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            button.set('readonly', str(int(not can_click)))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        """Postprocess a parsed view: collect its field definitions, disable
        unauthorized workflow buttons and return the serialized architecture
        together with the field descriptions."""
        fields_def = self.__view_look_dom(cr, user, node, view_id, context=context)
        node = self._disable_workflow_buttons(cr, user, node)
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        if node.tag == 'diagram':
            # diagram views take their fields from the node/arrow sub-models
            if node.getchildren()[0].tag == 'node':
                node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, fields_def.keys(), context)
            if node.getchildren()[1].tag == 'arrow':
                arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, fields_def.keys(), context)
            for key, value in node_fields.items():
            for key, value in arrow_fields.items():
            fields = self.fields_get(cr, user, fields_def.keys(), context)
        for field in fields_def:
            # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
                fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
                # unknown field: list every view part mentioning it and abort
                cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
                res = cr.fetchall()[:]
                res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
                msg = "\n * ".join([r[0] for r in res])
                msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
                netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
                raise except_orm('View error', msg)
    def __get_default_calendar_view(self):
        """Generate a default calendar view (For internal use only).
        """
        arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
                '<calendar string="%s"') % (self._description)

        # pick a start-date field when the configured one does not exist
        if (self._date_name not in self._columns):
            for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                if dt in self._columns:
                    self._date_name = dt
            # no usable date field could be found
            raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))

        arch += ' date_start="%s"' % (self._date_name)

        # first available color field, if any
        for color in ["user_id", "partner_id", "x_user_id", "x_partner_id"]:
            if color in self._columns:
                arch += ' color="' + color + '"'

        dt_stop_flag = False
        for dt_stop in ["date_stop", "date_end", "x_date_stop", "x_date_end"]:
            if dt_stop in self._columns:
                arch += ' date_stop="' + dt_stop + '"'

        # fall back to a duration field when no stop-date field exists
        if not dt_stop_flag:
            for dt_delay in ["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"]:
                if dt_delay in self._columns:
                    arch += ' date_delay="' + dt_delay + '"'

                ' <field name="%s"/>\n'
                '</calendar>') % (self._rec_name)
1559 def __get_default_search_view(self, cr, uid, context=None):
1560 form_view = self.fields_view_get(cr, uid, False, 'form', context=context)
1561 tree_view = self.fields_view_get(cr, uid, False, 'tree', context=context)
1563 fields_to_search = set()
1564 fields = self.fields_get(cr, uid, context=context)
1565 for field in fields:
1566 if fields[field].get('select'):
1567 fields_to_search.add(field)
1568 for view in (form_view, tree_view):
1569 view_root = etree.fromstring(view['arch'])
1570 # Only care about select=1 in xpath below, because select=2 is covered
1571 # by the custom advanced search in clients
1572 fields_to_search = fields_to_search.union(view_root.xpath("//field[@select=1]/@name"))
1574 tree_view_root = view_root # as provided by loop above
1575 search_view = etree.Element("search", attrib={'string': tree_view_root.get("string", "")})
1576 field_group = etree.Element("group")
1577 search_view.append(field_group)
1579 for field_name in fields_to_search:
1580 field_group.append(etree.Element("field", attrib={'name': field_name}))
1582 return etree.tostring(search_view, encoding="utf-8").replace('\t', '')
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Get the detailed composition of the requested view like fields, model, view architecture

        :param cr: database cursor
        :param user: current user id
        :param view_id: id of the view or None
        :param view_type: type of the view to return if view_id is None ('form', tree', ...)
        :param context: context arguments, like lang, time zone
        :param toolbar: true to include contextual actions
        :param submenu: example (portal_project module)
        :return: dictionary describing the composition of the requested view (including inherited views and extensions)
        :raise AttributeError:
                * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
                * if some tag other than 'position' is found in parent view
        :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
        """
            # helper: lxml wants byte strings when parsing/serializing
            if isinstance(s, unicode):
                return s.encode('utf8')

        def raise_view_error(error_msg, child_view_id):
            # report an inheritance problem, naming the offending child view
            view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
            raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
                % (child_view.xml_id, self._name, error_msg))

        def _inherit_apply(src, inherit, inherit_id=None):
            # apply one inheritance specification <inherit> onto <src>
            def _find(node, node2):
                # locate in <node> the element addressed by spec element <node2>
                if node2.tag == 'xpath':
                    res = node.xpath(node2.get('expr'))
                    for n in node.getiterator(node2.tag):
                        if node2.tag == 'field':
                            # only compare field names, a field can be only once in a given view
                            # at a given level (and for multilevel expressions, we should use xpath
                            # inheritance spec anyway)
                            if node2.get('name') == n.get('name'):
                        for attr in node2.attrib:
                            if attr == 'position':
                            if n.get(attr) == node2.get(attr):
            # End: _find(node, node2)

            doc_dest = etree.fromstring(encode(inherit))
            toparse = [doc_dest]
                node2 = toparse.pop(0)
                if isinstance(node2, SKIPPED_ELEMENT_TYPES):
                if node2.tag == 'data':
                    # <data> wrapper: queue its children instead
                    toparse += [ c for c in doc_dest ]
                node = _find(src, node2)
                if node is not None:
                    if node2.get('position'):
                        pos = node2.get('position')
                        if pos == 'replace':
                            parent = node.getparent()
                                src = copy.deepcopy(node2[0])
                                node.addprevious(child)
                                node.getparent().remove(node)
                        elif pos == 'attributes':
                            # set/remove attributes on the matched node
                            for child in node2.getiterator('attribute'):
                                attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                                    node.set(attribute[0], attribute[1])
                                    del(node.attrib[attribute[0]])
                            sib = node.getnext()
                            elif pos == 'after':
                                    sib.addprevious(child)
                            elif pos == 'before':
                                node.addprevious(child)
                                raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
                    # matched nothing: rebuild the spec's tag for the error message
                    ' %s="%s"' % (attr, node2.get(attr))
                    for attr in node2.attrib
                    if attr != 'position'
                    tag = "<%s%s>" % (node2.tag, attrs)
                    raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
        # End: _inherit_apply(src, inherit)

        result = {'type': view_type, 'model': self._name}
        parent_view_model = None
        # the context may force a particular view through <type>_view_ref
        view_ref = context.get(view_type + '_view_ref', False)
        if view_ref and not view_id:
                module, view_ref = view_ref.split('.', 1)
                cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
                view_ref_res = cr.fetchone()
                    view_id = view_ref_res[0]

            cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                          WHERE id=%s""", (view_id,))
            cr.execute('''SELECT
                    arch,name,field_parent,id,type,inherit_id,model
                ORDER BY priority''', (self._name, view_type))
            sql_res = cr.fetchone()
            view_id = ok or sql_res[3]
            parent_view_model = sql_res[6]

        # if a view was found
            result['type'] = sql_res[4]
            result['view_id'] = sql_res[3]
            result['arch'] = sql_res[0]

            def _inherit_apply_rec(result, inherit_id):
                # get all views which inherit from (ie modify) this view
                cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
                sql_inherit = cr.fetchall()
                for (inherit, id) in sql_inherit:
                    result = _inherit_apply(result, inherit, id)
                    result = _inherit_apply_rec(result, id)

            inherit_result = etree.fromstring(encode(result['arch']))
            result['arch'] = _inherit_apply_rec(inherit_result, sql_res[3])
            result['name'] = sql_res[1]
            result['field_parent'] = sql_res[2] or False

        # otherwise, build some kind of default view
            if view_type == 'form':
                res = self.fields_get(cr, user, context=context)
                xml = '<?xml version="1.0" encoding="utf-8"?> ' \
                     '<form string="%s">' % (self._description,)
                    if res[x]['type'] not in ('one2many', 'many2many'):
                        xml += '<field name="%s"/>' % (x,)
                        if res[x]['type'] == 'text':
            elif view_type == 'tree':
                # default list view shows only the record name column
                _rec_name = self._rec_name
                if _rec_name not in self._columns:
                    _rec_name = self._columns.keys()[0]
                xml = '<?xml version="1.0" encoding="utf-8"?>' \
                       '<tree string="%s"><field name="%s"/></tree>' \
                       % (self._description, _rec_name)
            elif view_type == 'calendar':
                xml = self.__get_default_calendar_view()
            elif view_type == 'search':
                xml = self.__get_default_search_view(cr, user, context)
                xml = '<?xml version="1.0"?>' # what happens here, graph case?
                raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
            result['arch'] = etree.fromstring(encode(xml))
            result['name'] = 'default'
            result['field_parent'] = False
            result['view_id'] = 0

        if parent_view_model != self._name:
            # the view belongs to a parent model (_inherits): keep its name
            # around so view translations can be looked up against it too
            ctx = context.copy()
            ctx['base_model_name'] = parent_view_model
        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
        result['arch'] = xarch
        result['fields'] = xfields

        if context and context.get('active_id', False):
            data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
                act_id = data_menu.id
                    data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
                    result['submenu'] = getattr(data_action, 'menus', False)

        # strip heavy report payloads before returning toolbar actions
        for key in ('report_sxw_content', 'report_rml_content',
                'report_sxw', 'report_rml',
                'report_sxw_content_data', 'report_rml_content_data'):
            ir_values_obj = self.pool.get('ir.values')
            resprint = ir_values_obj.get(cr, user, 'action',
                    'client_print_multi', [(self._name, False)], False,
            resaction = ir_values_obj.get(cr, user, 'action',
                    'client_action_multi', [(self._name, False)], False,
            resrelate = ir_values_obj.get(cr, user, 'action',
                    'client_action_relate', [(self._name, False)], False,
            resprint = map(clean, resprint)
            resaction = map(clean, resaction)
            # multi-record actions are not shown in the single-record toolbar
            resaction = filter(lambda x: not x.get('multi', False), resaction)
            resprint = filter(lambda x: not x.get('multi', False), resprint)
            resrelate = map(lambda x: x[2], resrelate)

            for x in resprint + resaction + resrelate:
                x['string'] = x['name']

            result['toolbar'] = {
                'action': resaction,
    # non name-mangled alias of __view_look_dom_arch
    _view_look_dom_arch = __view_look_dom_arch
    def search_count(self, cr, user, args, context=None):
        """Count the records matching the ``args`` search domain."""
        res = self.search(cr, user, args, context=context, count=True)
        if isinstance(res, list):
    def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
        """
        Search for records based on a search domain.

        :param cr: database cursor
        :param user: current user id
        :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
        :param offset: optional number of results to skip in the returned values (default: 0)
        :param limit: optional max number of records to return (default: **None**)
        :param order: optional columns to sort by (default: self._order=id )
        :param context: optional context arguments, like lang, time zone
        :type context: dictionary
        :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
        :return: id or list of ids of records matching the criteria
        :rtype: integer or list of integers
        :raise AccessError: * if user tries to bypass access rules for read on the requested object.

        **Expressing a search domain (args)**

        Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:

            * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
            * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
              The semantics of most of these operators are obvious.
              The ``child_of`` operator will look for records who are children or grand-children of a given record,
              according to the semantics of this model (i.e following the relationship field named by
              ``self._parent_name``, by default ``parent_id``.
            * **value** must be a valid value to compare with the values of **field_name**, depending on its type.

        Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
        These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
        Be very careful about this when you combine them the first time.

        Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::

            [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))

        The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::

            (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))

        """
        # delegate to the private implementation with default access checking
        return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        # Abstract here: the SQL-backed ORM subclass provides the real implementation.
        raise NotImplementedError(_('The search method is not implemented on this object !'))
    def name_get(self, cr, user, ids, context=None):
        """
        Returns the textual representation (``_rec_name`` value) of the records.

        :param cr: database cursor
        :param user: current user id
        :param ids: list of ids
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: tuples with the text representation of requested objects for to-many relationships
        """
        # normalize a single id into a list before reading
        if isinstance(ids, (int, long)):
        return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
            [self._rec_name], context, load='_classic_write')]
    def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
        """
        Search for records and their display names according to a search domain.

        :param cr: database cursor
        :param user: current user id
        :param name: object name to search
        :param args: list of tuples specifying search criteria [('field_name', 'operator', 'value'), ...]
        :param operator: operator for search criterion
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :param limit: optional max number of records to return
        :return: list of object names matching the search criteria, used to provide completion for to-many relationships

        This method is equivalent of :py:meth:`~osv.osv.osv.search` on **name** + :py:meth:`~osv.osv.osv.name_get` on the result.
        See :py:meth:`~osv.osv.osv.search` for an explanation of the possible values for the search domain specified in **args**.
        """
        return self._name_search(cr, user, name, args, operator, context, limit)
# name_create: quick-create a record from just its display name, then return
# its name_get() pair. May raise if other required fields lack defaults.
1959 def name_create(self, cr, uid, name, context=None):
1961 Creates a new record by calling :py:meth:`~osv.osv.osv.create` with only one
1962 value provided: the name of the new record (``_rec_name`` field).
1963 The new record will also be initialized with any default values applicable
1964 to this model, or provided through the context. The usual behavior of
1965 :py:meth:`~osv.osv.osv.create` applies.
1966 Similarly, this method may raise an exception if the model has multiple
1967 required fields and some do not have default values.
1969 :param name: name of the record to create
1971 :return: the :py:meth:`~osv.osv.osv.name_get` value for the newly-created record.
# NOTE(review): stray trailing semicolon on the next line (harmless in Python).
1973 rec_id = self.create(cr, uid, {self._rec_name: name}, context);
# name_get returns a list of (id, name) tuples; [0] picks the single result.
1974 return self.name_get(cr, uid, [rec_id], context)[0]
# Private backend of name_search. ``name_get_uid`` lets the caller run the
# search access check with one uid but render names (name_get) with another,
# working around access-rights issues. NOTE(review): the trailing return
# statement is elided from this listing (embedded numbers 1979-1984, 1989+).
1976 # private implementation of name_search, allows passing a dedicated user for the name_get part to
1977 # solve some access rights issues
1978 def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
# Append the name criterion to the caller-supplied domain.
1985 args += [(self._rec_name, operator, name)]
1986 access_rights_uid = name_get_uid or user
1987 ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
# Names are rendered with the (possibly different) access_rights_uid.
1988 res = self.name_get(cr, access_rights_uid, ids, context)
# Abstract stub: concrete subclasses (orm / orm_memory) must override copy().
1991 def copy(self, cr, uid, id, default=None, context=None):
1992 raise NotImplementedError(_('The copy method is not implemented on this object !'))
# Abstract stub: concrete subclasses must override exists().
1994 def exists(self, cr, uid, id, context=None):
1995 raise NotImplementedError(_('The exists method is not implemented on this object !'))
# read_string: collect the translated field labels (ir.translation 'field'
# entries) for the requested languages, falling back to the column's
# untranslated .string, and recurse into _inherits parents.
# NOTE(review): elided listing -- loop headers over ``langs``/``fields`` and
# the final return are missing here (embedded numbers 1998-2001, 2004-2022).
1997 def read_string(self, cr, uid, id, langs, fields=None, context=None):
# Reading translations requires read access on ir.translation.
2000 self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
# Default to every own + inherited field when none are requested.
2002 fields = self._columns.keys() + self._inherit_fields.keys()
2003 #FIXME: collect all calls to _get_source into one SQL call.
2005 res[lang] = {'code': lang}
2007 if f in self._columns:
2008 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
2010 res[lang][f] = res_trans
# Fallback: the untranslated label defined on the column.
2012 res[lang][f] = self._columns[f].string
# Merge translations coming from each _inherits parent model.
2013 for table in self._inherits:
2014 cols = intersect(self._inherit_fields.keys(), fields)
2015 res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
2018 res[lang]['code'] = lang
2019 for f in res2[lang]:
2020 res[lang][f] = res2[lang][f]
# write_string: store translated field labels into ir.translation (res_id 0,
# i.e. per-model field labels, not per-record values) and propagate to
# _inherits parents. NOTE(review): the loop headers over ``langs``/``vals``
# are elided from this listing (embedded numbers 2026-2027, 2033, 2035+).
2023 def write_string(self, cr, uid, id, langs, vals, context=None):
# Writing translations requires write access on ir.translation.
2024 self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
2025 #FIXME: try to only call the translation in one SQL
2028 if field in self._columns:
# src is the untranslated label used as the translation source string.
2029 src = self._columns[field].string
2030 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
# Forward labels belonging to inherited fields to their parent model.
2031 for table in self._inherits:
2032 cols = intersect(self._inherit_fields.keys(), vals)
2034 self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
# Abstract stub: schema cleanup hook, implemented by orm (SQL tables) and
# made a no-op by orm_memory.
2037 def _check_removed_columns(self, cr, log=False):
2038 raise NotImplementedError()
# _add_missing_default_values: complete a ``values`` dict (as passed to
# create()) with default values for every own/inherited field not supplied,
# normalizing defaults of x2many fields into the (6,0,ids) / (0,0,vals)
# command format. Provided values always win over defaults.
# NOTE(review): elided listing -- the ``for dv in defaults`` loop header and
# the final return are missing (embedded numbers 2052, 2056, 2066+).
2040 def _add_missing_default_values(self, cr, uid, values, context=None):
2041 missing_defaults = []
2042 avoid_tables = [] # avoid overriding inherited values when parent is set
# If the parent record is given explicitly, do not default its fields.
2043 for tables, parent_field in self._inherits.items():
2044 if parent_field in values:
2045 avoid_tables.append(tables)
2046 for field in self._columns.keys():
2047 if not field in values:
2048 missing_defaults.append(field)
2049 for field in self._inherit_fields.keys():
2050 if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
2051 missing_defaults.append(field)
2053 if len(missing_defaults):
2054 # override defaults with the provided values, never allow the other way around
2055 defaults = self.default_get(cr, uid, missing_defaults, context)
# many2many default given as a plain id list -> wrap in a (6,0,ids) command.
2057 if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
2058 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
2059 and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
2060 defaults[dv] = [(6, 0, defaults[dv])]
# one2many default given as a list of value dicts -> (0,0,vals) commands.
2061 if (dv in self._columns and self._columns[dv]._type == 'one2many' \
2062 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
2063 and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
2064 defaults[dv] = [(0, 0, x) for x in defaults[dv]]
# Caller-provided values overwrite any defaults.
2065 defaults.update(values)
# orm_memory: in-memory (non-persisted) model implementation. Records live in
# ``self.datas`` (a dict id -> values, with bookkeeping keys
# 'internal.date_access' and 'internal.create_uid'); only workflow instances
# touch the database. Access control: non-superusers may only touch their own
# records. NOTE(review): this whole class body is an elided listing -- many
# lines (returns, else-branches, loop headers) are missing throughout.
2069 class orm_memory(orm_template):
2071 _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
# Expiration tuning read from the server config (count- and age-based).
2072 _max_count = config.get('osv_memory_count_limit')
2073 _max_hours = config.get('osv_memory_age_limit')
# Factory used by the pool; only _columns/_defaults are per-instance.
2077 def createInstance(cls, pool, cr):
2078 return cls.makeInstance(pool, cr, ['_columns', '_defaults'])
2080 def __init__(self, pool, cr):
2081 super(orm_memory, self).__init__(pool, cr)
# Purge stale workflow instances referring to this transient model.
2085 cr.execute('delete from wkf_instance where res_type=%s', (self._name,))
# Only the record's creator (or the superuser, uid 1) may access it.
2087 def _check_access(self, uid, object_id, mode):
2088 if uid != 1 and self.datas[object_id]['internal.create_uid'] != uid:
2089 raise except_orm(_('AccessError'), '%s access is only allowed on your own records for osv_memory objects except for the super-user' % mode.capitalize())
2091 def vaccum(self, cr, uid, force=False):
2092 """Run the vaccuum cleaning system, expiring and removing old records from the
2093 virtual osv_memory tables if the "max count" or "max age" conditions are enabled
2094 and have been reached. This method can be called very often (e.g. everytime a record
2095 is created), but will only actually trigger the cleanup process once out of
2096 "_check_time" times (by default once out of 20 calls)."""
# Throttle: skip cleanup unless forced or the call counter wraps.
2098 if (not force) and (self.check_id % self._check_time):
2102 # Age-based expiration
# NOTE(review): shadows the builtin ``max`` (pre-existing in the original).
2104 max = time.time() - self._max_hours * 60 * 60
2105 for k,v in self.datas.iteritems():
2106 if v['internal.date_access'] < max:
2108 self.unlink(cr, 1, tounlink)
2110 # Count-based expiration
2111 if self._max_count and len(self.datas) > self._max_count:
2112 # sort by access time to remove only the first/oldest ones in LRU fashion
2113 records = self.datas.items()
2114 records.sort(key=lambda x:x[1]['internal.date_access'])
2115 self.unlink(cr, 1, [x[0] for x in records[:len(self.datas)-self._max_count]])
# read(): dict-backed analogue of orm.read; checks per-record access,
# refreshes the LRU timestamp, then post-processes non-classic fields
# through their get_memory() hook.
2119 def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
2122 if not fields_to_read:
2123 fields_to_read = self._columns.keys()
2127 if isinstance(ids, (int, long)):
2131 for f in fields_to_read:
2132 record = self.datas.get(id)
2134 self._check_access(user, id, 'read')
2135 r[f] = record.get(f, False)
# bin_size: callers asked for sizes, not binary payloads -- TODO confirm
# what the elided branch substitutes here.
2136 if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
# Touch the access timestamp for LRU-based vaccum expiration.
2139 if id in self.datas:
2140 self.datas[id]['internal.date_access'] = time.time()
# Fields without the requested ``load`` attribute (e.g. function fields)
# are fetched through their get_memory implementation.
2141 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
2142 for f in fields_post:
2143 res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
2144 for record in result:
2145 record[f] = res2[record['id']]
# Mirror the scalar-vs-list shape of the ``ids`` argument in the result.
2146 if isinstance(ids_orig, (int, long)):
# write(): split vals into classic-write values (stored directly in datas)
# and deferred ones (set_memory), validate, then fire the workflow trigger.
2150 def write(self, cr, user, ids, vals, context=None):
2156 if self._columns[field]._classic_write:
2157 vals2[field] = vals[field]
2159 upd_todo.append(field)
2160 for object_id in ids:
2161 self._check_access(user, object_id, mode='write')
2162 self.datas[object_id].update(vals2)
2163 self.datas[object_id]['internal.date_access'] = time.time()
2164 for field in upd_todo:
2165 self._columns[field].set_memory(cr, self, object_id, field, vals[field], user, context)
2166 self._validate(cr, user, [object_id], context)
2167 wf_service = netsvc.LocalService("workflow")
2168 wf_service.trg_write(user, self._name, object_id, cr)
# create(): vaccum first, allocate the next id, fill defaults, store values,
# stamp ownership/access metadata, validate, optionally log, start workflow.
2171 def create(self, cr, user, vals, context=None):
2172 self.vaccum(cr, user)
2174 id_new = self.next_id
2176 vals = self._add_missing_default_values(cr, user, vals, context)
2181 if self._columns[field]._classic_write:
2182 vals2[field] = vals[field]
2184 upd_todo.append(field)
2185 self.datas[id_new] = vals2
2186 self.datas[id_new]['internal.date_access'] = time.time()
# Ownership marker consulted by _check_access above.
2187 self.datas[id_new]['internal.create_uid'] = user
2189 for field in upd_todo:
2190 self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
2191 self._validate(cr, user, [id_new], context)
2192 if self._log_create and not (context and context.get('no_store_function', False)):
2193 message = self._description + \
2195 self.name_get(cr, user, [id_new], context=context)[0][1] + \
2197 self.log(cr, user, id_new, message, True, context=context)
2198 wf_service = netsvc.LocalService("workflow")
2199 wf_service.trg_create(user, self._name, id_new, cr)
# _where_calc: build an expression from the domain, injecting the implicit
# active=1 filter when the model has an 'active' column and the caller
# did not mention it.
2202 def _where_calc(self, cr, user, args, active_test=True, context=None):
2207 # if the object has a field named 'active', filter out all inactive
2208 # records unless they were explicitely asked for
2209 if 'active' in self._columns and (active_test and context.get('active_test', True)):
2211 active_in_args = False
2213 if a[0] == 'active':
2214 active_in_args = True
2215 if not active_in_args:
2216 args.insert(0, ('active', '=', 1))
2218 args = [('active', '=', 1)]
2221 e = expression.expression(args)
2222 e.parse(cr, user, self, context)
# _search: evaluate the domain in Python against self.datas. Non-superusers
# implicitly see only their own records.
2226 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
2230 # implicit filter on current user except for superuser
2234 args.insert(0, ('internal.create_uid', '=', user))
2236 result = self._where_calc(cr, user, args, context=context)
2238 return self.datas.keys()
2242 #Find the value of dict
2245 for id, data in self.datas.items():
2246 counter = counter + 1
2248 if limit and (counter > int(limit)):
# NOTE(review): domain terms are evaluated via eval() on locally-built
# strings; operands come from the parsed domain, not raw user text, but
# this is still a fragile pattern (pre-existing in the original).
2253 val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
2254 elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
2255 val = eval('data[arg[0]]'+arg[1] +' arg[2]', locals())
2256 elif arg[1] in ['ilike']:
# ilike approximated by a plain (case-sensitive) substring test here.
2257 val = (str(data[arg[0]]).find(str(arg[2]))!=-1)
# unlink(): drop records from the dict and purge their workflow instances.
2267 def unlink(self, cr, uid, ids, context=None):
2269 self._check_access(uid, id, 'unlink')
2270 self.datas.pop(id, None)
2272 cr.execute('delete from wkf_instance where res_type=%s and res_id IN %s', (self._name, tuple(ids)))
# perm_read(): synthesize metadata -- current user as creator, "now" as the
# creation date; in-memory records keep no real audit trail.
2275 def perm_read(self, cr, user, ids, context=None, details=True):
2277 credentials = self.pool.get('res.users').name_get(cr, user, [user])[0]
2278 create_date = time.strftime('%Y-%m-%d %H:%M:%S')
2280 self._check_access(user, id, 'read')
2282 'create_uid': credentials,
2283 'create_date': create_date,
2285 'write_date': False,
2291 def _check_removed_columns(self, cr, log=False):
2292 # nothing to check in memory...
# Membership test on the in-memory store suffices for existence.
2295 def exists(self, cr, uid, id, context=None):
2296 return id in self.datas
# orm: the PostgreSQL-backed model implementation (the class body continues
# beyond this listing).
2298 class orm(orm_template):
2299 _sql_constraints = []
2301 _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
# Separate loggers: general ORM messages vs. DDL/schema changes.
2302 __logger = logging.getLogger('orm')
2303 __schema = logging.getLogger('orm.schema')
# read_group: SQL GROUP BY over the model, honoring access rights and ir.rules,
# joining through _inherits when the groupby field lives on a parent table,
# aggregating float/integer columns, and post-formatting date/datetime groups
# into month buckets. NOTE(review): elided listing -- several branches,
# else-clauses and the final return are missing throughout this method.
2304 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
2306 Get the list of records in list view grouped by the given ``groupby`` fields
2308 :param cr: database cursor
2309 :param uid: current user id
2310 :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
2311 :param list fields: list of fields present in the list view specified on the object
2312 :param list groupby: fields by which the records will be grouped
2313 :param int offset: optional number of records to skip
2314 :param int limit: optional max number of records to return
2315 :param dict context: context arguments, like lang, time zone
2316 :param order: optional ``order by`` specification, for overriding the natural
2317 sort ordering of the groups, see also :py:meth:`~osv.osv.osv.search`
2318 (supported only for many2one fields currently)
2319 :return: list of dictionaries(one dictionary for each record) containing:
2321 * the values of fields grouped by the fields in ``groupby`` argument
2322 * __domain: list of tuples specifying the search criteria
2323 * __context: dictionary with argument like ``groupby``
2324 :rtype: [{'field_name_1': value, ...]
2325 :raise AccessError: * if user has no read rights on the requested object
2326 * if user tries to bypass access rules for read on the requested object
2329 context = context or {}
2330 self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
2332 fields = self._columns.keys()
# Build the WHERE query from the domain, then apply record rules.
2334 query = self._where_calc(cr, uid, domain, context=context)
2335 self._apply_ir_rules(cr, uid, query, 'read', context=context)
2337 # Take care of adding join(s) if groupby is an '_inherits'ed field
2338 groupby_list = groupby
2339 qualified_groupby_field = groupby
# Only the first groupby level is executed in SQL; deeper levels are
# delegated to the client via __context['group_by'] below.
2341 if isinstance(groupby, list):
2342 groupby = groupby[0]
2343 qualified_groupby_field = self._inherits_join_calc(groupby, query)
2346 assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
2347 groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
2348 assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
2350 fget = self.fields_get(cr, uid, fields)
# Only numeric columns get aggregated in the SELECT list.
2351 float_int_fields = filter(lambda x: fget[x]['type'] in ('float', 'integer'), fields)
2353 group_count = group_by = groupby
2355 if fget.get(groupby):
# date/datetime groups are bucketed per month via to_char(...,'yyyy-mm').
2356 if fget[groupby]['type'] in ('date', 'datetime'):
2357 flist = "to_char(%s,'yyyy-mm') as %s " % (qualified_groupby_field, groupby)
2358 groupby = "to_char(%s,'yyyy-mm')" % (qualified_groupby_field)
2359 qualified_groupby_field = groupby
2361 flist = qualified_groupby_field
2363 # Don't allow arbitrary values, as this would be a SQL injection vector!
2364 raise except_orm(_('Invalid group_by'),
2365 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
2368 fields_pre = [f for f in float_int_fields if
2369 f == self.CONCURRENCY_CHECK_FIELD
2370 or (f in self._columns and getattr(self._columns[f], '_classic_write'))]
2371 for f in fields_pre:
2372 if f not in ['id', 'sequence']:
# Aggregation function per field, 'sum' unless the column overrides it.
2373 group_operator = fget[f].get('group_operator', 'sum')
2376 qualified_field = '"%s"."%s"' % (self._table, f)
2377 flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
2379 gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
2381 from_clause, where_clause, where_clause_params = query.get_sql()
2382 where_clause = where_clause and ' WHERE ' + where_clause
2383 limit_str = limit and ' limit %d' % limit or ''
2384 offset_str = offset and ' offset %d' % offset or ''
2385 if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
# min(id) identifies each group; count(id) gives the group size.
2387 cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
2390 for r in cr.dictfetchall():
# Normalize SQL NULLs to the ORM's False convention.
2391 for fld, val in r.items():
2392 if val == None: r[fld] = False
2393 alldata[r['id']] = r
# Re-search the representative ids so group ordering follows orderby/groupby.
2396 data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=orderby or groupby, context=context)
2397 # the IDS of records that have groupby field value = False or '' should be sorted too
2398 data_ids += filter(lambda x:x not in data_ids, alldata.keys())
2399 data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
2400 # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
2401 data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
# Each group carries the refined domain clients use to fetch its records.
2405 d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
2406 if not isinstance(groupby_list, (str, unicode)):
2407 if groupby or not context.get('group_by_no_leaf', False):
2408 d['__context'] = {'group_by': groupby_list[1:]}
2409 if groupby and groupby in fget:
2410 if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
# Expand the 'yyyy-mm' bucket into a [first day, last day] range.
2411 dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
2412 days = calendar.monthrange(dt.year, dt.month)[1]
2414 d[groupby] = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d').strftime('%B %Y')
2415 d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
2416 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
2417 del alldata[d['id']][groupby]
2418 d.update(alldata[d['id']])
2422 def _inherits_join_add(self, parent_model_name, query):
2424 Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
2426 :param parent_model_name: name of the parent model for which the clauses should be added
2427 :param query: query object on which the JOIN should be added
# The _inherits mapping gives the FK column pointing to the parent record.
2429 inherits_field = self._inherits[parent_model_name]
2430 parent_model = self.pool.get(parent_model_name)
2431 parent_table_name = parent_model._table
2432 quoted_parent_table_name = '"%s"' % parent_table_name
# Idempotent: only add the table and join condition once per query.
2433 if quoted_parent_table_name not in query.tables:
2434 query.tables.append(quoted_parent_table_name)
2435 query.where_clause.append('("%s".%s = %s.id)' % (self._table, inherits_field, parent_table_name))
2437 def _inherits_join_calc(self, field, query):
2439 Adds missing table select and join clause(s) to ``query`` for reaching
2440 the field coming from an '_inherits' parent table (no duplicates).
2442 :param field: name of inherited field to reach
2443 :param query: query object on which the JOIN should be added
2444 :return: qualified name of field, to be used in SELECT clause
2446 current_table = self
# Walk up the _inherits chain until the model that actually owns the
# column, adding one join per hop.
2447 while field in current_table._inherit_fields and not field in current_table._columns:
2448 parent_model_name = current_table._inherit_fields[field][0]
2449 parent_table = self.pool.get(parent_model_name)
2450 self._inherits_join_add(parent_model_name, query)
2451 current_table = parent_table
2452 return '"%s".%s' % (current_table._table, field)
# _parent_store_compute: rebuild the nested-set columns (parent_left /
# parent_right) for the whole table by a recursive depth-first walk from the
# roots. NOTE(review): elided listing -- parts of browse_rec and the final
# return/commit lines are missing (embedded numbers 2460, 2462, 2467, 2471, 2475+).
2454 def _parent_store_compute(self, cr):
2455 if not self._parent_store:
2457 logger = netsvc.Logger()
2458 logger.notifyChannel('data', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
# Recursive DFS: assigns parent_left on entry, parent_right on exit.
2459 def browse_rec(root, pos=0):
2461 where = self._parent_name+'='+str(root)
2463 where = self._parent_name+' IS NULL'
2465 if self._parent_order:
2465 where += ' order by '+self._parent_order
2466 cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
2468 for id in cr.fetchall():
2469 pos2 = browse_rec(id[0], pos2)
2470 cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
# Start the walk from every root record (no parent).
2472 query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
2473 if self._parent_order:
2474 query += ' order by ' + self._parent_order
2477 for (root,) in cr.fetchall():
2478 pos = browse_rec(root, pos)
# _update_store: recompute and persist the stored values of a
# fields.function column ``k`` for every row, in batches of 40 ids.
# NOTE(review): elided listing -- the batching while-loop header and parts of
# the inner loop are missing (embedded numbers 2488-2489, 2493-2494, 2497, 2500).
2481 def _update_store(self, cr, f, k):
2482 logger = netsvc.Logger()
2483 logger.notifyChannel('data', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
# _symbol_set = (sql_placeholder, python->sql converter) for the column.
2484 ss = self._columns[k]._symbol_set
2485 update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
2486 cr.execute('select id from '+self._table)
2487 ids_lst = map(lambda x: x[0], cr.fetchall())
# Batch of 40 ids at a time (iids is presumably ids_lst[:40] -- TODO confirm).
2490 ids_lst = ids_lst[40:]
# Compute the function values as the superuser (uid 1), empty context.
2491 res = f.get(cr, self, iids, k, 1, {})
2492 for key, val in res.items():
2495 # if val is a many2one, just write the ID
2496 if type(val) == tuple:
# Skip writing only when val is literally False (a false boolean).
2498 if (val<>False) or (type(val)<>bool):
2499 cr.execute(update_query, (ss[1](val), key))
2501 def _check_selection_field_value(self, cr, uid, field, value, context=None):
2502 """Raise except_orm if value is not among the valid values for the selection field"""
# reference values look like 'model.name,id'; validate the id part.
2503 if self._columns[field]._type == 'reference':
2504 val_model, val_id_str = value.split(',', 1)
2507 val_id = long(val_id_str)
2511 raise except_orm(_('ValidateError'),
2512 _('Invalid value for reference field "%s" (last part must be a non-zero integer): "%s"') % (field, value))
# Selection may be a static list of (value, label) pairs or a callable
# producing one; membership check in either case.
2516 if isinstance(self._columns[field].selection, (tuple, list)):
2517 if val in dict(self._columns[field].selection):
2519 elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
2521 raise except_orm(_('ValidateError'),
2522 _('The value "%s" for the field "%s" is not in the selection') % (value, field))
2524 def _check_removed_columns(self, cr, log=False):
2525 # iterate on the database columns to drop the NOT NULL constraints
2526 # of fields which were required but have been removed (or will be added by another module)
# Stored columns only: non-stored function fields have no DB column.
2527 columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
2528 columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
# Query pg_attribute for live (not dropped), non-system columns of this
# table that the model no longer declares.
2529 cr.execute("SELECT a.attname, a.attnotnull"
2530 " FROM pg_class c, pg_attribute a"
2531 " WHERE c.relname=%s"
2532 " AND c.oid=a.attrelid"
2533 " AND a.attisdropped=%s"
2534 " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
2535 " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
2537 for column in cr.dictfetchall():
2539 self.__logger.debug("column %s is in the table %s but not in the corresponding object %s",
2540 column['attname'], self._table, self._name)
# Relax NOT NULL so other modules can still insert rows without the
# orphaned column.
2541 if column['attnotnull']:
2542 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
2543 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2544 self._table, column['attname'])
# _auto_init: the schema-migration workhorse -- reconcile the PostgreSQL
# table with the model's _columns (create table/columns, alter types, rename
# via 'oldname', manage NOT NULL, indexes and FK constraints), record FKs to
# be created in _auto_end, and schedule stored-function recomputation.
# NOTE(review): heavily elided listing -- many loop headers, else-branches,
# try/except wrappers and the return statement are missing throughout.
2546 def _auto_init(self, cr, context=None):
2549 Call _field_create and, unless _auto is False:
2551 - create the corresponding table in database for the model,
2552 - possibly add the parent columns in database,
2553 - possibly add the columns 'create_uid', 'create_date', 'write_uid',
2554 'write_date' in database if _log_access is True (the default),
2555 - report on database columns no more existing in _columns,
2556 - remove no more existing not null constraints,
2557 - alter existing database columns to match _columns,
2558 - create database tables to match _columns,
2559 - add database indices to match _columns,
2560 - save in self._foreign_keys a list a foreign keys to create (see
# FKs are accumulated here and actually created later in _auto_end.
2564 self._foreign_keys = []
2565 raise_on_invalid_object_name(self._name)
2568 store_compute = False
2570 update_custom_fields = context.get('update_custom_fields', False)
2571 self._field_create(cr, context=context)
2572 create = not self._table_exist(cr)
2574 if getattr(self, '_auto', True):
2577 self._create_table(cr)
# parent_left/parent_right columns for nested-set hierarchies.
2580 if self._parent_store:
2581 if not self._parent_columns_exist(cr):
2582 self._create_parent_columns(cr)
2583 store_compute = True
2585 # Create the create_uid, create_date, write_uid, write_date, columns if desired.
2586 if self._log_access:
2587 self._add_log_columns(cr)
2589 self._check_removed_columns(cr, log=False)
2591 # iterate on the "object columns"
2592 column_data = self._select_column_data(cr)
2594 for k, f in self._columns.iteritems():
# Audit columns are managed separately above; skip them here.
2595 if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
2597 # Don't update custom (also called manual) fields
2598 if f.manual and not update_custom_fields:
# one2many/many2many have no column on this table; check/create the
# remote reference or relation table instead.
2601 if isinstance(f, fields.one2many):
2602 self._o2m_raise_on_missing_reference(cr, f)
2604 elif isinstance(f, fields.many2many):
2605 self._m2m_raise_or_create_relation(cr, f)
2608 res = column_data.get(k)
2610 # The field is not found as-is in database, try if it
2611 # exists with an old name.
2612 if not res and hasattr(f, 'oldname'):
2613 res = column_data.get(f.oldname)
2615 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
2617 column_data[k] = res
2618 self.__schema.debug("Table '%s': renamed column '%s' to '%s'",
2619 self._table, f.oldname, k)
2621 # The field already exists in database. Possibly
2622 # change its type, rename it, drop it or change its
2625 f_pg_type = res['typname']
2626 f_pg_size = res['size']
2627 f_pg_notnull = res['attnotnull']
# A column converted to a non-stored function field is dropped
# (unless the field opts out via 'nodrop').
2628 if isinstance(f, fields.function) and not f.store and\
2629 not getattr(f, 'nodrop', False):
2630 self.__logger.info('column %s (%s) in table %s removed: converted to a function !\n',
2631 k, f.string, self._table)
2632 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
2634 self.__schema.debug("Table '%s': dropped column '%s' with cascade",
2638 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
# Whitelist of (pg_type, field_type, new_sql_type, cast) conversions
# that can be performed in place via a temp column.
2643 ('text', 'char', 'VARCHAR(%d)' % (f.size or 0,), '::VARCHAR(%d)'%(f.size or 0,)),
2644 ('varchar', 'text', 'TEXT', ''),
2645 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2646 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
2647 ('timestamp', 'date', 'date', '::date'),
2648 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2649 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
# Grow an undersized varchar by copying through a temp column.
2651 if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
2652 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2653 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
2654 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size))
2655 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2657 self.__schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
2658 self._table, k, f_pg_size, f.size)
# Apply a whitelisted in-place type conversion when applicable.
2660 if (f_pg_type==c[0]) and (f._type==c[1]):
2661 if f_pg_type != f_obj_type:
2663 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2664 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
2665 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
2666 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2668 self.__schema.debug("Table '%s': column '%s' changed type from %s to %s",
2669 self._table, k, c[0], c[1])
# Incompatible type change: park the old data in a fresh
# '<col>_movedN' column and recreate the column with the new type.
2672 if f_pg_type != f_obj_type:
2676 newname = k + '_moved' + str(i)
2677 cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
2678 "WHERE c.relname=%s " \
2679 "AND a.attname=%s " \
2680 "AND c.oid=a.attrelid ", (self._table, newname))
2681 if not cr.fetchone()[0]:
2685 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2686 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
2687 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2688 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2689 self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
2690 self._table, k, f_pg_type, f._type, newname)
2692 # if the field is required and hasn't got a NOT NULL constraint
2693 if f.required and f_pg_notnull == 0:
2694 # set the field to the default value if any
2695 if k in self._defaults:
2696 if callable(self._defaults[k]):
# Defaults are evaluated as the superuser (uid 1).
2697 default = self._defaults[k](self, cr, 1, context)
2699 default = self._defaults[k]
# Backfill NULLs with the default before tightening the constraint.
2701 if (default is not None):
2702 ss = self._columns[k]._symbol_set
2703 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
2704 cr.execute(query, (ss[1](default),))
2705 # add the NOT NULL constraint
2708 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2710 self.__schema.debug("Table '%s': column '%s': added NOT NULL constraint",
2713 msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
2714 "If you want to have it, you should update the records and execute manually:\n"\
2715 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2716 self.__schema.warn(msg, self._table, k, self._table, k)
# Field no longer required: drop the stale constraint.
2718 elif not f.required and f_pg_notnull == 1:
2719 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2721 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
# Keep single-column index in sync with the field's 'select' flag.
2724 indexname = '%s_%s_index' % (self._table, k)
2725 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
2726 res2 = cr.dictfetchall()
2727 if not res2 and f.select:
2728 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2730 if f._type == 'text':
2731 # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
2732 msg = "Table '%s': Adding (b-tree) index for text column '%s'."\
2733 "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
2734 " because there is a length limit for indexable btree values!\n"\
2735 "Use a search view instead if you simply want to make the field searchable."
2736 self.__schema.warn(msg, self._table, k, f._type)
2737 if res2 and not f.select:
2738 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
2740 msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
2741 self.__schema.debug(msg, self._table, k, f._type)
# many2one: verify the FK's ON DELETE action matches f.ondelete;
# if not, drop the constraint and queue a replacement.
2743 if isinstance(f, fields.many2one):
2744 ref = self.pool.get(f._obj)._table
2745 if ref != 'ir_actions':
2746 cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
2747 'pg_attribute as att1, pg_attribute as att2 '
2748 'WHERE con.conrelid = cl1.oid '
2749 'AND cl1.relname = %s '
2750 'AND con.confrelid = cl2.oid '
2751 'AND cl2.relname = %s '
2752 'AND array_lower(con.conkey, 1) = 1 '
2753 'AND con.conkey[1] = att1.attnum '
2754 'AND att1.attrelid = cl1.oid '
2755 'AND att1.attname = %s '
2756 'AND array_lower(con.confkey, 1) = 1 '
2757 'AND con.confkey[1] = att2.attnum '
2758 'AND att2.attrelid = cl2.oid '
2759 'AND att2.attname = %s '
2760 "AND con.contype = 'f'", (self._table, ref, k, 'id'))
2761 res2 = cr.dictfetchall()
2763 if res2[0]['confdeltype'] != POSTGRES_CONFDELTYPES.get(f.ondelete.upper(), 'a'):
2764 cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"')
2765 self._foreign_keys.append((self._table, k, ref, f.ondelete))
2767 self.__schema.debug("Table '%s': column '%s': XXX",
2770 # The field doesn't exist in database. Create it if necessary.
2772 if not isinstance(f, fields.function) or f.store:
2773 # add the missing field
2774 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2775 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2776 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2777 self._table, k, get_pg_type(f)[1])
# Initialize the fresh column on pre-existing rows with the default.
2780 if not create and k in self._defaults:
2781 if callable(self._defaults[k]):
2782 default = self._defaults[k](self, cr, 1, context)
2784 default = self._defaults[k]
2786 ss = self._columns[k]._symbol_set
2787 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
2788 cr.execute(query, (ss[1](default),))
2790 netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
2792 # remember the functions to call for the stored fields
2793 if isinstance(f, fields.function):
2795 if f.store is not True: # i.e. if f.store is a dict
2796 order = f.store[f.store.keys()[0]][2]
2797 todo_end.append((order, self._update_store, (f, k)))
2799 # and add constraints if needed
2800 if isinstance(f, fields.many2one):
2801 if not self.pool.get(f._obj):
2802 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2803 ref = self.pool.get(f._obj)._table
2804 # ir_actions is inherited so foreign key doesn't work on it
2805 if ref != 'ir_actions':
2806 self._foreign_keys.append((self._table, k, ref, f.ondelete))
2807 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2808 self._table, k, ref, f.ondelete)
2810 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2814 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2815 self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
2818 msg = "WARNING: unable to set column %s of table %s not null !\n"\
2819 "Try to re-run: openerp-server --update=module\n"\
2820 "If it doesn't work, update records and execute manually:\n"\
2821 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2822 self.__logger.warn(msg, k, self._table, self._table, k)
# _auto is False: still detect whether the table/view exists.
2826 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2827 create = not bool(cr.fetchone())
2829 cr.commit() # start a new transaction
2831 self._add_sql_constraints(cr)
2834 self._execute_sql(cr)
# Recompute the nested-set columns if they were just created.
2837 self._parent_store_compute(cr)
2843 def _auto_end(self, cr, context=None):
2844 """ Create the foreign keys recorded by _auto_init. """
2845 for t, k, r, d in self._foreign_keys:
2846 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
2848 del self._foreign_keys
    def _table_exist(self, cr):
        # Query the PostgreSQL catalog for a table or view named after this model.
        # NOTE(review): as shown here the method falls through and returns None;
        # a `return cr.rowcount`-style line appears to be missing from this
        # excerpt — confirm against the complete source before relying on the
        # return value.
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2856 def _create_table(self, cr):
2857 cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
2858 cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''")))
2859 self.__schema.debug("Table '%s': created", self._table)
    def _parent_columns_exist(self, cr):
        # Check the pg catalog for a 'parent_left' column on this model's table
        # (presence of parent_left implies the parent_store columns were set up).
        # NOTE(review): no return statement is visible in this excerpt; a
        # `return cr.rowcount`-style line appears to be missing — confirm
        # against the complete source.
        cr.execute("""SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, 'parent_left'))
    def _create_parent_columns(self, cr):
        # Add the parent_left/parent_right columns used by the parent_store
        # (nested-set) hierarchy optimisation, and warn about inconsistent
        # field definitions.
        # NOTE(review): several continuation lines (the `self._name` arguments
        # of the logger calls) are missing from this excerpt — the code is kept
        # as found; verify against the complete source.
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
        if 'parent_left' not in self._columns:
            # the ORM needs a matching field definition, not only the SQL column
            self.__logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
            self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                                self._table, 'parent_left', 'INTEGER')
        elif not self._columns['parent_left'].select:
            # parent_left is used heavily by hierarchy queries and must be indexed
            self.__logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
        if 'parent_right' not in self._columns:
            self.__logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
            self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                                self._table, 'parent_right', 'INTEGER')
        elif not self._columns['parent_right'].select:
            self.__logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
        if self._columns[self._parent_name].ondelete != 'cascade':
            # deleting a parent must cascade, otherwise the tree keeps orphans
            self.__logger.error("The column %s on object %s must be set as ondelete='cascade'",
                                self._parent_name, self._name)
    def _add_log_columns(self, cr):
        # Ensure the audit-trail columns (create_uid/create_date/write_uid/
        # write_date) exist on the table, adding any that are missing.
        # NOTE(review): this excerpt is missing the opener of the
        # column-definition dict (e.g. `logs = {`), the per-column loop header,
        # the catalog SELECT opener and the existence guard — kept as found;
        # verify against the complete source.
        'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
        'create_date': 'TIMESTAMP',
        'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
        'write_date': 'TIMESTAMP'
        FROM pg_class c, pg_attribute a
        WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
        """, (self._table, k))
        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
        self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                            self._table, k, logs[k])
2916 def _select_column_data(self, cr):
2917 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2918 "FROM pg_class c,pg_attribute a,pg_type t " \
2919 "WHERE c.relname=%s " \
2920 "AND c.oid=a.attrelid " \
2921 "AND a.atttypid=t.oid", (self._table,))
2922 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
2925 def _o2m_raise_on_missing_reference(self, cr, f):
2926 # TODO this check should be a method on fields.one2many.
2927 other = self.pool.get(f._obj)
2929 # TODO the condition could use fields_get_keys().
2930 if f._fields_id not in other._columns.keys():
2931 if f._fields_id not in other._inherit_fields.keys():
2932 raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
    def _m2m_raise_or_create_relation(self, cr, f):
        # Ensure the many2many relation table for field `f` exists: create the
        # table, its two indexes, the table comment, and record the two foreign
        # keys for deferred creation (see _auto_end and self._foreign_keys).
        # NOTE(review): one line appears to be missing between the COMMENT
        # statement and the debug call in this excerpt — verify against the
        # complete source.
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (f._rel,))
        if not cr.dictfetchall():
            if not self.pool.get(f._obj):
                # the comodel must be registered before the relation can be built
                raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
            ref = self.pool.get(f._obj)._table
            cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s")) WITH OIDS' % (f._rel, f._id1, f._id2, f._id1, f._id2))
            # FK creation is deferred: _auto_end consumes self._foreign_keys
            self._foreign_keys.append((f._rel, f._id1, self._table, 'CASCADE'))
            self._foreign_keys.append((f._rel, f._id2, ref, 'CASCADE'))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
            cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref))
            self.__schema.debug("Create table '%s': relation between '%s' and '%s'",
                                f._rel, self._table, ref)
    def _add_sql_constraints(self, cr):
        """
        Modify this model's database table constraints so they match the one in
        _sql_constraints.
        """
        # NOTE(review): this excerpt is missing several lines (the
        # `sql_actions = {` dict openers for the 'drop'/'add' entries, the
        # try/except around the execute, and an `else` comment context) — the
        # code is kept as found; verify against the complete source.
        for (key, con, _) in self._sql_constraints:
            conname = '%s_%s' % (self._table, key)

            # fetch any existing constraint with the same name to compare its
            # definition with the declared one
            cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
            existing_constraints = cr.dictfetchall()

                'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
                'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                    self._table, conname, con),
                'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
                'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
                'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
                'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (

            if not existing_constraints:
                # constraint does not exists:
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            elif con.lower() not in [item['condef'].lower() for item in existing_constraints]:
                # constraint exists but its definition has changed:
                sql_actions['drop']['execute'] = True
                sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )

            # we need to add the constraint:
            sql_actions = [item for item in sql_actions.values()]
            sql_actions.sort(key=lambda x: x['order'])
            for sql_action in [action for action in sql_actions if action['execute']]:
                cr.execute(sql_action['query'])
                self.__schema.debug(sql_action['msg_ok'])
                self.__schema.warn(sql_action['msg_err'])
    def _execute_sql(self, cr):
        """ Execute the SQL code from the _sql attribute (if any)."""
        if hasattr(self, "_sql"):
            # statements are separated by ';'
            for line in self._sql.split(';'):
                # strip embedded newlines and surrounding whitespace
                line2 = line.replace('\n', '').strip()
                # NOTE(review): the excerpt ends here; the actual execution of
                # `line2` (e.g. `if line2: cr.execute(line2)`) appears to be
                # missing — verify against the complete source.
3019 def createInstance(cls, pool, cr):
3020 return cls.makeInstance(pool, cr, ['_columns', '_defaults',
3021 '_inherits', '_constraints', '_sql_constraints'])
    def __init__(self, pool, cr):
        """
        - copy the stored fields' functions in the osv_pool,
        - update the _columns with the fields found in ir_model_fields,
        - ensure there is a many2one for each _inherits'd parent,
        - update the children's _columns,
        - give a chance to each field to initialize itself.
        """
        # NOTE(review): this excerpt is missing many lines (else branches,
        # try/except scaffolding, dict openers, guards); the comments below
        # describe only what is visible — verify against the complete source.
        super(orm, self).__init__(pool, cr)

        if not hasattr(self, '_log_access'):
            # if not access is not specify, it is the same value as _auto
            self._log_access = getattr(self, "_auto", True)

        # copy so per-instance modifications don't leak into the class attribute
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if hasattr(f, 'digits_change'):
            if not isinstance(f, fields.function):
            if self._columns[store_field].store is True:
                # store=True: recompute is triggered by writes on this model itself
                sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
                # NOTE(review): the `else:` introducing this assignment is missing
                sm = self._columns[store_field].store
            for object, aa in sm.items():
                # store tuples come as (fnct, fields, priority) or with a 4th
                # "time length" element; the branching lines are missing here
                (fnct, fields2, order, length) = aa
                (fnct, fields2, order) = aa
                raise except_orm('Error',
                    ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                self.pool._store_function.setdefault(object, [])
                # avoid registering the same trigger twice
                for x, y, z, e, f, l in self.pool._store_function[object]:
                    if (x==self._name) and (y==store_field) and (e==fields2):
                self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order, length))
                # keep triggers sorted by priority (element 4 of the tuple)
                self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))

        for (key, _, msg) in self._sql_constraints:
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields
        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
        cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
        for field in cr.dictfetchall():
            if field['name'] in self._columns:
            # NOTE(review): the `attrs = {` opener for this literal is missing
            'string': field['field_description'],
            'required': bool(field['required']),
            'readonly': bool(field['readonly']),
            'domain': eval(field['domain']) if field['domain'] else None,
            'size': field['size'],
            'ondelete': field['on_delete'],
            'translate': (field['translate']),
            #'select': int(field['select_level'])
            # build the proper column object for the manual field's type
            if field['ttype'] == 'selection':
                self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
            elif field['ttype'] == 'reference':
                self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
            elif field['ttype'] == 'many2one':
                self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
            elif field['ttype'] == 'one2many':
                self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
            elif field['ttype'] == 'many2many':
                _rel1 = field['relation'].replace('.', '_')
                _rel2 = field['model'].replace('.', '_')
                _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
                self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
        self._inherits_check()
        self._inherits_reload()
        if not self._sequence:
            self._sequence = self._table + '_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        for f in self._columns:
            self._columns[f].restart()

    __init__.__doc__ = orm_template.__init__.__doc__ + __init__.__doc__
3121 # Update objects that uses this one to update their _inherits fields
3124 def _inherits_reload_src(self):
3125 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3126 for obj in self.pool.models.values():
3127 if self._name in obj._inherits:
3128 obj._inherits_reload()
    def _inherits_reload(self):
        """ Recompute the _inherit_fields mapping.

        This will also call itself on each inherits'd child model.
        """
        # NOTE(review): the initialization of `res` (e.g. `res = {}`) is missing
        # from this excerpt — verify against the complete source.
        for table in self._inherits:
            other = self.pool.get(table)
            # start from the parent's own inherited mapping, then overlay its
            # real columns and its inherited columns, keyed by the local link field
            res.update(other._inherit_fields)
            for col in other._columns.keys():
                res[col] = (table, self._inherits[table], other._columns[col])
            for col in other._inherit_fields.keys():
                res[col] = (table, self._inherits[table], other._inherit_fields[col][2])
        self._inherit_fields = res
        # propagate the recomputation to models that inherit from this one
        self._inherits_reload_src()
3147 def _inherits_check(self):
3148 for table, field_name in self._inherits.items():
3149 if field_name not in self._columns:
3150 logging.getLogger('init').info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.' % (field_name, self._name))
3151 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3152 required=True, ondelete="cascade")
3153 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() != "cascade":
3154 logging.getLogger('init').warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.' % (field_name, self._name))
3155 self._columns[field_name].required = True
3156 self._columns[field_name].ondelete = "cascade"
3158 #def __getattr__(self, name):
3160 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3161 # (though inherits doesn't use Python inheritance).
3162 # Handles translating between local ids and remote ids.
3163 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3164 # when you have inherits.
3166 # for model, field in self._inherits.iteritems():
3167 # proxy = self.pool.get(model)
3168 # if hasattr(proxy, name):
3169 # attribute = getattr(proxy, name)
3170 # if not hasattr(attribute, '__call__'):
3174 # return super(orm, self).__getattr__(name)
3176 # def _proxy(cr, uid, ids, *args, **kwargs):
3177 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3178 # lst = [obj[field].id for obj in objects if obj[field]]
3179 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
    def fields_get(self, cr, user, fields=None, context=None):
        """
        Get the description of list of fields

        :param cr: database cursor
        :param user: current user id
        :param fields: list of fields
        :param context: context arguments, like lang, time zone
        :return: dictionary of field dictionaries, each one describing a field of the business object
        :raise AccessError: * if user has no create/write rights on the requested object
        """
        # NOTE(review): this excerpt appears to be missing lines between the
        # docstring and the code below — verify against the complete source.
        ira = self.pool.get('ir.model.access')
        # a field is editable for the client when the user may write OR create
        write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
                       ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
        return super(orm, self).fields_get(cr, user, fields, context, write_access)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        # NOTE(review): this excerpt is missing several lines (the docstring,
        # the guard before the default `fields` computation — presumably
        # `if not fields:` — the body of the int/long branch, and the loop over
        # results); the code is kept as found — verify against the complete
        # source.
        self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
        # default to all real + inherited fields
        fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
        if isinstance(ids, (int, long)):
        # accept dicts carrying an 'id' key in place of plain ids
        select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
        result = self._read_flat(cr, user, select, fields, context, load)
        for key, v in r.items():
        if isinstance(ids, (int, long, dict)):
            # single-id call: unwrap the one-element result list
            return result and result[0] or False
    def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
        # Low-level read: fetch the requested columns via direct SQL, apply
        # ir.rule security clauses, resolve translations, inherited fields and
        # function fields.
        # NOTE(review): this excerpt is missing many lines (guards, else
        # branches, loop headers, the final return); comments describe only
        # what is visible — verify against the complete source.
        if fields_to_read == None:
            fields_to_read = self._columns.keys()

        # Construct a clause for the security rules.
        # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
        # or will at least contain self._table.
        rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

        # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
        fields_pre = [f for f in fields_to_read if
                      f == self.CONCURRENCY_CHECK_FIELD
                      or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                      ] + self._inherits.values()

        def convert_field(f):
            # build the SELECT expression for one column
            f_qual = "%s.%s" % (self._table, f) # need fully-qualified references in case len(tables) > 1
            if f in ('create_date', 'write_date'):
                return "date_trunc('second', %s) as %s" % (f_qual, f)
            if f == self.CONCURRENCY_CHECK_FIELD:
                if self._log_access:
                    return "COALESCE(%s.write_date, %s.create_date, now())::timestamp AS %s" % (self._table, self._table, f,)
                return "now()::timestamp AS %s" % (f,)
            if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                # only the size of binary fields when bin_size is requested
                return 'length(%s) as "%s"' % (f_qual, f)

        fields_pre2 = map(convert_field, fields_pre)
        order_by = self._parent_order or self._order
        select_fields = ','.join(fields_pre2 + [self._table + '.id'])
        query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
        # NOTE(review): the guard around appending the rule clause is missing
        query += " AND " + (' OR '.join(rule_clause))
        query += " ORDER BY " + order_by
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute(query, [tuple(sub_ids)] + rule_params)
            # fewer rows than ids means some records are hidden by rules or gone
            if cr.rowcount != len(sub_ids):
                raise except_orm(_('AccessError'),
                                 _('Operation prohibited by access rules, or performed on an already deleted document (Operation: read, Document type: %s).')
                                 % (self._description,))
            cr.execute(query, (tuple(sub_ids),))
            res.extend(cr.dictfetchall())
        res = map(lambda x: {'id': x}, ids)

        for f in fields_pre:
            if f == self.CONCURRENCY_CHECK_FIELD:
            if self._columns[f].translate:
                ids = [x['id'] for x in res]
                #TODO: optimize out of this loop
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
                r[f] = res_trans.get(r['id'], False) or r[f]

        for table in self._inherits:
            col = self._inherits[table]
            # inherited fields that are not shadowed by a real local column
            cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
            res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
                if not record[col]: # if the record is deleted from _inherits table?
                record.update(res3[record[col]])
                if col not in fields_to_read:

        # all fields which need to be post-processed by a simple function (symbol_get)
        fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
            for f in fields_post:
                r[f] = self._columns[f]._symbol_get(r[f])
        ids = [x['id'] for x in res]

        # all non inherited fields for which the attribute whose name is in load is False
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

        # Compute POST fields
        for f in fields_post:
            # group multi-fields so one .get() call computes them together
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key, val in todo.items():
            res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
            assert res2 is not None, \
                'The function field "%s" on the "%s" model returned None\n' \
                '(a dictionary was expected).' % (val[0], self._name)
                if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
                multi_fields = res2.get(record['id'],{})
                record[pos] = multi_fields.get(pos,[])
            res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
                record[f] = res2[record['id']]

        # group-based field-level read permission filtering
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
            for group in groups:
                module = group.split(".")[0]
                grp = group.split(".")[1]
                # is the user a member of this group?
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                elif readonly[0][0] == 0:
                # mask the value according to its type when permission is denied
                if type(vals[field]) == type([]):
                elif type(vals[field]) == type(0.0):
                elif type(vals[field]) == type(''):
                    vals[field] = '=No Permission='
    def perm_read(self, cr, user, ids, context=None, details=True):
        """
        Returns some metadata about the given records.

        :param details: if True, \*_uid fields are replaced with the name of the user
        :return: list of ownership dictionaries for each requested record
        :rtype: list of dictionaries with the following keys:

            * create_uid: user who created the record
            * create_date: date when the record was created
            * write_uid: last user who changed the record
            * write_date: date of the last change to the record
            * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
        """
        # NOTE(review): this excerpt is missing several lines (the `fields`
        # initialization, the loops over `res`/keys, the try/except around
        # name_get, and the return); kept as found — verify against the
        # complete source.
        uniq = isinstance(ids, (int, long))
        if self._log_access:
            fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
        quoted_table = '"%s"' % self._table
        fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
        # LEFT JOIN on ir_model_data to also fetch the record's XML ID, if any
        query = '''SELECT %s, __imd.module, __imd.name
                   FROM %s LEFT JOIN ir_model_data __imd
                   ON (__imd.model = %%s and __imd.res_id = %s.id)
                   WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
        cr.execute(query, (self._name, tuple(ids)))
        res = cr.dictfetchall()
            r[key] = r[key] or False
            if details and key in ('write_uid', 'create_uid') and r[key]:
                r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
                pass # Leave the numeric uid there
            r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
            del r['name'], r['module']
    def _check_concurrency(self, cr, ids, context):
        # Optimistic-concurrency check: compare the client-supplied timestamps
        # (carried in the context under CONCURRENCY_CHECK_FIELD) with the
        # records' current write/create dates and raise if any record changed.
        # NOTE(review): this excerpt is missing several lines (early return,
        # the loop over ids, the guard's body, and the fetch of `res`); kept as
        # found — verify against the complete source.
        if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
        for sub_ids in cr.split_for_in_conditions(ids):
                id_ref = "%s,%s" % (self._name, id)
                # pop so each supplied timestamp is checked at most once
                update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
                ids_to_check.extend([id, update_date])
            if not ids_to_check:
            cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
            # mention the first one only to keep the error message readable
            raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """Verifies that the operation given by ``operation`` is allowed for the user
        according to ir.rules.

        :param operation: one of ``write``, ``unlink``
        :raise except_orm: * if current ir.rules do not permit this operation.
        :return: None if the operation is allowed
        """
        # NOTE(review): a guard (likely `if where_clause:`) before extending the
        # WHERE clause appears to be missing from this excerpt — verify against
        # the complete source.
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
        where_clause = ' and ' + ' and '.join(where_clause)
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                       ' WHERE ' + self._table + '.id IN %s' + where_clause,
                       [sub_ids] + where_params)
            # fewer matching rows than ids means some records are forbidden or gone
            if cr.rowcount != len(sub_ids):
                raise except_orm(_('AccessError'),
                                 _('Operation prohibited by access rules, or performed on an already deleted document (Operation: %s, Document type: %s).')
                                 % (operation, self._description))
    def unlink(self, cr, uid, ids, context=None):
        """
        Delete records with given ids

        :param cr: database cursor
        :param uid: current user id
        :param ids: id or list of ids
        :param context: (optional) context arguments, like lang, time zone
        :raise AccessError: * if user has no unlink rights on the requested object
                            * if user tries to bypass access rules for unlink on the requested object
        :raise UserError: if the record is default property for other records
        """
        # NOTE(review): this excerpt is missing several lines (guards, the
        # workflow loop header, `if`s around the dependent unlinks, and the
        # return); kept as found — verify against the complete source.
        if isinstance(ids, (int, long)):
        # snapshot the stored-function recomputations BEFORE deleting the rows
        result_store = self._store_get_values(cr, uid, ids, None, context)
        self._check_concurrency(cr, ids, context)
        self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
        # forbid deleting a record that other records use as a default property
        properties = self.pool.get('ir.property')
        domain = [('res_id', '=', False),
                  ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
        if properties.search(cr, uid, domain, context=context):
            raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
        # drop any workflow instances attached to the deleted records
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_delete(uid, self._name, oid, cr)
        self.check_access_rule(cr, uid, ids, 'unlink', context=context)
        pool_model_data = self.pool.get('ir.model.data')
        ir_values_obj = self.pool.get('ir.values')
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('delete from ' + self._table + ' ' \
                       'where id IN %s', (sub_ids,))

            # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
            # as these are not connected with real database foreign keys, and would be dangling references.
            # Step 1. Calling unlink of ir_model_data only for the affected IDS.
            reference_ids = pool_model_data.search(cr, uid, [('res_id','in',list(sub_ids)),('model','=',self._name)], context=context)
            # Step 2. Marching towards the real deletion of referenced records
            pool_model_data.unlink(cr, uid, reference_ids, context=context)

            # For the same reason, removing the record relevant to ir_values
            ir_value_ids = ir_values_obj.search(cr, uid,
                ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
            ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)

        # recompute stored function fields on OTHER models that referenced
        # the deleted records, keeping only ids that still exist
        for order, object, store_ids, fields in result_store:
            if object != self._name:
                obj = self.pool.get(object)
                cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
                rids = map(lambda x: x[0], cr.fetchall())
                obj._store_set_values(cr, uid, rids, fields, context)
3539 def write(self, cr, user, ids, vals, context=None):
3541 Update records with given ids with the given field values
3543 :param cr: database cursor
3544 :param user: current user id
3546 :param ids: object id or list of object ids to update according to **vals**
3547 :param vals: field values to update, e.g {'field_name': new_field_value, ...}
3548 :type vals: dictionary
3549 :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
3550 :type context: dictionary
3552 :raise AccessError: * if user has no write rights on the requested object
3553 * if user tries to bypass access rules for write on the requested object
3554 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
3555 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
3557 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:
3559 + For a many2many field, a list of tuples is expected.
3560 Here is the list of tuple that are accepted, with the corresponding semantics ::
3562 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
3563 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
3564 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
3565 (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
3566 (4, ID) link to existing record with id = ID (adds a relationship)
3567 (5) unlink all (like using (3,ID) for all linked records)
3568 (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)
3571 [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]
3573 + For a one2many field, a lits of tuples is expected.
3574 Here is the list of tuple that are accepted, with the corresponding semantics ::
3576 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
3577 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
3578 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
3581 [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]
3583 + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
3584 + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
3588 for field in vals.copy():
3590 if field in self._columns:
3591 fobj = self._columns[field]
3592 elif field in self._inherit_fields:
3593 fobj = self._inherit_fields[field][2]
3600 for group in groups:
3601 module = group.split(".")[0]
3602 grp = group.split(".")[1]
3603 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
3604 (grp, module, 'res.groups', user))
3605 readonly = cr.fetchall()
3606 if readonly[0][0] >= 1:
3609 elif readonly[0][0] == 0:
3621 if isinstance(ids, (int, long)):
3624 self._check_concurrency(cr, ids, context)
3625 self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
3627 result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
3629 # No direct update of parent_left/right
3630 vals.pop('parent_left', None)
3631 vals.pop('parent_right', None)
3633 parents_changed = []
3634 parent_order = self._parent_order or self._order
3635 if self._parent_store and (self._parent_name in vals):
3636 # The parent_left/right computation may take up to
3637 # 5 seconds. No need to recompute the values if the
3638 # parent is the same.
3639 # Note: to respect parent_order, nodes must be processed in
3640 # order, so ``parents_changed`` must be ordered properly.
3641 parent_val = vals[self._parent_name]
3643 query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
3644 (self._table, self._parent_name, self._parent_name, parent_order)
3645 cr.execute(query, (tuple(ids), parent_val))
3647 query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
3648 (self._table, self._parent_name, parent_order)
3649 cr.execute(query, (tuple(ids),))
3650 parents_changed = map(operator.itemgetter(0), cr.fetchall())
3657 totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
3659 if field in self._columns:
3660 if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
3661 if (not totranslate) or not self._columns[field].translate:
3662 upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
3663 upd1.append(self._columns[field]._symbol_set[1](vals[field]))
3664 direct.append(field)
3666 upd_todo.append(field)
3668 updend.append(field)
3669 if field in self._columns \
3670 and hasattr(self._columns[field], 'selection') \
3672 self._check_selection_field_value(cr, user, field, vals[field], context=context)
3674 if self._log_access:
3675 upd0.append('write_uid=%s')
3676 upd0.append('write_date=now()')
3680 self.check_access_rule(cr, user, ids, 'write', context=context)
3681 for sub_ids in cr.split_for_in_conditions(ids):
3682 cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
3683 'where id IN %s', upd1 + [sub_ids])
3684 if cr.rowcount != len(sub_ids):
3685 raise except_orm(_('AccessError'),
3686 _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
3691 if self._columns[f].translate:
3692 src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
3695 # Inserting value to DB
3696 self.write(cr, user, ids, {f: vals[f]})
3697 self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
3700 # call the 'set' method of fields which are not classic_write
3701 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
3703 # default element in context must be removed when call a one2many or many2many
3704 rel_context = context.copy()
3705 for c in context.items():
3706 if c[0].startswith('default_'):
3707 del rel_context[c[0]]
3709 for field in upd_todo:
3711 result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
3713 for table in self._inherits:
3714 col = self._inherits[table]
3716 for sub_ids in cr.split_for_in_conditions(ids):
3717 cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
3718 'where id IN %s', (sub_ids,))
3719 nids.extend([x[0] for x in cr.fetchall()])
3723 if self._inherit_fields[val][0] == table:
3726 self.pool.get(table).write(cr, user, nids, v, context)
3728 self._validate(cr, user, ids, context)
3730 # TODO: use _order to set dest at the right position and not first node of parent
3731 # We can't defer parent_store computation because the stored function
3732 # fields that are computer may refer (directly or indirectly) to
3733 # parent_left/right (via a child_of domain)
3736 self.pool._init_parent[self._name] = True
3738 order = self._parent_order or self._order
3739 parent_val = vals[self._parent_name]
3741 clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
3743 clause, params = '%s IS NULL' % (self._parent_name,), ()
3745 for id in parents_changed:
3746 cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
3747 pleft, pright = cr.fetchone()
3748 distance = pright - pleft + 1
3750 # Positions of current siblings, to locate proper insertion point;
3751 # this can _not_ be fetched outside the loop, as it needs to be refreshed
3752 # after each update, in case several nodes are sequentially inserted one
3753 # next to the other (i.e computed incrementally)
3754 cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
3755 parents = cr.fetchall()
3757 # Find Position of the element
3759 for (parent_pright, parent_id) in parents:
3762 position = parent_pright + 1
3764 # It's the first node of the parent
3769 cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
3770 position = cr.fetchone()[0] + 1
3772 if pleft < position <= pright:
3773 raise except_orm(_('UserError'), _('Recursivity Detected.'))
3775 if pleft < position:
3776 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
3777 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
3778 cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
3780 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
3781 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
3782 cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))
3784 result += self._store_get_values(cr, user, ids, vals.keys(), context)
3788 for order, object, ids_to_update, fields_to_recompute in result:
3789 key = (object, tuple(fields_to_recompute))
3790 done.setdefault(key, {})
3791 # avoid to do several times the same computation
3793 for id in ids_to_update:
3794 if id not in done[key]:
3795 done[key][id] = True
3797 self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)
3799 wf_service = netsvc.LocalService("workflow")
3801 wf_service.trg_write(user, self._name, id, cr)
3805 # TODO: Should set perm to user.xxx
def create(self, cr, user, vals, context=None):
    """
    Create a new record for the model.

    The values for the new record are initialized using the ``vals``
    argument, and if necessary the result of ``default_get()``.

    :param cr: database cursor
    :param user: current user id
    :param vals: field values for new record, e.g {'field_name': field_value, ...}
    :type vals: dictionary
    :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
    :type context: dictionary
    :return: id of new record created
    :raise AccessError: * if user has no create rights on the requested object
                        * if user tries to bypass access rules for create on the requested object
    :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
    :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

    **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
    Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
    """
    # Check 'create' permission on the model before doing anything else.
    self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)

    # Fill in any field missing from ``vals`` with its default value.
    vals = self._add_missing_default_values(cr, user, vals, context)

    # Route values belonging to _inherits parents: for each parent model we
    # either link an existing parent record (its id was given in vals) or
    # create a fresh parent record further below.
    for v in self._inherits:
        if self._inherits[v] not in vals:
            tocreate[v] = {'id': vals[self._inherits[v]]}
    (upd0, upd1, upd2) = ('', '', [])
    for v in vals.keys():
        if v in self._inherit_fields:
            (table, col, col_detail) = self._inherit_fields[v]
            tocreate[table][v] = vals[v]
        # values matching no known column of this model are dropped
        if (v not in self._inherit_fields) and (v not in self._columns):

    # Try-except added to filter the creation of those records whose filds are readonly.
    # Example : any dashboard which has all the fields readonly.(due to Views(database views))
        cr.execute("SELECT nextval('"+self._sequence+"')")
        raise except_orm(_('UserError'),
            _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
    id_new = cr.fetchone()[0]
    # Create/update the _inherits parent records and remember their ids so
    # they can be stored in this record's link columns.
    for table in tocreate:
        if self._inherits[table] in vals:
            del vals[self._inherits[table]]

        record_id = tocreate[table].pop('id', None)

        if record_id is None or not record_id:
            record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
            self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)

        upd0 += ',' + self._inherits[table]
        upd2.append(record_id)

    #Start : Set bool fields to be False if they are not touched(to make search more powerful)
    bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']

    for bool_field in bool_fields:
        if bool_field not in vals:
            vals[bool_field] = False
    #End
    for field in vals.copy():
        if field in self._columns:
            fobj = self._columns[field]
            fobj = self._inherit_fields[field][2]
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            # NOTE(review): the values are interpolated straight into this SQL
            # string ('%s' % ...); the equivalent group check in write() passes
            # them as query parameters instead, which is safer against SQL
            # injection — consider aligning the two.
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
            elif readonly[0][0] == 0:

        if self._columns[field]._classic_write:
            # classic columns go directly into the INSERT statement
            upd0 = upd0 + ',"' + field + '"'
            upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
            upd2.append(self._columns[field]._symbol_set[1](vals[field]))
            # related fields are derived, never written via their set() method
            if not isinstance(self._columns[field], fields.related):
                upd_todo.append(field)
        if field in self._columns \
                and hasattr(self._columns[field], 'selection') \
            self._check_selection_field_value(cr, user, field, vals[field], context=context)
    if self._log_access:
        upd0 += ',create_uid,create_date'
    cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
    self.check_access_rule(cr, user, [id_new], 'create', context=context)
    # non classic-write fields are applied afterwards, by column priority
    upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

    if self._parent_store and not context.get('defer_parent_store_computation'):
        self.pool._init_parent[self._name] = True
        parent = vals.get(self._parent_name, False)
        # locate the insertion point in the parent_left/parent_right interval
        # encoding of the hierarchy (nested-set model)
        cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
        result_p = cr.fetchall()
        for (pleft,) in result_p:
        cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
        pleft_old = cr.fetchone()[0]
        cr.execute('select max(parent_right) from '+self._table)
        pleft = cr.fetchone()[0] or 0
        # shift all intervals to the right of the insertion point by 2,
        # then give the new record the freed [pleft+1, pleft+2] slot
        cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
        cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
        cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))

    # default element in context must be remove when call a one2many or many2many
    rel_context = context.copy()
    for c in context.items():
        if c[0].startswith('default_'):
            del rel_context[c[0]]

    for field in upd_todo:
        result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
    self._validate(cr, user, [id_new], context)

    if not context.get('no_store_function', False):
        # recompute stored function fields triggered by this creation
        result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
        for order, object, ids, fields2 in result:
            if not (object, ids, fields2) in done:
                self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
                done.append((object, ids, fields2))

    if self._log_create and not (context and context.get('no_store_function', False)):
        message = self._description + \
            self.name_get(cr, user, [id_new], context=context)[0][1] + \
            "' " + _("created.")
        self.log(cr, user, id_new, message, True, context=context)
    # notify the workflow engine about the new record
    wf_service = netsvc.LocalService("workflow")
    wf_service.trg_create(user, self._name, id_new, cr)
def _store_get_values(self, cr, uid, ids, fields, context):
    """Returns an ordered list of fields.functions to call due to
    an update operation on ``fields`` of records with ``ids``,
    obtained by calling the 'store' functions of these fields,
    as setup by their 'store' attribute.

    :return: [(priority, model_name, [record_ids,], [function_fields,])]
    """
    # FIXME: rewrite, cleanup, use real variable names
    # e.g.: http://pastie.org/1222060

    # Registered store-triggers for this model; judging from the indexing
    # below, each entry looks like (model, field_name, ids_fnct,
    # trigger_fields, priority) — TODO confirm against the registration code.
    fncts = self.pool._store_function.get(self._name, [])
    for fnct in range(len(fncts)):
        # a trigger fires only if one of the written fields is watched by it
        for f in (fields or []):
            if f in fncts[fnct][3]:
        result.setdefault(fncts[fnct][0], {})

        # uid == 1 for accessing objects having rules defined on store fields
        ids2 = fncts[fnct][2](self, cr, 1, ids, context)
        for id in filter(None, ids2):
            result[fncts[fnct][0]].setdefault(id, [])
            result[fncts[fnct][0]][id].append(fnct)

    for object in result:
        # group the target ids by the exact tuple of triggers firing on them
        for id, fnct in result[object].items():
            k2.setdefault(tuple(fnct), [])
            k2[tuple(fnct)].append(id)
        for fnct, id in k2.items():
            dict.setdefault(fncts[fnct[0]][4], [])
            dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4], object, id, map(lambda x: fncts[x][1], fnct)))
def _store_set_values(self, cr, uid, ids, fields, context):
    """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
    respecting ``multi`` attributes), and stores the resulting values in the database directly."""
    if self._log_access:
        # Collect, per record, the function fields whose trigger time window
        # (i[5] hours past write_date) has not elapsed yet — presumably to
        # skip recomputing still-fresh values; TODO confirm intent.
        cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
        field_dict.setdefault(r[0], [])
        res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
        write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
        for i in self.pool._store_function.get(self._name, []):
            up_write_date = write_date + datetime.timedelta(hours=i[5])
            if datetime.datetime.now() < up_write_date:
                field_dict[r[0]].append(i[1])

    # Group function fields by their ``_multi`` attribute so that fields
    # computed together are fetched with a single get() call.
    if self._columns[f]._multi not in keys:
        keys.append(self._columns[f]._multi)
    todo.setdefault(self._columns[f]._multi, [])
    todo[self._columns[f]._multi].append(f)

    # uid == 1 for accessing objects having rules defined on store fields
    result = self._columns[val[0]].get(cr, self, ids, val, 1, context=context)
    for id, value in result.items():
        for f in value.keys():
            if f in field_dict[id]:
        # keep only the first element of the returned pair — m2o values are
        # read back as tuples; TODO confirm this is (id, display_name)
        if self._columns[v]._type in ('many2one', 'one2one'):
            value[v] = value[v][0]
        upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
        upd1.append(self._columns[v]._symbol_set[1](value[v]))
        # one UPDATE per record, all columns of the multi-group at once
        cr.execute('update "' + self._table + '" set ' + \
            ','.join(upd0) + ' where id = %s', upd1)

    # Single (non-multi) function fields: one get() per field.
    # uid == 1 for accessing objects having rules defined on store fields
    result = self._columns[f].get(cr, self, ids, f, 1, context=context)
    for r in result.keys():
        if r in field_dict.keys():
            if f in field_dict[r]:
    for id, value in result.items():
        if self._columns[f]._type in ('many2one', 'one2one'):
        cr.execute('update "' + self._table + '" set ' + \
            '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
def perm_write(self, cr, user, ids, fields, context=None):
    """Obsolete API kept only so that legacy callers fail explicitly.

    :raise NotImplementedError: always
    """
    message = _('This method does not exist anymore')
    raise NotImplementedError(message)
    # TODO: improve handling of NULL values
def _where_calc(self, cr, user, domain, active_test=True, context=None):
    """Computes the WHERE clause needed to implement an OpenERP domain.
    :param domain: the domain to compute
    :param active_test: whether the default filtering of records with ``active``
                        field set to ``False`` should be applied.
    :return: the query expressing the given domain as provided in domain
    :rtype: osv.query.Query
    """
    # if the object has a field named 'active', filter out all inactive
    # records unless they were explicitely asked for
    if 'active' in self._columns and (active_test and context.get('active_test', True)):
        # only inject the implicit ('active','=',1) leaf when the caller's
        # domain does not already constrain 'active'
        active_in_args = False
        if a[0] == 'active':
            active_in_args = True
        if not active_in_args:
            domain.insert(0, ('active', '=', 1))
        domain = [('active', '=', 1)]

    # parse the domain into SQL tables/clauses via the expression engine
    e = expression.expression(domain)
    e.parse(cr, user, self, context)
    tables = e.get_tables()
    where_clause, where_params = e.to_sql()
    where_clause = where_clause and [where_clause] or []
    # empty domain: no WHERE clause, just this model's table
    where_clause, where_params, tables = [], [], ['"%s"' % self._table]
    return Query(tables, where_clause, where_params)
def _check_qorder(self, word):
    # Validate an ORDER BY specification against the regex_order whitelist
    # (comma-separated field names, each optionally followed by asc/desc);
    # anything else raises, preventing the spec from reaching raw SQL.
    if not regex_order.match(word):
        raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
    """Add what's missing in ``query`` to implement all appropriate ir.rules
      (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

    :param query: the current query object
    """
    def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
        # Merge one rule's (clause, params, tables) triple into ``query``.
        if parent_model and child_object:
            # as inherited rules are being applied, we need to add the missing JOIN
            # to reach the parent table (if it was not JOINed yet in the query)
            child_object._inherits_join_add(parent_model, query)
        query.where_clause += added_clause
        query.where_clause_params += added_params
        for table in added_tables:
            # avoid duplicating a table already referenced by the query
            if table not in query.tables:
                query.tables.append(table)

    # apply main rules on the object
    rule_obj = self.pool.get('ir.rule')
    apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))

    # apply ir.rules from the parents (through _inherits)
    for inherited_model in self._inherits:
        kwargs = dict(parent_model=inherited_model, child_object=self) #workaround for python2.5
        apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
def _generate_m2o_order_by(self, order_field, query):
    """
    Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
    either native m2o fields or function/related fields that are stored, including
    intermediate JOINs for inheritance if required.

    :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
    """
    if order_field not in self._columns and order_field in self._inherit_fields:
        # also add missing joins for reaching the table containing the m2o field
        qualified_field = self._inherits_join_calc(order_field, query)
        order_field_column = self._inherit_fields[order_field][2]
        # field lives directly on this model's table
        qualified_field = '"%s"."%s"' % (self._table, order_field)
        order_field_column = self._columns[order_field]

    assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
    if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
        # non-stored function/related m2o: no DB column to join on, give up
        logging.getLogger('orm.search').debug("Many2one function/related fields must be stored " \
                                              "to be used as ordering fields! Ignoring sorting for %s.%s",
                                              self._name, order_field)

    # figure out the applicable order_by for the m2o
    dest_model = self.pool.get(order_field_column._obj)
    m2o_order = dest_model._order
    if not regex_order.match(m2o_order):
        # _order is complex, can't use it here, so we default to _rec_name
        m2o_order = dest_model._rec_name
        # extract the field names, to be able to qualify them and add desc/asc
        for order_part in m2o_order.split(","):
            m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
        m2o_order = m2o_order_list

    # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
    # as we don't want to exclude results that have NULL values for the m2o
    src_table, src_field = qualified_field.replace('"','').split('.', 1)
    query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
    qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
    return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
def _generate_order_by(self, order_spec, query):
    """
    Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
    a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

    :raise: except_orm in case order_spec is malformed
    """
    # default ordering when no usable spec is provided
    order_by_clause = self._order
    order_by_elements = []
    # raises except_orm on anything that could not be a safe ORDER BY spec
    self._check_qorder(order_spec)
    for order_part in order_spec.split(','):
        order_split = order_part.strip().split(' ')
        order_field = order_split[0].strip()
        order_direction = order_split[1].strip() if len(order_split) == 2 else ''
        if order_field == 'id':
            order_by_clause = '"%s"."%s"' % (self._table, order_field)
        elif order_field in self._columns:
            order_column = self._columns[order_field]
            if order_column._classic_read:
                # plain stored column: qualify with this model's table
                inner_clause = '"%s"."%s"' % (self._table, order_field)
            elif order_column._type == 'many2one':
                # m2o ordering requires joining the destination table
                inner_clause = self._generate_m2o_order_by(order_field, query)
                continue # ignore non-readable or "non-joinable" fields
        elif order_field in self._inherit_fields:
            # field inherited through _inherits: resolve on the parent model
            parent_obj = self.pool.get(self._inherit_fields[order_field][0])
            order_column = parent_obj._columns[order_field]
            if order_column._classic_read:
                inner_clause = self._inherits_join_calc(order_field, query)
            elif order_column._type == 'many2one':
                inner_clause = self._generate_m2o_order_by(order_field, query)
                continue # ignore non-readable or "non-joinable" fields
        # _generate_m2o_order_by may return several qualified columns
        if isinstance(inner_clause, list):
            for clause in inner_clause:
                order_by_elements.append("%s %s" % (clause, order_direction))
            order_by_elements.append("%s %s" % (inner_clause, order_direction))
    if order_by_elements:
        order_by_clause = ",".join(order_by_elements)

    return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
    """
    Private implementation of search() method, allowing specifying the uid to use for the access right check.
    This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
    by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
    This is ok at the security level because this method is private and not callable through XML-RPC.

    :param access_rights_uid: optional user ID to use when checking access rights
                              (not for ir.rules, this is only for ir.model.access)
    """
    # model-level read access is checked with access_rights_uid if given
    self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)

    # build the query from the domain, then restrict it with ir.rules for
    # the *real* user (record rules are never bypassed here)
    query = self._where_calc(cr, user, args, context=context)
    self._apply_ir_rules(cr, user, query, 'read', context=context)
    order_by = self._generate_order_by(order, query)
    from_clause, where_clause, where_clause_params = query.get_sql()

    # limit/offset are formatted via %d, so non-integer input cannot reach SQL
    limit_str = limit and ' limit %d' % limit or ''
    offset_str = offset and ' offset %d' % offset or ''
    where_str = where_clause and (" WHERE %s" % where_clause) or ''

    # count-only variant
    cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
    # id-list variant
    cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
    return [x[0] for x in res]
4309 # returns the different values ever entered for one field
4310 # this is used, for example, in the client when the user hits enter on
def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
    # Return the distinct values ever entered for ``field`` (used for
    # client-side completion). Inherited fields are delegated to the
    # parent model that actually owns the column.
    if field in self._inherit_fields:
        return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
    return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
def copy_data(self, cr, uid, id, default=None, context=None):
    """
    Copy given record's data with all its fields values

    :param cr: database cursor
    :param user: current user id
    :param id: id of the record to copy
    :param default: field values to override in the original values of the copied record
    :type default: dictionary
    :param context: context arguments, like lang, time zone
    :type context: dictionary
    :return: dictionary containing all the field values
    """
    # avoid recursion through already copied records in case of circular relationship
    seen_map = context.setdefault('__copy_data_seen',{})
    if id in seen_map.setdefault(self._name,[]):
    seen_map[self._name].append(id)

    # 'state' is reset to its default rather than copied from the source
    if 'state' not in default:
        if 'state' in self._defaults:
            if callable(self._defaults['state']):
                default['state'] = self._defaults['state'](self, cr, uid, context)
                default['state'] = self._defaults['state']

    # read source values without lang so untranslated terms are copied;
    # translations are handled separately by copy_translations()
    context_wo_lang = context.copy()
    if 'lang' in context:
        del context_wo_lang['lang']
    data = self.read(cr, uid, [id,], context=context_wo_lang)
        raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))

    fields = self.fields_get(cr, uid, context=context)
        ftype = fields[f]['type']

        # audit-trail columns are never copied
        if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
            data[f] = default[f]
        elif 'function' in fields[f]:
        elif ftype == 'many2one':
            # read() returns (id, name) pairs for m2o; keep the bare id
            data[f] = data[f] and data[f][0]
        elif ftype in ('one2many', 'one2one'):
            rel = self.pool.get(fields[f]['relation'])
            # duplicate following the order of the ids
            # because we'll rely on it later for copying
            # translations in copy_translation()!
            for rel_id in data[f]:
                # the lines are first duplicated using the wrong (old)
                # parent but then are reassigned to the correct one thanks
                # to the (0, 0, ...)
                d = rel.copy_data(cr, uid, rel_id, context=context)
                res.append((0, 0, d))
        elif ftype == 'many2many':
            # (6, 0, ids): replace the whole relation with the same targets
            data[f] = [(6, 0, data[f])]

    # make sure we don't break the current parent_store structure and
    # force a clean recompute!
    for parent_column in ['parent_left', 'parent_right']:
        data.pop(parent_column, None)

    # links to _inherits parents are dropped so create() makes fresh parents
    for v in self._inherits:
        del data[self._inherits[v]]
def copy_translations(self, cr, uid, old_id, new_id, context=None):
    """Duplicate the ir.translation entries of record ``old_id`` onto
    ``new_id``, recursing through one2one/one2many children."""
    # avoid recursion through already copied records in case of circular relationship
    seen_map = context.setdefault('__copy_translations_seen',{})
    if old_id in seen_map.setdefault(self._name,[]):
    seen_map[self._name].append(old_id)

    trans_obj = self.pool.get('ir.translation')
    fields = self.fields_get(cr, uid, context=context)

    translation_records = []
    for field_name, field_def in fields.items():
        # we must recursively copy the translations for o2o and o2m
        if field_def['type'] in ('one2one', 'one2many'):
            target_obj = self.pool.get(field_def['relation'])
            old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
            # here we rely on the order of the ids to match the translations
            # as foreseen in copy_data()
            old_children = sorted(old_record[field_name])
            new_children = sorted(new_record[field_name])
            for (old_child, new_child) in zip(old_children, new_children):
                target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
        # and for translatable fields we keep them for copy
        elif field_def.get('translate'):
            # translation names are qualified by the model owning the column
            if field_name in self._columns:
                trans_name = self._name + "," + field_name
            elif field_name in self._inherit_fields:
                trans_name = self._inherit_fields[field_name][0] + "," + field_name
            trans_ids = trans_obj.search(cr, uid, [
                    ('name', '=', trans_name),
                    ('res_id', '=', old_id)
            translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))

    # re-create the collected translations, re-pointed at the new record
    for record in translation_records:
        record['res_id'] = new_id
        trans_obj.create(cr, uid, record, context=context)
def copy(self, cr, uid, id, default=None, context=None):
    """
    Duplicate record with given id updating it with default values

    :param cr: database cursor
    :param uid: current user id
    :param id: id of the record to copy
    :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
    :type default: dictionary
    :param context: context arguments, like lang, time zone
    :type context: dictionary
    """
    # work on a private copy: copy_data/copy_translations stash recursion
    # guards ('__copy_*_seen') into the context
    context = context.copy()
    data = self.copy_data(cr, uid, id, default, context)
    new_id = self.create(cr, uid, data, context)
    self.copy_translations(cr, uid, id, new_id, context)
def exists(self, cr, uid, ids, context=None):
    # Return True only when *every* id in ``ids`` exists in the table.
    if type(ids) in (int, long):
    query = 'SELECT count(1) FROM "%s"' % (self._table)
    # NOTE(review): no space before WHERE — this appears to rely on the
    # closing double-quote of the table identifier ending the token in
    # PostgreSQL's lexer; confirm, or add the leading space defensively.
    cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
    return cr.fetchone()[0] == len(ids)
def check_recursion(self, cr, uid, ids, context=None, parent=None):
    """Deprecated public wrapper around :meth:`_check_recursion`.

    Emits a DeprecationWarning, validates that ``parent`` (when given) is a
    real field of this model, then delegates unchanged.
    """
    deprecation = "You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % self._name
    warnings.warn(deprecation, DeprecationWarning, stacklevel=3)
    parent_is_valid = (parent is None
                       or parent in self._columns
                       or parent in self._inherit_fields)
    assert parent_is_valid, "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
    return self._check_recursion(cr, uid, ids, context, parent)
def _check_recursion(self, cr, uid, ids, context=None, parent=None):
    """
    Verifies that there is no loop in a hierarchical structure of records,
    by following the parent relationship using the **parent** field until a loop
    is detected or until a top-level record is found.

    :param cr: database cursor
    :param uid: current user id
    :param ids: list of ids of records to check
    :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
    :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
    """
        parent = self._parent_name
    query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
        # walk one level up the hierarchy per pass, chunking the id list to
        # respect the cursor's IN-clause size limit
        for i in range(0, len(ids), cr.IN_MAX):
            sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
            cr.execute(query, (tuple(sub_ids_parent),))
            # NULL parents (roots) are filtered out of the next frontier
            ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
        ids_parent = ids_parent2
        for i in ids_parent:
def _get_xml_ids(self, cr, uid, ids, *args, **kwargs):
    """Find out the XML ID(s) of any database record.

    **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``

    :return: map of ids to the list of their fully qualified XML IDs
             (empty list when there's none).
    """
    # look up all ir.model.data entries pointing at these records
    model_data_obj = self.pool.get('ir.model.data')
    data_ids = model_data_obj.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
    data_results = model_data_obj.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
    # can't use dict.fromkeys() as the list would be shared!
    for record in data_results:
        result[record['res_id']].append('%(module)s.%(name)s' % record)
def get_xml_id(self, cr, uid, ids, *args, **kwargs):
    """Find out the XML ID of any database record, if there
    is one. This method works as a possible implementation
    for a function field, to be able to add it to any
    model object easily, referencing it as ``osv.osv.get_xml_id``.

    When multiple XML IDs exist for a record, only one
    of them is returned (randomly).

    **Synopsis**: ``get_xml_id(cr, uid, ids) -> { 'id': 'module.xml_id' }``

    :return: map of ids to their fully qualified XML ID,
             defaulting to an empty string when there's none
             (to be usable as a function field).
    """
    # collapse each record's XML-ID list (from _get_xml_ids) to one value
    results = self._get_xml_ids(cr, uid, ids)
    for k, v in results.items():
4559 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: