1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
# Object relational mapping to postgresql module
#    . Hierarchical structure
#    . Constraints consistency, validations
#    . Object metadata depends on its status
#    . Optimised processing by complex query (multiple actions at once)
#    . Default field values
#    . Permissions optimisation
#    . Persistent object: DB postgresql
#    . Multi-level caching system
#    . 2 different inheritance mechanisms
#        - classical fields (varchar, integer, boolean, ...)
#        - relational fields (one2many, many2one, many2many)
53 from lxml import etree
54 from tools.config import config
55 from tools.translate import _
58 from query import Query
60 from tools.safe_eval import safe_eval as eval
62 # List of etree._Element subclasses that we choose to ignore when parsing XML.
63 from tools import SKIPPED_ELEMENT_TYPES
65 regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
67 POSTGRES_CONFDELTYPES = {
def last_day_of_current_month():
    """Return the last day of the current month as a 'YYYY-MM-DD' string."""
    today = datetime.date.today()
    end_of_month = calendar.monthrange(today.year, today.month)[1]
    return today.strftime('%Y-%m-') + str(end_of_month)
def intersect(la, lb):
    """Return the elements of *la* also present in *lb*, keeping *la*'s order."""
    return filter(lb.__contains__, la)
class except_orm(Exception):
    """Base ORM exception carrying a (name, value) pair, e.g. ('ValidateError', msg)."""
    def __init__(self, name, value):
        # Store both parts in args so standard Exception formatting shows them.
        self.args = (name, value)
89 class BrowseRecordError(Exception):
# Readonly python database object browser
class browse_null(object):
    # Null placeholder returned for empty relational fields: any item or
    # attribute access yields None instead of raising.
    def __getitem__(self, name):
    def __getattr__(self, name):
        return None # XXX: return self ?
    def __nonzero__(self):
    def __unicode__(self):
# TODO: execute an object method on browse_record_list
class browse_record_list(list):
    # A list of browse_record objects that also carries the browse context.
    def __init__(self, lst, context=None):
        super(browse_record_list, self).__init__(lst)
        # Keep the context so related records can be browsed with the same one.
        self.context = context
class browse_record(object):
    # Lazy record proxy: field access triggers a read() and caches the result.
    logger = netsvc.Logger()
    def __init__(self, cr, uid, id, table, cache, context=None, list_class=None, fields_process=None):
        table : the object (inherited from orm)
        context : dictionary with an optional context
        if fields_process is None:
        self._list_class = list_class or browse_record_list
        self._table_name = self._table._name
        self.__logger = logging.getLogger(
            'osv.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process
        # All records of the same model share a single per-model cache dict.
        cache.setdefault(table._name, {})
        self._data = cache[table._name]
        # Only true integer ids are browsable; anything else is a caller bug.
        if not (id and isinstance(id, (int, long,))):
            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
        # if not table.exists(cr, uid, id, context):
        #     raise BrowseRecordError(_('Object %s does not exists') % (self,))
        if id not in self._data:
            self._data[id] = {'id': id}
165 def __getitem__(self, name):
169 if name not in self._data[self._id]:
170 # build the list of fields we will fetch
172 # fetch the definition of the field which was asked for
173 if name in self._table._columns:
174 col = self._table._columns[name]
175 elif name in self._table._inherit_fields:
176 col = self._table._inherit_fields[name][2]
177 elif hasattr(self._table, str(name)):
178 attr = getattr(self._table, name)
180 if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
181 return lambda *args, **argv: attr(self._cr, self._uid, [self._id], *args, **argv)
185 self.logger.notifyChannel("browse_record", netsvc.LOG_WARNING,
186 "Field '%s' does not exist in object '%s': \n%s" % (
187 name, self, ''.join(traceback.format_exc())))
188 raise KeyError("Field '%s' does not exist in object '%s'" % (
191 # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
193 # gen the list of "local" (ie not inherited) fields which are classic or many2one
194 fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
195 # gen the list of inherited fields
196 inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
197 # complete the field list with the inherited fields which are classic or many2one
198 fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
199 # otherwise we fetch only that field
201 fields_to_fetch = [(name, col)]
202 ids = filter(lambda id: name not in self._data[id], self._data.keys())
204 field_names = map(lambda x: x[0], fields_to_fetch)
205 field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
207 # TODO: improve this, very slow for reports
208 if self._fields_process:
209 lang = self._context.get('lang', 'en_US') or 'en_US'
210 lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
212 raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
213 lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])
215 for field_name, field_column in fields_to_fetch:
216 if field_column._type in self._fields_process:
217 for result_line in field_values:
218 result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
219 if result_line[field_name]:
220 result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)
223 # Where did those ids come from? Perhaps old entries in ir_model_dat?
224 self.__logger.warn("No field_values found for ids %s in %s", ids, self)
225 raise KeyError('Field %s not found in %s'%(name, self))
226 # create browse records for 'remote' objects
227 for result_line in field_values:
229 for field_name, field_column in fields_to_fetch:
230 if field_column._type in ('many2one', 'one2one'):
231 if result_line[field_name]:
232 obj = self._table.pool.get(field_column._obj)
233 if isinstance(result_line[field_name], (list, tuple)):
234 value = result_line[field_name][0]
236 value = result_line[field_name]
238 # FIXME: this happen when a _inherits object
239 # overwrite a field of it parent. Need
240 # testing to be sure we got the right
241 # object and not the parent one.
242 if not isinstance(value, browse_record):
244 # In some cases the target model is not available yet, so we must ignore it,
245 # which is safe in most cases, this value will just be loaded later when needed.
246 # This situation can be caused by custom fields that connect objects with m2o without
247 # respecting module dependencies, causing relationships to be connected to soon when
248 # the target is not loaded yet.
250 new_data[field_name] = browse_record(self._cr,
251 self._uid, value, obj, self._cache,
252 context=self._context,
253 list_class=self._list_class,
254 fields_process=self._fields_process)
256 new_data[field_name] = value
258 new_data[field_name] = browse_null()
260 new_data[field_name] = browse_null()
261 elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
262 new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
263 elif field_column._type in ('reference'):
264 if result_line[field_name]:
265 if isinstance(result_line[field_name], browse_record):
266 new_data[field_name] = result_line[field_name]
268 ref_obj, ref_id = result_line[field_name].split(',')
269 ref_id = long(ref_id)
271 obj = self._table.pool.get(ref_obj)
272 new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
274 new_data[field_name] = browse_null()
276 new_data[field_name] = browse_null()
278 new_data[field_name] = result_line[field_name]
279 self._data[result_line['id']].update(new_data)
281 if not name in self._data[self._id]:
282 # How did this happen? Could be a missing model due to custom fields used too soon, see above.
283 self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
284 "Fields to fetch: %s, Field values: %s"%(field_names, field_values))
285 self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
286 "Cached: %s, Table: %s"%(self._data[self._id], self._table))
287 raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
288 return self._data[self._id][name]
    def __getattr__(self, name):
        # Unknown fields surface as AttributeError (re-raised from the lookup
        # failure) rather than KeyError, to honour the attribute protocol.
        raise AttributeError(e)
296 def __contains__(self, name):
297 return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)
    def __hasattr__(self, name):
        # Records are identified by the (model name, id) pair for repr/eq/hash.
        return "browse_record(%s, %d)" % (self._table_name, self._id)
    def __eq__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) == (other._table_name, other._id)
    def __ne__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) != (other._table_name, other._id)
    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))
        return hash((self._table_name, self._id))
332 (type returned by postgres when the column was created, type expression to create the column)
336 fields.boolean: 'bool',
337 fields.integer: 'int4',
338 fields.integer_big: 'int8',
342 fields.datetime: 'timestamp',
343 fields.binary: 'bytea',
344 fields.many2one: 'int4',
346 if type(f) in type_dict:
347 f_type = (type_dict[type(f)], type_dict[type(f)])
348 elif isinstance(f, fields.float):
350 f_type = ('numeric', 'NUMERIC')
352 f_type = ('float8', 'DOUBLE PRECISION')
353 elif isinstance(f, (fields.char, fields.reference)):
354 f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
355 elif isinstance(f, fields.selection):
356 if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
357 f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
358 elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
361 f_size = getattr(f, 'size', None) or 16
364 f_type = ('int4', 'INTEGER')
366 f_type = ('varchar', 'VARCHAR(%d)' % f_size)
367 elif isinstance(f, fields.function) and eval('fields.'+(f._type), globals()) in type_dict:
368 t = eval('fields.'+(f._type), globals())
369 f_type = (type_dict[t], type_dict[t])
370 elif isinstance(f, fields.function) and f._type == 'float':
372 f_type = ('numeric', 'NUMERIC')
374 f_type = ('float8', 'DOUBLE PRECISION')
375 elif isinstance(f, fields.function) and f._type == 'selection':
376 f_type = ('text', 'text')
377 elif isinstance(f, fields.function) and f._type == 'char':
378 f_type = ('varchar', 'VARCHAR(%d)' % (f.size))
380 logger = netsvc.Logger()
381 logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
class orm_template(object):
    # Attributes controlling hierarchical (parent/child) behaviour of the model.
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False
    # Pseudo-field name checked by clients for optimistic concurrency control.
    CONCURRENCY_CHECK_FIELD = '__last_update'
    def log(self, cr, uid, id, message, secondary=False, context=None):
        # Record a message about this record in the res.log audit trail.
        return self.pool.get('res.log').create(cr, uid,
                'res_model': self._name,
                'secondary': secondary,
    def view_init(self, cr, uid, fields_list, context=None):
        """Override this method to do specific things when a view on the object is opened."""
    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
        # Abstract here; concrete ORM subclasses provide the implementation.
        raise NotImplementedError(_('The read_group method is not implemented on this object !'))
423 def _field_create(self, cr, context=None):
426 cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
428 cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
429 model_id = cr.fetchone()[0]
430 cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
432 model_id = cr.fetchone()[0]
433 if 'module' in context:
434 name_id = 'model_'+self._name.replace('.', '_')
435 cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
437 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
438 (name_id, context['module'], 'ir.model', model_id)
443 cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
445 for rec in cr.dictfetchall():
446 cols[rec['name']] = rec
448 for (k, f) in self._columns.items():
450 'model_id': model_id,
453 'field_description': f.string.replace("'", " "),
455 'relation': f._obj or '',
456 'view_load': (f.view_load and 1) or 0,
457 'select_level': tools.ustr(f.select or 0),
458 'readonly': (f.readonly and 1) or 0,
459 'required': (f.required and 1) or 0,
460 'selectable': (f.selectable and 1) or 0,
461 'translate': (f.translate and 1) or 0,
462 'relation_field': (f._type=='one2many' and isinstance(f, fields.one2many)) and f._fields_id or '',
465 # When its a custom field,it does not contain f.select
466 if context.get('field_state', 'base') == 'manual':
467 if context.get('field_name', '') == k:
468 vals['select_level'] = context.get('select', '0')
469 #setting value to let the problem NOT occur next time
471 vals['select_level'] = cols[k]['select_level']
474 cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
475 id = cr.fetchone()[0]
477 cr.execute("""INSERT INTO ir_model_fields (
478 id, model_id, model, name, field_description, ttype,
479 relation,view_load,state,select_level,relation_field, translate
481 %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
483 id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
484 vals['relation'], bool(vals['view_load']), 'base',
485 vals['select_level'], vals['relation_field'], bool(vals['translate'])
487 if 'module' in context:
488 name1 = 'field_' + self._table + '_' + k
489 cr.execute("select name from ir_model_data where name=%s", (name1,))
491 name1 = name1 + "_" + str(id)
492 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
493 (name1, context['module'], 'ir.model.fields', id)
496 for key, val in vals.items():
497 if cols[k][key] != vals[key]:
498 cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
500 cr.execute("""UPDATE ir_model_fields SET
501 model_id=%s, field_description=%s, ttype=%s, relation=%s,
502 view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s
504 model=%s AND name=%s""", (
505 vals['model_id'], vals['field_description'], vals['ttype'],
506 vals['relation'], bool(vals['view_load']),
507 vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['model'], vals['name']
    def _auto_init(self, cr, context=None):
        # Create/refresh this model's entries in ir_model / ir_model_fields;
        # table creation is handled by subclasses.
        self._field_create(cr, context=context)
    def __init__(self, cr):
        # A model must declare _name unless it extends another via _inherit.
        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name
            logger = netsvc.Logger()
            logger.notifyChannel('orm', netsvc.LOG_ERROR, msg)
            raise except_orm('ValueError', msg)
        if not self._description:
            self._description = self._name
        # Default SQL table name: model name with dots mapped to underscores.
        self._table = self._name.replace('.', '_')
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
        """Fetch records as objects allowing to use dot notation to browse fields and relations

        :param cr: database cursor
        :param user: current user id
        :param select: id or list of ids
        :param context: context arguments, like lang, time zone
        :rtype: object or list of objects requested
        self._list_class = list_class or browse_record_list
        # NOTE(review): 'cache' is expected to be initialised earlier in this
        # method (shared per-browse cache) — not visible here, confirm.
        # need to accept ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
550 def __export_row(self, cr, uid, row, fields, context=None):
554 def check_type(field_type):
555 if field_type == 'float':
557 elif field_type == 'integer':
559 elif field_type == 'boolean':
563 def selection_field(in_field):
564 col_obj = self.pool.get(in_field.keys()[0])
565 if f[i] in col_obj._columns.keys():
566 return col_obj._columns[f[i]]
567 elif f[i] in col_obj._inherits.keys():
568 selection_field(col_obj._inherits)
573 data = map(lambda x: '', range(len(fields)))
575 for fpos in range(len(fields)):
584 model_data = self.pool.get('ir.model.data')
585 data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
587 d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
589 r = '%s.%s' % (d['module'], d['name'])
596 # To display external name of selection field when its exported
598 if f[i] in self._columns.keys():
599 cols = self._columns[f[i]]
600 elif f[i] in self._inherit_fields.keys():
601 cols = selection_field(self._inherits)
602 if cols and cols._type == 'selection':
603 sel_list = cols.selection
604 if r and type(sel_list) == type([]):
605 r = [x[1] for x in sel_list if r==x[0]]
606 r = r and r[0] or False
608 if f[i] in self._columns:
609 r = check_type(self._columns[f[i]]._type)
610 elif f[i] in self._inherit_fields:
611 r = check_type(self._inherit_fields[f[i]][2]._type)
612 data[fpos] = r or False
614 if isinstance(r, (browse_record_list, list)):
616 fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
619 if [x for x in fields2 if x]:
623 lines2 = self.__export_row(cr, uid, row2, fields2,
626 for fpos2 in range(len(fields)):
627 if lines2 and lines2[0][fpos2]:
628 data[fpos2] = lines2[0][fpos2]
632 name_relation = self.pool.get(rr._table_name)._rec_name
633 if isinstance(rr[name_relation], browse_record):
634 rr = rr[name_relation]
635 rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
636 rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
637 dt += tools.ustr(rr_name or '') + ','
647 if isinstance(r, browse_record):
648 r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
649 r = r and r[0] and r[0][1] or ''
650 data[fpos] = tools.ustr(r or '')
651 return [data] + lines
653 def export_data(self, cr, uid, ids, fields_to_export, context=None):
655 Export fields for selected objects
657 :param cr: database cursor
658 :param uid: current user id
659 :param ids: list of ids
660 :param fields_to_export: list of fields
661 :param context: context arguments, like lang, time zone
662 :rtype: dictionary with a *datas* matrix
664 This method is used when exporting data via client menu
669 cols = self._columns.copy()
670 for f in self._inherit_fields:
671 cols.update({f: self._inherit_fields[f][2]})
673 if x=='.id': return [x]
674 return x.replace(':id','/id').replace('.id','/.id').split('/')
675 fields_to_export = map(fsplit, fields_to_export)
676 fields_export = fields_to_export + []
680 for row in self.browse(cr, uid, ids, context):
681 datas += self.__export_row(cr, uid, row, fields_to_export, context)
682 return {'datas': datas}
684 def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
686 Import given data in given module
688 :param cr: database cursor
689 :param uid: current user id
690 :param fields: list of fields
691 :param data: data to import
692 :param mode: 'init' or 'update' for record creation
693 :param current_module: module name
694 :param noupdate: flag for record creation
695 :param context: context arguments, like lang, time zone,
696 :param filename: optional file to store partial import state for recovery
699 This method is used when importing data via client menu.
701 Example of fields to import for a sale.order::
704 partner_id, (=name_search)
705 order_line/.id, (=database_id)
707 order_line/product_id/id, (=xml id)
708 order_line/price_unit,
709 order_line/product_uom_qty,
710 order_line/product_uom/id (=xml_id)
714 def _replace_field(x):
715 x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
716 return x.replace(':id','/id').split('/')
717 fields = map(_replace_field, fields)
718 logger = netsvc.Logger()
719 ir_model_data_obj = self.pool.get('ir.model.data')
721 # mode: id (XML id) or .id (database id) or False for name_get
722 def _get_id(model_name, id, current_module=False, mode='id'):
725 obj_model = self.pool.get(model_name)
726 dom = [('id', '=', id)]
727 if obj_model._columns.get('active'):
728 dom.append(('active', 'in', ['True','False']))
729 ids = obj_model.search(cr, uid, dom, context=context)
731 raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, id))
734 module, xml_id = id.rsplit('.', 1)
736 module, xml_id = current_module, id
737 record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
738 ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id'], context=context)
739 if not ir_model_data:
740 raise ValueError('No references to %s.%s' % (module, xml_id))
741 id = ir_model_data[0]['res_id']
743 obj_model = self.pool.get(model_name)
744 ids = obj_model.name_search(cr, uid, id, operator='=', context=context)
746 raise ValueError('No record found for %s' % (id,))
751 # datas: a list of records, each record is defined by a list of values
752 # prefix: a list of prefix fields ['line_ids']
753 # position: the line to process, skip is False if it's the first line of the current record
755 # (res, position, warning, res_id) with
756 # res: the record for the next line to process (including it's one2many)
757 # position: the new position for the next line
758 # res_id: the ID of the record if it's a modification
759 def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0, skip=0):
760 line = datas[position]
768 for i in range(len(fields)):
771 raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
774 if field[:len(prefix)] <> prefix:
779 # ID of the record using a XML ID
780 if field[len(prefix)]=='id':
782 data_res_id = _get_id(model_name, line[i], current_module, 'id')
783 except ValueError, e:
788 # ID of the record using a database ID
789 elif field[len(prefix)]=='.id':
790 data_res_id = _get_id(model_name, line[i], current_module, '.id')
793 # recursive call for getting children and returning [(0,0,{})] or [(1,ID,{})]
794 if fields_def[field[len(prefix)]]['type']=='one2many':
795 if field[len(prefix)] in done:
797 done[field[len(prefix)]] = True
798 relation_obj = self.pool.get(fields_def[field[len(prefix)]]['relation'])
799 newfd = relation_obj.fields_get(cr, uid, context=context)
803 while pos < len(datas):
804 res2 = process_liness(self, datas, prefix + [field[len(prefix)]], current_module, relation_obj._name, newfd, pos, first)
807 (newrow, pos, w2, data_res_id2, xml_id2) = res2
808 nbrmax = max(nbrmax, pos)
811 if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
813 res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )
815 elif fields_def[field[len(prefix)]]['type']=='many2one':
816 relation = fields_def[field[len(prefix)]]['relation']
817 if len(field) == len(prefix)+1:
820 mode = field[len(prefix)+1]
821 res = line[i] and _get_id(relation, line[i], current_module, mode) or False
823 elif fields_def[field[len(prefix)]]['type']=='many2many':
824 relation = fields_def[field[len(prefix)]]['relation']
825 if len(field) == len(prefix)+1:
828 mode = field[len(prefix)+1]
830 # TODO: improve this by using csv.csv_reader
833 for db_id in line[i].split(config.get('csv_internal_sep')):
834 res.append( _get_id(relation, db_id, current_module, mode) )
837 elif fields_def[field[len(prefix)]]['type'] == 'integer':
838 res = line[i] and int(line[i]) or 0
839 elif fields_def[field[len(prefix)]]['type'] == 'boolean':
840 res = line[i].lower() not in ('0', 'false', 'off')
841 elif fields_def[field[len(prefix)]]['type'] == 'float':
842 res = line[i] and float(line[i]) or 0.0
843 elif fields_def[field[len(prefix)]]['type'] == 'selection':
844 for key, val in fields_def[field[len(prefix)]]['selection']:
845 if line[i] in [tools.ustr(key), tools.ustr(val)]:
848 if line[i] and not res:
849 logger.notifyChannel("import", netsvc.LOG_WARNING,
850 _("key '%s' not found in selection field '%s'") % \
851 (line[i], field[len(prefix)]))
852 warning += [_("Key/value '%s' not found in selection field '%s'") % (line[i], field[len(prefix)])]
856 row[field[len(prefix)]] = res or False
858 result = (row, nbrmax, warning, data_res_id, xml_id)
861 fields_def = self.fields_get(cr, uid, context=context)
863 if config.get('import_partial', False) and filename:
864 data = pickle.load(file(config.get('import_partial')))
865 original_value = data.get(filename, 0)
868 while position<len(datas):
871 (res, position, warning, res_id, xml_id) = \
872 process_liness(self, datas, [], current_module, self._name, fields_def, position=position)
875 return (-1, res, 'Line ' + str(position) +' : ' + '!\n'.join(warning), '')
878 id = ir_model_data_obj._update(cr, uid, self._name,
879 current_module, res, mode=mode, xml_id=xml_id,
880 noupdate=noupdate, res_id=res_id, context=context)
882 return (-1, res, 'Line ' + str(position) +' : ' + str(e), '')
884 if config.get('import_partial', False) and filename and (not (position%100)):
885 data = pickle.load(file(config.get('import_partial')))
886 data[filename] = position
887 pickle.dump(data, file(config.get('import_partial'), 'wb'))
888 if context.get('defer_parent_store_computation'):
889 self._parent_store_compute(cr)
892 if context.get('defer_parent_store_computation'):
893 self._parent_store_compute(cr)
894 return (position, 0, 0, 0)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        Read records with given ids with the given fields

        :param cr: database cursor
        :param user: current user id
        :param ids: id or list of the ids of the records to read
        :param fields: optional list of field names to return (default: all fields would be returned)
        :type fields: list (example ['field_name_1', ...])
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang``, ``context_tz`` to alter the results of the call.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: list of dictionaries((dictionary per record asked)) with requested field values
        :rtype: [{‘name_of_the_field’: value, ...}, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        # Abstract here; concrete ORM subclasses provide the implementation.
        raise NotImplementedError(_('The read method is not implemented on this object !'))
920 def get_invalid_fields(self, cr, uid):
921 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run every _constraints entry on *ids*; raise ValidateError if any fails."""
        context = context or {}
        lng = context.get('lang', False) or 'en_US'
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            # Each constraint is (checker function, message, field names).
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # Check presence of __call__ directly instead of using
                # callable() because it will be deprecated as of Python 3.0
                if hasattr(msg, '__call__'):
                    tmp_msg = msg(self, cr, uid, ids, context=context)
                    # A (message, params) tuple means the message needs interpolation.
                    if isinstance(tmp_msg, tuple):
                        tmp_msg, params = tmp_msg
                        translated_msg = tmp_msg % params
                        translated_msg = tmp_msg
                    # Static messages go through the translation table.
                    translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
                    _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                # Remember the offending fields for get_invalid_fields().
                self._invalids.update(fields)
            raise except_orm('ValidateError', '\n'.join(error_msgs))
        self._invalids.clear()
952 def default_get(self, cr, uid, fields_list, context=None):
954 Returns default values for the fields in fields_list.
956 :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
957 :type fields_list: list
958 :param context: optional context dictionary - it may contains keys for specifying certain options
959 like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
960 It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
961 or override a default value for a field.
962 A special ``bin_size`` boolean flag may also be passed in the context to request the
963 value of all fields.binary columns to be returned as the size of the binary instead of its
964 contents. This can also be selectively overriden by passing a field-specific flag
965 in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
966 Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
967 :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
969 # trigger view init hook
970 self.view_init(cr, uid, fields_list, context)
976 # get the default values for the inherited fields
977 for t in self._inherits.keys():
978 defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
981 # get the default values defined in the object
982 for f in fields_list:
983 if f in self._defaults:
984 if callable(self._defaults[f]):
985 defaults[f] = self._defaults[f](self, cr, uid, context)
987 defaults[f] = self._defaults[f]
989 fld_def = ((f in self._columns) and self._columns[f]) \
990 or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
993 if isinstance(fld_def, fields.property):
994 property_obj = self.pool.get('ir.property')
995 prop_value = property_obj.get(cr, uid, f, self._name, context=context)
997 if isinstance(prop_value, (browse_record, browse_null)):
998 defaults[f] = prop_value.id
1000 defaults[f] = prop_value
1002 if f not in defaults:
1005 # get the default values set by the user and override the default
1006 # values defined in the object
1007 ir_values_obj = self.pool.get('ir.values')
1008 res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
1009 for id, field, field_value in res:
1010 if field in fields_list:
1011 fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
1012 if fld_def._type in ('many2one', 'one2one'):
1013 obj = self.pool.get(fld_def._obj)
1014 if not obj.search(cr, uid, [('id', '=', field_value or False)]):
1016 if fld_def._type in ('many2many'):
1017 obj = self.pool.get(fld_def._obj)
1019 for i in range(len(field_value)):
1020 if not obj.search(cr, uid, [('id', '=',
1023 field_value2.append(field_value[i])
1024 field_value = field_value2
1025 if fld_def._type in ('one2many'):
1026 obj = self.pool.get(fld_def._obj)
1028 for i in range(len(field_value)):
1029 field_value2.append({})
1030 for field2 in field_value[i]:
1031 if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'):
1032 obj2 = self.pool.get(obj._columns[field2]._obj)
1033 if not obj2.search(cr, uid,
1034 [('id', '=', field_value[i][field2])]):
1036 elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type in ('many2one', 'one2one'):
1037 obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
1038 if not obj2.search(cr, uid,
1039 [('id', '=', field_value[i][field2])]):
1041 # TODO add test for many2many and one2many
1042 field_value2[i][field2] = field_value[i][field2]
1043 field_value = field_value2
1044 defaults[field] = field_value
1046 # get the default values from the context
1047 for key in context or {}:
1048 if key.startswith('default_') and (key[8:] in fields_list):
1049 defaults[key[8:]] = context[key]
1053 def perm_read(self, cr, user, ids, context=None, details=True):
1054 raise NotImplementedError(_('The perm_read method is not implemented on this object !'))
1056 def unlink(self, cr, uid, ids, context=None):
1057 raise NotImplementedError(_('The unlink method is not implemented on this object !'))
1059 def write(self, cr, user, ids, vals, context=None):
1060 raise NotImplementedError(_('The write method is not implemented on this object !'))
1062 def create(self, cr, user, vals, context=None):
1063 raise NotImplementedError(_('The create method is not implemented on this object !'))
1065 def fields_get_keys(self, cr, user, context=None):
1066 res = self._columns.keys()
1067 for parent in self._inherits:
1068 res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
    # returns the definition of each field in the object
    # the optional fields parameter can limit the result to some fields
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """Return a description dict for each field of the model (type, label,
        help, selection, relation attributes, ...), merged with the fields of
        every ``_inherits`` parent.

        NOTE(review): several interleaved source lines are missing from this
        excerpt (e.g. the initialization of ``res``/``sel2``, some guards and
        ``else`` branches); the comments below describe only the visible code.
        """
        translation_obj = self.pool.get('ir.translation')
        # merge parents' fields first so the model's own columns win
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

        if self._columns.keys():
            for f in self._columns.keys():
                field_col = self._columns[f]
                if allfields and f not in allfields:
                    # (guard body -- presumably a ``continue`` -- not visible here)
                res[f] = {'type': field_col._type}
                # This additional attributes for M2M and function field is added
                # because we need to display tooltip with this additional information
                # when client is started in debug mode.
                if isinstance(field_col, fields.function):
                    res[f]['function'] = field_col._fnct and field_col._fnct.func_name or False
                    res[f]['store'] = field_col.store
                    if isinstance(field_col.store, dict):
                        res[f]['store'] = str(field_col.store)
                    res[f]['fnct_search'] = field_col._fnct_search and field_col._fnct_search.func_name or False
                    res[f]['fnct_inv'] = field_col._fnct_inv and field_col._fnct_inv.func_name or False
                    res[f]['fnct_inv_arg'] = field_col._fnct_inv_arg or False
                    res[f]['func_obj'] = field_col._obj or False
                    res[f]['func_method'] = field_col._method
                if isinstance(field_col, fields.many2many):
                    res[f]['related_columns'] = list((field_col._id1, field_col._id2))
                    res[f]['third_table'] = field_col._rel
                # copy plain column attributes only when they are truthy
                for arg in ('string', 'readonly', 'states', 'size', 'required', 'group_operator',
                        'change_default', 'translate', 'help', 'select', 'selectable'):
                    if getattr(field_col, arg):
                        res[f][arg] = getattr(field_col, arg)
                if not write_access:
                    # force fields read-only when the user has no write access
                    res[f]['readonly'] = True
                    res[f]['states'] = {}
                for arg in ('digits', 'invisible', 'filters'):
                    if getattr(field_col, arg, None):
                        res[f][arg] = getattr(field_col, arg)

                # translated label and tooltip, falling back to en_US
                if field_col.string:
                    res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
                    res[f]['string'] = res_trans
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
                res[f]['help'] = help_trans

                if hasattr(field_col, 'selection'):
                    if isinstance(field_col.selection, (tuple, list)):
                        sel = field_col.selection
                        # translate each selection option
                        for (key, val) in sel:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                            sel2.append((key, val2 or val))
                        res[f]['selection'] = sel
                        # call the 'dynamic selection' function
                        res[f]['selection'] = field_col.selection(self, cr, user, context)
                if res[f]['type'] in ('one2many', 'many2many', 'many2one', 'one2one'):
                    # relational fields also expose target model, domain, context
                    res[f]['relation'] = field_col._obj
                    res[f]['domain'] = field_col._domain
                    res[f]['context'] = field_col._context
        #TODO : read the fields from the database
        # filter out fields which aren't in the fields list
        for r in res.keys():
            if r not in allfields:
                # (deletion of the filtered entry is not visible in this excerpt)
1154 # Overload this method if you need a window title which depends on the context
1156 def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
    def __view_look_dom(self, cr, user, node, view_id, context=None):
        """Walk a view DOM and collect, per field node, the extra attributes
        (embedded sub-views, cached selection lists, translations) that the
        client needs.

        NOTE(review): many interleaved source lines are missing from this
        excerpt (the ``encode`` def header, loop headers over child elements,
        several guards); comments describe only the visible code.
        """
        if isinstance(s, unicode):
            return s.encode('utf8')

        # return True if node can be displayed to current user
        def check_group(node):
            if node.get('groups'):
                groups = node.get('groups').split(',')
                access_pool = self.pool.get('ir.model.access')
                # visible if the user belongs to at least one listed group
                can_see = any(access_pool.check_groups(cr, user, group) for group in groups)
                node.set('invisible', '1')
                if 'attrs' in node.attrib:
                    del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # diagram node/arrow pointing at another model: gather its
                # embedded <field> children and resolve them on that model
                if f.tag in ('field'):
                    xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}
            if node.get('name'):
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                    # (``else`` branch for inherited fields not visible here)
                    column = self._inherit_fields[node.get('name')][2]
                relation = self.pool.get(column._obj)
                # embedded sub-views (form/tree/graph) are resolved on the
                # related model and attached under 'views'
                if f.tag in ('form', 'tree', 'graph'):
                    ctx = context.copy()
                    ctx['base_model_name'] = self._name
                    xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                    views[str(f.tag)] = {
                attrs = {'views': views}
                if node.get('widget') and node.get('widget') == 'selection':
                    # Prepare the cached selection list for the client. This needs to be
                    # done even when the field is invisible to the current user, because
                    # other events could need to change its value to any of the selectable ones
                    # (such as on_change events, refreshes, etc.)

                    # If domain and context are strings, we keep them for client-side, otherwise
                    # we evaluate them server-side to consider them when generating the list of
                    # TODO: find a way to remove this hack, by allow dynamic domains
                    if column._domain and not isinstance(column._domain, basestring):
                        dom = column._domain
                    dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                    search_context = dict(context)
                    if column._context and not isinstance(column._context, basestring):
                        search_context.update(column._context)
                    attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                    if (node.get('required') and not int(node.get('required'))) or not column.required:
                        # optional field: offer an empty choice
                        attrs['selection'].append((False, ''))
                fields[node.get('name')] = attrs

        elif node.tag in ('form', 'tree'):
            result = self.view_header_get(cr, user, False, node.tag, context)
            node.set('string', result)

        elif node.tag == 'calendar':
            # calendar views implicitly reference these attribute fields
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if 'groups' in node.attrib:
            # (group-visibility handling not visible in this excerpt)
        if ('lang' in context) and not result:
            # translate the user-visible attributes of the node
            if node.get('string'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                node.set('string', trans)
            if node.get('confirm'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm'))
                node.set('confirm', trans)
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
            node.set('sum', trans)

        # recurse into children to collect their field definitions too
        if children or (node.tag == 'field' and f.tag in ('filter','separator')):
            fields.update(self.__view_look_dom(cr, user, f, view_id, context))
    def _disable_workflow_buttons(self, cr, user, node):
        """Set workflow buttons read-only when the current user belongs to none
        of the groups allowed on the corresponding workflow transition.

        NOTE(review): part of the SQL statement (FROM/WHERE clauses) and some
        guard lines are not visible in this excerpt.
        """
        # admin user can always activate workflow buttons
        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        # only workflow buttons (type != 'object') are considered
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            cr.execute("""SELECT DISTINCT t.group_id
                        INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                        INNER JOIN wkf_transition t ON (t.act_to = a.id)
                        AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # clickable when the transition has no group restriction or the
            # user is in at least one of the allowed groups
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            button.set('readonly', str(int(not can_click)))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        """Post-process a view DOM: resolve its field definitions, disable
        unauthorized workflow buttons and serialize the architecture.

        NOTE(review): several interleaved source lines are missing from this
        excerpt (loop bodies, guards, the return statement); comments describe
        only the visible code.
        """
        fields_def = self.__view_look_dom(cr, user, node, view_id, context=context)
        node = self._disable_workflow_buttons(cr, user, node)
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        if node.tag == 'diagram':
            # diagram views resolve their fields on the node/arrow models
            if node.getchildren()[0].tag == 'node':
                node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, fields_def.keys(), context)
            if node.getchildren()[1].tag == 'arrow':
                arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, fields_def.keys(), context)
            for key, value in node_fields.items():
            for key, value in arrow_fields.items():
        fields = self.fields_get(cr, user, fields_def.keys(), context)
        for field in fields_def:
            # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
            fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
                # field referenced by the view but unknown on the model:
                # build a detailed error listing the composing view parts
                cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
                res = cr.fetchall()[:]
                res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
                msg = "\n * ".join([r[0] for r in res])
                msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
                netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
                raise except_orm('View error', msg)
    def __get_default_calendar_view(self):
        """Generate a default calendar view (For internal use only).

        NOTE(review): several interleaved source lines are missing from this
        excerpt (guards, break/flag updates, the final ``arch`` concatenation
        header and return); comments describe only the visible code.
        """
        arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
                '<calendar string="%s"') % (self._description)

        # fall back to a conventional start-date column when _date_name is absent
        if (self._date_name not in self._columns):
            for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                if dt in self._columns:
                    self._date_name = dt
            # no usable date field could be found on the model
            raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))

        arch += ' date_start="%s"' % (self._date_name)

        # first conventional color column found is used for event coloring
        for color in ["user_id", "partner_id", "x_user_id", "x_partner_id"]:
            if color in self._columns:
                arch += ' color="' + color + '"'

        dt_stop_flag = False

        # prefer an explicit stop-date column...
        for dt_stop in ["date_stop", "date_end", "x_date_stop", "x_date_end"]:
            if dt_stop in self._columns:
                arch += ' date_stop="' + dt_stop + '"'

        # ...otherwise fall back to a duration (delay) column
        if not dt_stop_flag:
            for dt_delay in ["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"]:
                if dt_delay in self._columns:
                    arch += ' date_delay="' + dt_delay + '"'

            # (closing of the <calendar> tag, labelled by _rec_name)
            ' <field name="%s"/>\n'
            '</calendar>') % (self._rec_name)
1394 def __get_default_search_view(self, cr, uid, context=None):
1395 form_view = self.fields_view_get(cr, uid, False, 'form', context=context)
1396 tree_view = self.fields_view_get(cr, uid, False, 'tree', context=context)
1398 fields_to_search = set()
1399 fields = self.fields_get(cr, uid, context=context)
1400 for field in fields:
1401 if fields[field].get('select'):
1402 fields_to_search.add(field)
1403 for view in (form_view, tree_view):
1404 view_root = etree.fromstring(view['arch'])
1405 # Only care about select=1 in xpath below, because select=2 is covered
1406 # by the custom advanced search in clients
1407 fields_to_search = fields_to_search.union(view_root.xpath("//field[@select=1]/@name"))
1409 tree_view_root = view_root # as provided by loop above
1410 search_view = etree.Element("search", attrib={'string': tree_view_root.get("string", "")})
1411 field_group = etree.Element("group")
1412 search_view.append(field_group)
1414 for field_name in fields_to_search:
1415 field_group.append(etree.Element("field", attrib={'name': field_name}))
1417 return etree.tostring(search_view, encoding="utf-8").replace('\t', '')
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Get the detailed composition of the requested view like fields, model, view architecture

        :param cr: database cursor
        :param user: current user id
        :param view_id: id of the view or None
        :param view_type: type of the view to return if view_id is None ('form', tree', ...)
        :param context: context arguments, like lang, time zone
        :param toolbar: true to include contextual actions
        :param submenu: example (portal_project module)
        :return: dictionary describing the composition of the requested view (including inherited views and extensions)
        :raise AttributeError:
                * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
                * if some tag other than 'position' is found in parent view
        :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
        """
        # NOTE(review): this excerpt is missing many interleaved source lines
        # (the ``encode`` def header, loop/guard headers, returns, blank
        # lines); the comments below only describe the code that is visible.

        if isinstance(s, unicode):
            return s.encode('utf8')

        def raise_view_error(error_msg, child_view_id):
            # build an AttributeError naming both the inherited view and its parent
            view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
            raise AttributeError(("View definition error for inherited view '%(xml_id)s' on '%(model)s' model: " + error_msg)
                                 % { 'xml_id': child_view.xml_id,
                                     'parent_xml_id': view.xml_id,
                                     'model': self._name, })

        def _inherit_apply(src, inherit, inherit_id=None):
            def _find(node, node2):
                # locate in ``node`` the element matched by the spec ``node2``
                if node2.tag == 'xpath':
                    res = node.xpath(node2.get('expr'))
                for n in node.getiterator(node2.tag):
                    if node2.tag == 'field':
                        # only compare field names, a field can be only once in a given view
                        # at a given level (and for multilevel expressions, we should use xpath
                        # inheritance spec anyway)
                        if node2.get('name') == n.get('name'):
                    for attr in node2.attrib:
                        if attr == 'position':
                        if n.get(attr) == node2.get(attr):
            # End: _find(node, node2)

            doc_dest = etree.fromstring(encode(inherit))
            toparse = [doc_dest]
            node2 = toparse.pop(0)
            if isinstance(node2, SKIPPED_ELEMENT_TYPES):
            if node2.tag == 'data':
                # a <data> wrapper: its children are the actual specs
                toparse += [ c for c in doc_dest ]
            node = _find(src, node2)
            if node is not None:
                if node2.get('position'):
                    pos = node2.get('position')
                if pos == 'replace':
                    parent = node.getparent()
                    src = copy.deepcopy(node2[0])
                    node.addprevious(child)
                    node.getparent().remove(node)
                elif pos == 'attributes':
                    # alter attributes in place; empty text removes the attribute
                    for child in node2.getiterator('attribute'):
                        attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                        node.set(attribute[0], attribute[1])
                        del(node.attrib[attribute[0]])
                    sib = node.getnext()
                elif pos == 'after':
                    sib.addprevious(child)
                elif pos == 'before':
                    node.addprevious(child)
                    raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
                # (reconstruction of the unmatched spec tag for the error message)
                ' %s="%s"' % (attr, node2.get(attr))
                for attr in node2.attrib
                if attr != 'position'
                tag = "<%s%s>" % (node2.tag, attrs)
                raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
        # End: _inherit_apply(src, inherit)

        result = {'type': view_type, 'model': self._name}
        parent_view_model = None
        # an explicit '<type>_view_ref' in the context selects the view by XML id
        view_ref = context.get(view_type + '_view_ref', False)
        if view_ref and not view_id:
            module, view_ref = view_ref.split('.', 1)
            cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
            view_ref_res = cr.fetchone()
            view_id = view_ref_res[0]

        # fetch by explicit id...
        query = "SELECT arch,name,field_parent,id,type,inherit_id,model FROM ir_ui_view WHERE id=%s"
        query += " AND model=%s"
        params += (self._name,)
        cr.execute(query, params)
        # ...or the default view for this model/type, lowest priority first
        cr.execute('''SELECT
                arch,name,field_parent,id,type,inherit_id,model
                ORDER BY priority''', (self._name, view_type))
        sql_res = cr.fetchone()
        view_id = ok or sql_res[3]
        parent_view_model = sql_res[6]

        # if a view was found
        result['type'] = sql_res[4]
        result['view_id'] = sql_res[3]
        result['arch'] = sql_res[0]

        def _inherit_apply_rec(result, inherit_id):
            # get all views which inherit from (ie modify) this view
            cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
            sql_inherit = cr.fetchall()
            for (inherit, id) in sql_inherit:
                result = _inherit_apply(result, inherit, id)
                # apply the children's own inheriting views, depth-first
                result = _inherit_apply_rec(result, id)

        inherit_result = etree.fromstring(encode(result['arch']))
        result['arch'] = _inherit_apply_rec(inherit_result, sql_res[3])
        result['name'] = sql_res[1]
        result['field_parent'] = sql_res[2] or False

        # otherwise, build some kind of default view
        if view_type == 'form':
            res = self.fields_get(cr, user, context=context)
            xml = '<?xml version="1.0" encoding="utf-8"?> ' \
                  '<form string="%s">' % (self._description,)
            if res[x]['type'] not in ('one2many', 'many2many'):
                xml += '<field name="%s"/>' % (x,)
                if res[x]['type'] == 'text':
        elif view_type == 'tree':
            _rec_name = self._rec_name
            if _rec_name not in self._columns:
                # fall back to the first declared column as the tree label
                if len(self._columns.keys()):
                    _rec_name = self._columns.keys()[0]
            xml = '<?xml version="1.0" encoding="utf-8"?>' \
                  '<tree string="%s"><field name="%s"/></tree>' \
                  % (self._description, _rec_name)
        elif view_type == 'calendar':
            xml = self.__get_default_calendar_view()
        elif view_type == 'search':
            xml = self.__get_default_search_view(cr, user, context)
            # (the ``else`` branch header for unknown view types is not visible)
            xml = '<?xml version="1.0"?>' # what happens here, graph case?
            raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
        result['arch'] = etree.fromstring(encode(xml))
        result['name'] = 'default'
        result['field_parent'] = False
        result['view_id'] = 0

        if parent_view_model != self._name:
            # inherited view defined on another model: record it so string
            # translations can also be looked up under that model's name
            ctx = context.copy()
            ctx['base_model_name'] = parent_view_model

        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
        result['arch'] = xarch
        result['fields'] = xfields

        if context and context.get('active_id', False):
            data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
            act_id = data_menu.id
            data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
            result['submenu'] = getattr(data_action, 'menus', False)

        # (toolbar handling; a ``clean`` helper stripping report payload keys
        # is partially visible below)
        for key in ('report_sxw_content', 'report_rml_content',
                    'report_sxw', 'report_rml',
                    'report_sxw_content_data', 'report_rml_content_data'):
        ir_values_obj = self.pool.get('ir.values')
        resprint = ir_values_obj.get(cr, user, 'action',
                'client_print_multi', [(self._name, False)], False,
        resaction = ir_values_obj.get(cr, user, 'action',
                'client_action_multi', [(self._name, False)], False,
        resrelate = ir_values_obj.get(cr, user, 'action',
                'client_action_relate', [(self._name, False)], False,
        resprint = map(clean, resprint)
        resaction = map(clean, resaction)
        # keep only single-record actions in the toolbar
        resaction = filter(lambda x: not x.get('multi', False), resaction)
        resprint = filter(lambda x: not x.get('multi', False), resprint)
        resrelate = map(lambda x: x[2], resrelate)

        for x in resprint + resaction + resrelate:
            x['string'] = x['name']

        result['toolbar'] = {
            'action': resaction,
1698 _view_look_dom_arch = __view_look_dom_arch
1700 def search_count(self, cr, user, args, context=None):
1703 res = self.search(cr, user, args, context=context, count=True)
1704 if isinstance(res, list):
    def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
        """
        Search for records based on a search domain.

        :param cr: database cursor
        :param user: current user id
        :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
        :param offset: optional number of results to skip in the returned values (default: 0)
        :param limit: optional max number of records to return (default: **None**)
        :param order: optional columns to sort by (default: self._order=id )
        :param context: optional context arguments, like lang, time zone
        :type context: dictionary
        :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
        :return: id or list of ids of records matching the criteria
        :rtype: integer or list of integers
        :raise AccessError: * if user tries to bypass access rules for read on the requested object.

        **Expressing a search domain (args)**

        Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:

            * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
            * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
              The semantics of most of these operators are obvious.
              The ``child_of`` operator will look for records who are children or grand-children of a given record,
              according to the semantics of this model (i.e following the relationship field named by
              ``self._parent_name``, by default ``parent_id``.
            * **value** must be a valid value to compare with the values of **field_name**, depending on its type.

        Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
        These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
        Be very careful about this when you combine them the first time.

        Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::

            [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))

        The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::

            (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
        """
        return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        # Abstract at this level; concrete ORM backends provide the SQL query.
        raise NotImplementedError(_('The search method is not implemented on this object !'))
    def name_get(self, cr, user, ids, context=None):
        """Return (id, text_repr) pairs for the requested records.

        :param cr: database cursor
        :param user: current user id
        :param ids: list of ids
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: tuples with the text representation of requested objects for to-many relationships

        NOTE(review): guard lines (empty-ids shortcut, scalar-id wrapping)
        are not visible in this excerpt.
        """
        if isinstance(ids, (int, long)):
        # representation is the _rec_name column, read in raw form
        return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
            [self._rec_name], context, load='_classic_write')]
    def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
        """
        Search for records and their display names according to a search domain.

        :param cr: database cursor
        :param user: current user id
        :param name: object name to search
        :param args: list of tuples specifying search criteria [('field_name', 'operator', 'value'), ...]
        :param operator: operator for search criterion
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :param limit: optional max number of records to return
        :return: list of object names matching the search criteria, used to provide completion for to-many relationships

        This method is equivalent of :py:meth:`~osv.osv.osv.search` on **name** + :py:meth:`~osv.osv.osv.name_get` on the result.
        See :py:meth:`~osv.osv.osv.search` for an explanation of the possible values for the search domain specified in **args**.
        """
        return self._name_search(cr, user, name, args, operator, context, limit)
    # private implementation of name_search, allows passing a dedicated user for the name_get part to
    # solve some access rights issues
    def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
        # NOTE(review): argument-normalization lines and the final return are
        # not visible in this excerpt.
        # add the name criterion to the caller-supplied domain
        args += [(self._rec_name, operator, name)]
        access_rights_uid = name_get_uid or user
        ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
        # name_get runs as access_rights_uid to work around read-access issues
        res = self.name_get(cr, access_rights_uid, ids, context)
1819 def copy(self, cr, uid, id, default=None, context=None):
1820 raise NotImplementedError(_('The copy method is not implemented on this object !'))
1822 def exists(self, cr, uid, ids, context=None):
1823 raise NotImplementedError(_('The exists method is not implemented on this object !'))
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        """Return the translated field labels of this model for each language
        in ``langs``, merging the labels of ``_inherits`` parents.

        NOTE(review): interleaved source lines (result-dict initialization,
        the language/field loop headers, guards and return) are not visible
        in this excerpt; comments describe only the visible code.
        """
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
        fields = self._columns.keys() + self._inherit_fields.keys()
        #FIXME: collect all calls to _get_source into one SQL call.
        res[lang] = {'code': lang}
        if f in self._columns:
            res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
            res[lang][f] = res_trans
            # fallback: untranslated label declared on the column
            res[lang][f] = self._columns[f].string
        for table in self._inherits:
            # merge labels coming from the _inherits parent models
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
            res[lang]['code'] = lang
            for f in res2[lang]:
                res[lang][f] = res2[lang][f]
    def write_string(self, cr, uid, id, langs, vals, context=None):
        """Store translated field labels for each language in ``langs``,
        propagating inherited fields to the ``_inherits`` parents.

        NOTE(review): the surrounding language/field loop headers are not
        visible in this excerpt; comments describe only the visible code.
        """
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
        #FIXME: try to only call the translation in one SQL
        if field in self._columns:
            src = self._columns[field].string
            self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
        for table in self._inherits:
            # delegate labels of inherited fields to the parent model
            cols = intersect(self._inherit_fields.keys(), vals)
            self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
1865 def _check_removed_columns(self, cr, log=False):
1866 raise NotImplementedError()
    def _add_missing_default_values(self, cr, uid, values, context=None):
        """Complete ``values`` with default values for every field that was
        not provided, without ever overriding the caller-supplied values.

        NOTE(review): the loop header over the ``defaults`` keys (binding
        ``dv``) and the return statement are not visible in this excerpt.
        """
        missing_defaults = []
        avoid_tables = [] # avoid overriding inherited values when parent is set
        for tables, parent_field in self._inherits.items():
            if parent_field in values:
                avoid_tables.append(tables)
        # own columns with no provided value
        for field in self._columns.keys():
            if not field in values:
                missing_defaults.append(field)
        # inherited fields with no provided value, unless the parent record
        # itself was passed (its own values must be kept)
        for field in self._inherit_fields.keys():
            if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
                missing_defaults.append(field)

        if len(missing_defaults):
            # override defaults with the provided values, never allow the other way around
            defaults = self.default_get(cr, uid, missing_defaults, context)
            # m2m default given as a plain list of ids -> wrap in a (6,0,ids) command
            if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
                    and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
                defaults[dv] = [(6, 0, defaults[dv])]
            # o2m default given as a list of dicts -> wrap each in a (0,0,vals) command
            if (dv in self._columns and self._columns[dv]._type == 'one2many' \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
                    and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
                defaults[dv] = [(0, 0, x) for x in defaults[dv]]
            defaults.update(values)
class orm_memory(orm_template):
    """ORM variant whose records are kept in a process-local dict
    (``self.datas``) instead of database tables."""

    # NOTE(review): presumably the method names given special handling by the
    # framework -- confirm against orm_template's use of _protected.
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
    _inherit_fields = {}
    # expiry thresholds used by vaccum(): maximum record count and maximum
    # record age in hours (see the count/age-based expiration below)
    _max_count = config.get('osv_memory_count_limit')
    _max_hours = config.get('osv_memory_age_limit')
    def __init__(self, cr):
        super(orm_memory, self).__init__(cr)
        # NOTE(review): initialization of the in-memory store (e.g. the
        # ``datas`` dict and counters) is not visible in this excerpt.
        # remove any workflow instances referring to this model's records
        cr.execute('delete from wkf_instance where res_type=%s', (self._name,))
1912 def _check_access(self, uid, object_id, mode):
1913 if uid != 1 and self.datas[object_id]['internal.create_uid'] != uid:
1914 raise except_orm(_('AccessError'), '%s access is only allowed on your own records for osv_memory objects except for the super-user' % mode.capitalize())
1916 def vaccum(self, cr, uid, force=False):
1917 """Run the vaccuum cleaning system, expiring and removing old records from the
1918 virtual osv_memory tables if the "max count" or "max age" conditions are enabled
1919 and have been reached. This method can be called very often (e.g. everytime a record
1920 is created), but will only actually trigger the cleanup process once out of
1921 "_check_time" times (by default once out of 20 calls)."""
1923 if (not force) and (self.check_id % self._check_time):
1927 # Age-based expiration
1929 max = time.time() - self._max_hours * 60 * 60
1930 for k,v in self.datas.iteritems():
1931 if v['internal.date_access'] < max:
1933 self.unlink(cr, 1, tounlink)
1935 # Count-based expiration
1936 if self._max_count and len(self.datas) > self._max_count:
1937 # sort by access time to remove only the first/oldest ones in LRU fashion
1938 records = self.datas.items()
1939 records.sort(key=lambda x:x[1]['internal.date_access'])
1940 self.unlink(cr, 1, [x[0] for x in records[:len(self.datas)-self._max_count]])
1944 def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
1947 if not fields_to_read:
1948 fields_to_read = self._columns.keys()
1952 if isinstance(ids, (int, long)):
1956 for f in fields_to_read:
1957 record = self.datas.get(id)
1959 self._check_access(user, id, 'read')
1960 r[f] = record.get(f, False)
1961 if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
1964 if id in self.datas:
1965 self.datas[id]['internal.date_access'] = time.time()
1966 # all non inherited fields for which the attribute whose name is in load is False
1967 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
1969 for f in fields_post:
1970 todo.setdefault(self._columns[f]._multi, [])
1971 todo[self._columns[f]._multi].append(f)
1972 for key, val in todo.items():
1974 res2 = self._columns[val[0]].get_memory(cr, self, ids, val, user, context=context, values=result)
1976 for record in result:
1977 if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
1978 multi_fields = res2.get(record['id'],{})
1980 record[pos] = multi_fields.get(pos,[])
1983 res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
1984 for record in result:
1986 record[f] = res2[record['id']]
1990 if isinstance(ids_orig, (int, long)):
1994 def write(self, cr, user, ids, vals, context=None):
2000 if self._columns[field]._classic_write:
2001 vals2[field] = vals[field]
2003 upd_todo.append(field)
2004 for object_id in ids:
2005 self._check_access(user, object_id, mode='write')
2006 self.datas[object_id].update(vals2)
2007 self.datas[object_id]['internal.date_access'] = time.time()
2008 for field in upd_todo:
2009 self._columns[field].set_memory(cr, self, object_id, field, vals[field], user, context)
2010 self._validate(cr, user, [object_id], context)
2011 wf_service = netsvc.LocalService("workflow")
2012 wf_service.trg_write(user, self._name, object_id, cr)
2015 def create(self, cr, user, vals, context=None):
2016 self.vaccum(cr, user)
2018 id_new = self.next_id
2020 vals = self._add_missing_default_values(cr, user, vals, context)
2025 if self._columns[field]._classic_write:
2026 vals2[field] = vals[field]
2028 upd_todo.append(field)
2029 self.datas[id_new] = vals2
2030 self.datas[id_new]['internal.date_access'] = time.time()
2031 self.datas[id_new]['internal.create_uid'] = user
2033 for field in upd_todo:
2034 self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
2035 self._validate(cr, user, [id_new], context)
2036 if self._log_create and not (context and context.get('no_store_function', False)):
2037 message = self._description + \
2039 self.name_get(cr, user, [id_new], context=context)[0][1] + \
2041 self.log(cr, user, id_new, message, True, context=context)
2042 wf_service = netsvc.LocalService("workflow")
2043 wf_service.trg_create(user, self._name, id_new, cr)
2046 def _where_calc(self, cr, user, args, active_test=True, context=None):
2051 # if the object has a field named 'active', filter out all inactive
2052 # records unless they were explicitely asked for
2053 if 'active' in self._columns and (active_test and context.get('active_test', True)):
2055 active_in_args = False
2057 if a[0] == 'active':
2058 active_in_args = True
2059 if not active_in_args:
2060 args.insert(0, ('active', '=', 1))
2062 args = [('active', '=', 1)]
2065 e = expression.expression(args)
2066 e.parse(cr, user, self, context)
2070 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
2074 # implicit filter on current user except for superuser
2078 args.insert(0, ('internal.create_uid', '=', user))
2080 result = self._where_calc(cr, user, args, context=context)
2082 return self.datas.keys()
2086 #Find the value of dict
2089 for id, data in self.datas.items():
2091 if limit and (counter > int(limit) + int(offset)):
2096 val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
2097 elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
2098 val = eval('data[arg[0]]'+arg[1] +' arg[2]', locals())
2099 elif arg[1] in ['ilike']:
2100 val = (str(data[arg[0]]).find(str(arg[2]))!=-1)
2103 if counter > offset:
2110 def unlink(self, cr, uid, ids, context=None):
2112 self._check_access(uid, id, 'unlink')
2113 self.datas.pop(id, None)
2115 cr.execute('delete from wkf_instance where res_type=%s and res_id IN %s', (self._name, tuple(ids)))
2118 def perm_read(self, cr, user, ids, context=None, details=True):
2120 credentials = self.pool.get('res.users').name_get(cr, user, [user])[0]
2121 create_date = time.strftime('%Y-%m-%d %H:%M:%S')
2123 self._check_access(user, id, 'read')
2125 'create_uid': credentials,
2126 'create_date': create_date,
2128 'write_date': False,
2134 def _check_removed_columns(self, cr, log=False):
2135 # nothing to check in memory...
2138 def exists(self, cr, uid, ids, context=None):
2139 if isinstance(ids, (int,long)):
2141 return all(( id in self.datas for id in ids ))
2143 class orm(orm_template):
2144 _sql_constraints = []
2147 _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
2148 __logger = logging.getLogger('orm')
2149 __schema = logging.getLogger('orm.schema')
    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
        """
        Get the list of records in list view grouped by the given ``groupby`` fields

        :param cr: database cursor
        :param uid: current user id
        :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
        :param fields: list of fields present in the list view specified on the object
        :param groupby: list of fields on which to groupby the records
        :type fields_list: list (example ['field_name_1', ...])
        :param offset: optional number of records to skip
        :param limit: optional max number of records to return
        :param context: context arguments, like lang, time zone
        :param order: optional ``order by`` specification, for overriding the natural
                      sort ordering of the groups, see also :py:meth:`~osv.osv.osv.search`
                      (supported only for many2one fields currently)
        :return: list of dictionaries(one dictionary for each record) containing:

                 * the values of fields grouped by the fields in ``groupby`` argument
                 * __domain: list of tuples specifying the search criteria
                 * __context: dictionary with argument like ``groupby``
        :rtype: [{'field_name_1': value, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        context = context or {}
        # ACL check first; _apply_ir_rules below adds the record-level rules.
        self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
            fields = self._columns.keys()

        query = self._where_calc(cr, uid, domain, context=context)
        self._apply_ir_rules(cr, uid, query, 'read', context=context)

        # Take care of adding join(s) if groupby is an '_inherits'ed field
        groupby_list = groupby
        qualified_groupby_field = groupby
            if isinstance(groupby, list):
                # Only the first groupby level is computed here.
                groupby = groupby[0]
            qualified_groupby_field = self._inherits_join_calc(groupby, query)

            assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
            groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
            assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"

        fget = self.fields_get(cr, uid, fields, context=context)
        float_int_fields = filter(lambda x: fget[x]['type'] in ('float', 'integer'), fields)
        group_count = group_by = groupby
            if fget.get(groupby):
                groupby_type = fget[groupby]['type']
                if groupby_type in ('date', 'datetime'):
                    # Dates are grouped per calendar month.
                    qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
                    flist = "%s as %s " % (qualified_groupby_field, groupby)
                elif groupby_type == 'boolean':
                    # NULL booleans are folded into the False group.
                    qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
                    flist = "%s as %s " % (qualified_groupby_field, groupby)
                    flist = qualified_groupby_field
                # Don't allow arbitrary values, as this would be a SQL injection vector!
                raise except_orm(_('Invalid group_by'),
                                 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))

        # Aggregate numeric columns that are real DB columns.
        fields_pre = [f for f in float_int_fields if
                   f == self.CONCURRENCY_CHECK_FIELD
                or (f in self._columns and getattr(self._columns[f], '_classic_write'))]
        for f in fields_pre:
            if f not in ['id', 'sequence']:
                # Per-field aggregate, defaulting to SUM.
                group_operator = fget[f].get('group_operator', 'sum')
                qualified_field = '"%s"."%s"' % (self._table, f)
                flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)

        gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''

        from_clause, where_clause, where_clause_params = query.get_sql()
        where_clause = where_clause and ' WHERE ' + where_clause
        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
        cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
        for r in cr.dictfetchall():
            for fld, val in r.items():
                # Normalize SQL NULL to the ORM convention of False.
                if val == None: r[fld] = False
            alldata[r['id']] = r

        # Re-search to apply the requested (or natural) ordering to the groups.
        data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=orderby or groupby, context=context)
        # the IDS of records that have groupby field value = False or '' should be sorted too
        data_ids += filter(lambda x:x not in data_ids, alldata.keys())
        data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
        # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
        data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
            d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
            if not isinstance(groupby_list, (str, unicode)):
                if groupby or not context.get('group_by_no_leaf', False):
                    d['__context'] = {'group_by': groupby_list[1:]}
            if groupby and groupby in fget:
                if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
                    # Expand the 'yyyy-mm' bucket into a human label and a
                    # month-range domain covering the whole month.
                    dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
                    days = calendar.monthrange(dt.year, dt.month)[1]
                    d[groupby] = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d').strftime('%B %Y')
                    d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
                                     (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
                del alldata[d['id']][groupby]
            d.update(alldata[d['id']])
2272 def _inherits_join_add(self, parent_model_name, query):
2274 Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
2276 :param parent_model_name: name of the parent model for which the clauses should be added
2277 :param query: query object on which the JOIN should be added
2279 inherits_field = self._inherits[parent_model_name]
2280 parent_model = self.pool.get(parent_model_name)
2281 parent_table_name = parent_model._table
2282 quoted_parent_table_name = '"%s"' % parent_table_name
2283 if quoted_parent_table_name not in query.tables:
2284 query.tables.append(quoted_parent_table_name)
2285 query.where_clause.append('("%s".%s = %s.id)' % (self._table, inherits_field, parent_table_name))
2287 def _inherits_join_calc(self, field, query):
2289 Adds missing table select and join clause(s) to ``query`` for reaching
2290 the field coming from an '_inherits' parent table (no duplicates).
2292 :param field: name of inherited field to reach
2293 :param query: query object on which the JOIN should be added
2294 :return: qualified name of field, to be used in SELECT clause
2296 current_table = self
2297 while field in current_table._inherit_fields and not field in current_table._columns:
2298 parent_model_name = current_table._inherit_fields[field][0]
2299 parent_table = self.pool.get(parent_model_name)
2300 current_table._inherits_join_add(parent_model_name, query)
2301 current_table = parent_table
2302 return '"%s".%s' % (current_table._table, field)
    def _parent_store_compute(self, cr):
        """Recompute the nested-set columns ``parent_left``/``parent_right``
        for the whole table by a depth-first traversal from the root records
        (those whose parent field is NULL). No-op when ``_parent_store`` is
        disabled for this model.

        NOTE(review): some statements (early return, position bookkeeping)
        appear elided in this excerpt.
        """
        if not self._parent_store:
        logger = netsvc.Logger()
        logger.notifyChannel('data', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
        def browse_rec(root, pos=0):
            # Visit ``root`` and, recursively, all of its children; children
            # are ordered by _parent_order when one is defined.
            where = self._parent_name+'='+str(root)
                where = self._parent_name+' IS NULL'
            if self._parent_order:
                where += ' order by '+self._parent_order
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            for id in cr.fetchall():
                pos2 = browse_rec(id[0], pos2)
            # Write the interval [pos, pos2] bracketing this record's subtree.
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
        # Start the traversal from every root (NULL parent) record.
        query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
        if self._parent_order:
            query += ' order by ' + self._parent_order
        for (root,) in cr.fetchall():
            pos = browse_rec(root, pos)
    def _update_store(self, cr, f, k):
        """Backfill the database column of a stored function field ``k``
        (field object ``f``) by computing its value for every existing record,
        in batches (the visible slice suggests 40 ids at a time).
        """
        logger = netsvc.Logger()
        logger.notifyChannel('data', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
        # ss = (sql placeholder, python->sql conversion function) for column k.
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
            # Consume the id list batch by batch.
            ids_lst = ids_lst[40:]
            # Compute the function field as super-user with an empty context.
            res = f.get(cr, self, iids, k, 1, {})
            for key, val in res.items():
                # if val is a many2one, just write the ID
                if type(val) == tuple:
                # Skip plain False (unset); any non-bool value is written.
                if (val<>False) or (type(val)<>bool):
                    cr.execute(update_query, (ss[1](val), key))
    def _check_selection_field_value(self, cr, uid, field, value, context=None):
        """Raise except_orm if value is not among the valid values for the selection field"""
        # Reference fields hold 'model.name,id' strings: validate the id part
        # separately, then check the model part against the selection.
        if self._columns[field]._type == 'reference':
            val_model, val_id_str = value.split(',', 1)
                val_id = long(val_id_str)
            raise except_orm(_('ValidateError'),
                             _('Invalid value for reference field "%s" (last part must be a non-zero integer): "%s"') % (field, value))
        # Static selections are (value, label) pairs; callable selections are
        # evaluated with the current cr/uid/context before membership testing.
        if isinstance(self._columns[field].selection, (tuple, list)):
            if val in dict(self._columns[field].selection):
        elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
        raise except_orm(_('ValidateError'),
                         _('The value "%s" for the field "%s" is not in the selection') % (value, field))
    def _check_removed_columns(self, cr, log=False):
        # iterate on the database columns to drop the NOT NULL constraints
        # of fields which were required but have been removed (or will be added by another module)
        # Columns still declared on the model (non-stored function fields have
        # no DB column) plus the standard audit columns are exempt.
        columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
        columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
        # Query pg_attribute for live (non-dropped) user columns of the table
        # that are not known to the model anymore.
        cr.execute("SELECT a.attname, a.attnotnull"
                   " FROM pg_class c, pg_attribute a"
                   " WHERE c.relname=%s"
                   " AND c.oid=a.attrelid"
                   " AND a.attisdropped=%s"
                   " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
                   " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
        # NOTE(review): the trailing comma after cr.execute(...) above turns the
        # statement into a 1-tuple expression; harmless but looks accidental.
        for column in cr.dictfetchall():
            self.__logger.debug("column %s is in the table %s but not in the corresponding object %s",
                                column['attname'], self._table, self._name)
            if column['attnotnull']:
                # Relax the constraint so inserts from the trimmed-down model
                # keep working.
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
                self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                                    self._table, column['attname'])
2396 def _auto_init(self, cr, context=None):
2399 store_compute = False
2402 self._field_create(cr, context=context)
2403 if getattr(self, '_auto', True):
2404 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2406 cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
2407 cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''")))
2409 self.__schema.debug("Table '%s': created", self._table)
2412 if self._parent_store:
2413 cr.execute("""SELECT c.relname
2414 FROM pg_class c, pg_attribute a
2415 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
2416 """, (self._table, 'parent_left'))
2418 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
2419 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
2420 if 'parent_left' not in self._columns:
2421 self.__logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
2423 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2424 self._table, 'parent_left', 'INTEGER')
2425 elif not self._columns['parent_left'].select:
2426 self.__logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
2428 if 'parent_right' not in self._columns:
2429 self.__logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
2431 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2432 self._table, 'parent_right', 'INTEGER')
2433 elif not self._columns['parent_right'].select:
2434 self.__logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
2436 if self._columns[self._parent_name].ondelete != 'cascade':
2437 self.__logger.error("The column %s on object %s must be set as ondelete='cascade'",
2438 self._parent_name, self._name)
2441 store_compute = True
2443 if self._log_access:
2445 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
2446 'create_date': 'TIMESTAMP',
2447 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
2448 'write_date': 'TIMESTAMP'
2453 FROM pg_class c, pg_attribute a
2454 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
2455 """, (self._table, k))
2457 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
2459 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2460 self._table, k, logs[k])
2462 self._check_removed_columns(cr, log=False)
2464 # iterate on the "object columns"
2465 todo_update_store = []
2466 update_custom_fields = context.get('update_custom_fields', False)
2468 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2469 "FROM pg_class c,pg_attribute a,pg_type t " \
2470 "WHERE c.relname=%s " \
2471 "AND c.oid=a.attrelid " \
2472 "AND a.atttypid=t.oid", (self._table,))
2473 col_data = dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
2476 for k in self._columns:
2477 if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
2479 #Not Updating Custom fields
2480 if k.startswith('x_') and not update_custom_fields:
2483 f = self._columns[k]
2485 if isinstance(f, fields.one2many):
2486 cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,))
2488 if self.pool.get(f._obj):
2489 if f._fields_id not in self.pool.get(f._obj)._columns.keys():
2490 if not self.pool.get(f._obj)._inherits or (f._fields_id not in self.pool.get(f._obj)._inherit_fields.keys()):
2491 raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
2494 cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id))
2495 res = cr.fetchone()[0]
2497 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY (%s) REFERENCES "%s" ON DELETE SET NULL' % (self._obj, f._fields_id, f._table))
2498 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE SET NULL",
2499 self._obj, f._fields_id, f._table)
2500 elif isinstance(f, fields.many2many):
2501 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (f._rel,))
2502 if not cr.dictfetchall():
2503 if not self.pool.get(f._obj):
2504 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2505 ref = self.pool.get(f._obj)._table
2506 # ref = f._obj.replace('.', '_')
2507 cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, UNIQUE("%s","%s")) WITH OIDS' % (f._rel, f._id1, self._table, f._id2, ref, f._id1, f._id2))
2508 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
2509 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
2510 cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref))
2512 self.__schema.debug("Create table '%s': relation between '%s' and '%s'",
2513 f._rel, self._table, ref)
2515 res = col_data.get(k, [])
2516 res = res and [res] or []
2517 if not res and hasattr(f, 'oldname'):
2518 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2519 "FROM pg_class c,pg_attribute a,pg_type t " \
2520 "WHERE c.relname=%s " \
2521 "AND a.attname=%s " \
2522 "AND c.oid=a.attrelid " \
2523 "AND a.atttypid=t.oid", (self._table, f.oldname))
2524 res_old = cr.dictfetchall()
2525 if res_old and len(res_old) == 1:
2526 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
2528 res[0]['attname'] = k
2529 self.__schema.debug("Table '%s': renamed column '%s' to '%s'",
2530 self._table, f.oldname, k)
2534 f_pg_type = f_pg_def['typname']
2535 f_pg_size = f_pg_def['size']
2536 f_pg_notnull = f_pg_def['attnotnull']
2537 if isinstance(f, fields.function) and not f.store and\
2538 not getattr(f, 'nodrop', False):
2539 self.__logger.info('column %s (%s) in table %s removed: converted to a function !\n',
2540 k, f.string, self._table)
2541 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
2543 self.__schema.debug("Table '%s': dropped column '%s' with cascade",
2547 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
2552 ('text', 'char', 'VARCHAR(%d)' % (f.size or 0,), '::VARCHAR(%d)'%(f.size or 0,)),
2553 ('varchar', 'text', 'TEXT', ''),
2554 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2555 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
2556 ('timestamp', 'date', 'date', '::date'),
2557 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2558 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2560 if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
2561 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2562 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
2563 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size))
2564 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2566 self.__schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
2567 self._table, k, f_pg_size, f.size)
2569 if (f_pg_type==c[0]) and (f._type==c[1]):
2570 if f_pg_type != f_obj_type:
2572 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2573 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
2574 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
2575 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2577 self.__schema.debug("Table '%s': column '%s' changed type from %s to %s",
2578 self._table, k, c[0], c[1])
2581 if f_pg_type != f_obj_type:
2585 newname = k + '_moved' + str(i)
2586 cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
2587 "WHERE c.relname=%s " \
2588 "AND a.attname=%s " \
2589 "AND c.oid=a.attrelid ", (self._table, newname))
2590 if not cr.fetchone()[0]:
2594 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2595 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
2596 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2597 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2598 self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
2599 self._table, k, f_pg_type, f._type, newname)
2601 # if the field is required and hasn't got a NOT NULL constraint
2602 if f.required and f_pg_notnull == 0:
2603 # set the field to the default value if any
2604 if k in self._defaults:
2605 if callable(self._defaults[k]):
2606 default = self._defaults[k](self, cr, 1, context)
2608 default = self._defaults[k]
2610 if (default is not None):
2611 ss = self._columns[k]._symbol_set
2612 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
2613 cr.execute(query, (ss[1](default),))
2614 # add the NOT NULL constraint
2617 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2619 self.__schema.debug("Table '%s': column '%s': added NOT NULL constraint",
2622 msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
2623 "If you want to have it, you should update the records and execute manually:\n"\
2624 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2625 self.__schema.warn(msg, self._table, k, self._table, k)
2627 elif not f.required and f_pg_notnull == 1:
2628 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2630 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2633 indexname = '%s_%s_index' % (self._table, k)
2634 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
2635 res2 = cr.dictfetchall()
2636 if not res2 and f.select:
2637 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2639 if f._type == 'text':
2640 # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
2641 msg = "Table '%s': Adding (b-tree) index for text column '%s'."\
2642 "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
2643 " because there is a length limit for indexable btree values!\n"\
2644 "Use a search view instead if you simply want to make the field searchable."
2645 self.__schema.warn(msg, self._table, k, f._type)
2646 if res2 and not f.select:
2647 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
2649 msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
2650 self.__schema.debug(msg, self._table, k, f._type)
2652 if isinstance(f, fields.many2one):
2653 ref = self.pool.get(f._obj)._table
2654 if ref != 'ir_actions':
2655 cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
2656 'pg_attribute as att1, pg_attribute as att2 '
2657 'WHERE con.conrelid = cl1.oid '
2658 'AND cl1.relname = %s '
2659 'AND con.confrelid = cl2.oid '
2660 'AND cl2.relname = %s '
2661 'AND array_lower(con.conkey, 1) = 1 '
2662 'AND con.conkey[1] = att1.attnum '
2663 'AND att1.attrelid = cl1.oid '
2664 'AND att1.attname = %s '
2665 'AND array_lower(con.confkey, 1) = 1 '
2666 'AND con.confkey[1] = att2.attnum '
2667 'AND att2.attrelid = cl2.oid '
2668 'AND att2.attname = %s '
2669 "AND con.contype = 'f'", (self._table, ref, k, 'id'))
2670 res2 = cr.dictfetchall()
2672 if res2[0]['confdeltype'] != POSTGRES_CONFDELTYPES.get(f.ondelete.upper(), 'a'):
2673 cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"')
2674 cr.execute('ALTER TABLE "' + self._table + '" ADD FOREIGN KEY ("' + k + '") REFERENCES "' + ref + '" ON DELETE ' + f.ondelete)
2676 self.__schema.debug("Table '%s': column '%s': XXX",
2679 netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, "Programming error, column %s->%s has multiple instances !" % (self._table, k))
2681 if not isinstance(f, fields.function) or f.store:
2682 # add the missing field
2683 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2684 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2685 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2686 self._table, k, get_pg_type(f)[1])
2689 if not create and k in self._defaults:
2690 if callable(self._defaults[k]):
2691 default = self._defaults[k](self, cr, 1, context)
2693 default = self._defaults[k]
2695 ss = self._columns[k]._symbol_set
2696 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
2697 cr.execute(query, (ss[1](default),))
2699 netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
2701 if isinstance(f, fields.function):
2703 if f.store is not True:
2704 order = f.store[f.store.keys()[0]][2]
2705 todo_update_store.append((order, f, k))
2707 # and add constraints if needed
2708 if isinstance(f, fields.many2one):
2709 if not self.pool.get(f._obj):
2710 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2711 ref = self.pool.get(f._obj)._table
2712 # ref = f._obj.replace('.', '_')
2713 # ir_actions is inherited so foreign key doesn't work on it
2714 if ref != 'ir_actions':
2715 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete))
2716 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2717 self._table, k, ref, f.ondelete)
2719 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2723 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2724 self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
2727 msg = "WARNING: unable to set column %s of table %s not null !\n"\
2728 "Try to re-run: openerp-server.py --update=module\n"\
2729 "If it doesn't work, update records and execute manually:\n"\
2730 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2731 self.__logger.warn(msg, k, self._table, self._table, k)
2733 for order, f, k in todo_update_store:
2734 todo_end.append((order, self._update_store, (f, k)))
2737 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2738 create = not bool(cr.fetchone())
2740 cr.commit() # start a new transaction
2742 for (key, con, _) in self._sql_constraints:
2743 conname = '%s_%s' % (self._table, key)
2745 cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
2746 existing_constraints = cr.dictfetchall()
2751 'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
2752 'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
2753 self._table, conname, con),
2754 'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
2759 'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
2760 'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
2761 'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
2767 if not existing_constraints:
2768 # constraint does not exists:
2769 sql_actions['add']['execute'] = True
2770 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
2771 elif con.lower() not in [item['condef'].lower() for item in existing_constraints]:
2772 # constraint exists but its definition has changed:
2773 sql_actions['drop']['execute'] = True
2774 sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
2775 sql_actions['add']['execute'] = True
2776 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
2778 # we need to add the constraint:
2779 sql_actions = [item for item in sql_actions.values()]
2780 sql_actions.sort(key=lambda x: x['order'])
2781 for sql_action in [action for action in sql_actions if action['execute']]:
2783 cr.execute(sql_action['query'])
2785 self.__schema.debug(sql_action['msg_ok'])
2787 self.__schema.warn(sql_action['msg_err'])
2791 if hasattr(self, "_sql"):
2792 for line in self._sql.split(';'):
2793 line2 = line.replace('\n', '').strip()
2798 self._parent_store_compute(cr)
def __init__(self, cr):
    """Initialize this model in the registry: run the base setup, register
    stored-function triggers and SQL-constraint error messages, load
    'manual' columns defined in ir.model.fields, and wire up the
    _inherits bookkeeping.

    :param cr: database cursor (used to read manual field definitions)

    NOTE(review): this excerpt elides several original lines (gaps in the
    fused numbering); inline comments below mark where code is missing.
    """
    super(orm, self).__init__(cr)

    if not hasattr(self, '_log_access'):
        # if not access is not specify, it is the same value as _auto
        self._log_access = getattr(self, "_auto", True)

    # Copy so the per-instance tweaks below don't mutate the class-level dict.
    self._columns = self._columns.copy()
    for store_field in self._columns:
        f = self._columns[store_field]
        if hasattr(f, 'digits_change'):
            # [elided in excerpt: presumably `f.digits_change(cr)` — confirm]

        # Drop any previously registered store trigger for this exact
        # (model, field) pair before re-registering it below.
        def not_this_field(stored_func):
            x, y, z, e, f, l = stored_func
            return x != self._name or y != store_field
        self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
        if not isinstance(f, fields.function):
            # [elided in excerpt: presumably `continue` plus the f.store guards]

        # store=True shorthand: recompute on this model whenever its own
        # records change — priority 10, no time length.
        sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
        for object, aa in sm.items():
            # [elided in excerpt: tuple-length dispatch on the store spec]
            (fnct, fields2, order, length) = aa
            # [elided in excerpt: 3-tuple fallback branch]
            (fnct, fields2, order) = aa
            # [elided in excerpt: `else:` branch raising on a malformed spec]
            raise except_orm('Error',
                ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
            self.pool._store_function.setdefault(object, [])
            self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
            # Keep triggers ordered by priority (index 4 of the trigger tuple).
            self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))

    # Map "<table>_<key>" constraint names to user-visible error messages.
    for (key, _, msg) in self._sql_constraints:
        self.pool._sql_error[self._table+'_'+key] = msg

    # Load manual fields
    cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
    # [elided in excerpt: fetch/guard before loading manual fields]
    cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
    for field in cr.dictfetchall():
        if field['name'] in self._columns:
            # [elided in excerpt: presumably `continue` — column already defined in code]
        # [elided in excerpt: opening of the `attrs = {` keyword-arguments dict]
            'string': field['field_description'],
            'required': bool(field['required']),
            'readonly': bool(field['readonly']),
            # NOTE(review): eval() of a stored domain string — trusted
            # admin-entered data only; never feed user input through here.
            'domain': eval(field['domain']) if field['domain'] else None,
            'size': field['size'],
            'ondelete': field['on_delete'],
            'translate': (field['translate']),
            #'select': int(field['select_level'])
        # [elided in excerpt: closing of the attrs dict]
        if field['ttype'] == 'selection':
            self._columns[field['name']] = getattr(fields, field['ttype'])(eval(field['selection']), **attrs)
        elif field['ttype'] == 'reference':
            self._columns[field['name']] = getattr(fields, field['ttype'])(selection=eval(field['selection']), **attrs)
        elif field['ttype'] == 'many2one':
            self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], **attrs)
        elif field['ttype'] == 'one2many':
            self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], field['relation_field'], **attrs)
        elif field['ttype'] == 'many2many':
            # Synthesize a deterministic relation table name for the m2m.
            _rel1 = field['relation'].replace('.', '_')
            _rel2 = field['model'].replace('.', '_')
            _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
            self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], _rel_name, 'id1', 'id2', **attrs)
        # [elided in excerpt: `else:` fallback for all other field types]
            self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
    self._inherits_check()
    self._inherits_reload()
    if not self._sequence:
        self._sequence = self._table + '_id_seq'
    for k in self._defaults:
        assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
    # Reset any cached state held by the column objects.
    for f in self._columns:
        self._columns[f].restart()
2886 # Update objects that use this one to update their _inherits fields
2889 def _inherits_reload_src(self):
2890 for obj in self.pool.obj_pool.values():
2891 if self._name in obj._inherits:
2892 obj._inherits_reload()
def _inherits_reload(self):
    """Recompute ``self._inherit_fields`` from the parents listed in
    ``_inherits``, refresh ``_all_columns`` and cascade the reload to
    models that in turn inherit from this one.

    Each entry maps a field name to a tuple
    ``(parent_model, link_column, column_object, original_parent)``.
    """
    # [elided in excerpt: accumulator init — presumably `res = {}`]
    for table in self._inherits:
        # Pull in what the parent itself inherited, then its own columns.
        res.update(self.pool.get(table)._inherit_fields)
        for col in self.pool.get(table)._columns.keys():
            res[col] = (table, self._inherits[table], self.pool.get(table)._columns[col], table)
        for col in self.pool.get(table)._inherit_fields.keys():
            # Preserve the original defining parent (index 3) so lookups
            # can chase multi-level _inherits chains.
            res[col] = (table, self._inherits[table], self.pool.get(table)._inherit_fields[col][2], self.pool.get(table)._inherit_fields[col][3])
    self._inherit_fields = res
    self._all_columns = self._get_column_infos()
    # Cascade: models that _inherits from us must rebuild as well.
    self._inherits_reload_src()
def _get_column_infos(self):
    """Returns a dict mapping all fields names (direct fields and
       inherited field via _inherits) to a ``column_info`` struct
       giving detailed columns """
    # [elided in excerpt: result-dict init — presumably `result = {}`]
    # Inherited fields first so that, on a name clash, the model's own
    # column (set in the second loop) wins.
    for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
        result[k] = fields.column_info(k, col, parent, m2o, original_parent)
    for k, col in self._columns.iteritems():
        result[k] = fields.column_info(k, col)
    # [elided in excerpt: presumably `return result`]
2917 def _inherits_check(self):
2918 for table, field_name in self._inherits.items():
2919 if field_name not in self._columns:
2920 logging.getLogger('init').info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.' % (field_name, self._name))
2921 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
2922 required=True, ondelete="cascade")
2923 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() != "cascade":
2924 logging.getLogger('init').warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.' % (field_name, self._name))
2925 self._columns[field_name].required = True
2926 self._columns[field_name].ondelete = "cascade"
2928 #def __getattr__(self, name):
2930 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
2931 # (though inherits doesn't use Python inheritance).
2932 # Handles translating between local ids and remote ids.
2933 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
2934 # when you have inherits.
2936 # for model, field in self._inherits.iteritems():
2937 # proxy = self.pool.get(model)
2938 # if hasattr(proxy, name):
2939 # attribute = getattr(proxy, name)
2940 # if not hasattr(attribute, '__call__'):
2944 # return super(orm, self).__getattr__(name)
2946 # def _proxy(cr, uid, ids, *args, **kwargs):
2947 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
2948 # lst = [obj[field].id for obj in objects if obj[field]]
2949 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
def fields_get(self, cr, user, fields=None, context=None):
    """
    Get the description of list of fields

    :param cr: database cursor
    :param user: current user id
    :param fields: list of fields
    :param context: context arguments, like lang, time zone
    :return: dictionary of field dictionaries, each one describing a field of the business object
    :raise AccessError: * if user has no create/write rights on the requested object
    """
    # Users lacking both write and create access get the fields flagged
    # read-only; the computed flag is forwarded to the parent implementation.
    ira = self.pool.get('ir.model.access')
    write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
                   ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
    return super(orm, self).fields_get(cr, user, fields, context, write_access)
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
    """Read the requested ``fields`` for the records in ``ids``.

    Checks 'read' access, delegates the heavy lifting to ``_read_flat``,
    and post-processes the result. When ``ids`` is a single id, a single
    dict is returned instead of a list.

    NOTE(review): several lines are elided in this excerpt (marked below).
    """
    # [elided in excerpt: context default / setup]
    self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
    # [elided in excerpt: guard — presumably only taken when `fields` is not given]
    fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
    if isinstance(ids, (int, long)):
        # [elided in excerpt: wrap the scalar — presumably `select = [ids]`]
    # Accept both plain ids and {'id': ...} dicts in the selection.
    select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
    result = self._read_flat(cr, user, select, fields, context, load)
    # [elided in excerpt: loop over result records, binding each to `r`]
    for key, v in r.items():
        # [elided in excerpt: per-value post-processing]
    if isinstance(ids, (int, long, dict)):
        # Scalar input -> scalar output (False when nothing was read).
        return result and result[0] or False
    # [elided in excerpt: presumably `return result`]
def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
    """Low-level read: fetch classic-storage columns with one SQL query
    (ir.rule clauses applied), then fill in translations, _inherits
    parents, symbol_get conversions, function fields and per-field group
    restrictions.

    NOTE(review): this excerpt elides many original lines; inline comments
    mark the gaps.
    """
    # [elided in excerpt: context default / empty-ids early return]
    if fields_to_read == None:
        fields_to_read = self._columns.keys()

    # Construct a clause for the security rules.
    # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
    # or will at least contain self._table.
    rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

    # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
    fields_pre = [f for f in fields_to_read if
                  f == self.CONCURRENCY_CHECK_FIELD
                  or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                  ] + self._inherits.values()
    # [elided in excerpt: presumably `res = []` and a guard on fields_pre]

    # Map each column to its SELECT expression.
    def convert_field(f):
        f_qual = "%s.%s" % (self._table, f) # need fully-qualified references in case len(tables) > 1
        if f in ('create_date', 'write_date'):
            # Truncate to whole seconds for stable comparisons.
            return "date_trunc('second', %s) as %s" % (f_qual, f)
        if f == self.CONCURRENCY_CHECK_FIELD:
            if self._log_access:
                return "COALESCE(%s.write_date, %s.create_date, now())::timestamp AS %s" % (self._table, self._table, f,)
            return "now()::timestamp AS %s" % (f,)
        if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
            # bin_size mode: return the blob's length, not its content.
            return 'length(%s) as "%s"' % (f_qual, f)
        # [elided in excerpt: default — presumably `return f_qual`]

    fields_pre2 = map(convert_field, fields_pre)
    order_by = self._parent_order or self._order
    select_fields = ','.join(fields_pre2 + [self._table + '.id'])
    query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
    # [elided in excerpt: presumably `if rule_clause:`]
    query += " AND " + (' OR '.join(rule_clause))
    query += " ORDER BY " + order_by
    for sub_ids in cr.split_for_in_conditions(ids):
        # [elided in excerpt: presumably `if rule_clause:`]
        cr.execute(query, [tuple(sub_ids)] + rule_params)
        # Fewer rows than ids => some records filtered by rules or gone.
        if cr.rowcount != len(sub_ids):
            raise except_orm(_('AccessError'),
                             _('Operation prohibited by access rules, or performed on an already deleted document (Operation: read, Document type: %s).')
                             % (self._description,))
        # [elided in excerpt: presumably `else:` — no rule clause]
        cr.execute(query, (tuple(sub_ids),))
        res.extend(cr.dictfetchall())
    # [elided in excerpt: presumably `else:` — nothing to SELECT, ids only]
    res = map(lambda x: {'id': x}, ids)

    # Overlay translations for translatable classic columns.
    for f in fields_pre:
        if f == self.CONCURRENCY_CHECK_FIELD:
            # [elided in excerpt: presumably `continue`]
        if self._columns[f].translate:
            ids = [x['id'] for x in res]
            #TODO: optimize out of this loop
            res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
            # [elided in excerpt: loop over res, binding each to `r`]
            r[f] = res_trans.get(r['id'], False) or r[f]

    # Fetch inherited fields from each _inherits parent.
    for table in self._inherits:
        col = self._inherits[table]
        cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
        # [elided in excerpt: presumably `if not cols: continue`]
        res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
        # [elided in excerpt: res3 index by parent id; loop over records]
        if not record[col]: # if the record is deleted from _inherits table?
            # [elided in excerpt: presumably `continue`]
        record.update(res3[record[col]])
        if col not in fields_to_read:
            # [elided in excerpt: presumably `del record[col]`]

    # all fields which need to be post-processed by a simple function (symbol_get)
    fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
    # [elided in excerpt: guard + loop over res, binding each to `r`]
    for f in fields_post:
        r[f] = self._columns[f]._symbol_get(r[f])
    ids = [x['id'] for x in res]

    # all non inherited fields for which the attribute whose name is in load is False
    fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

    # Compute POST fields
    # [elided in excerpt: presumably `todo = {}`]
    for f in fields_post:
        # Group multi-fields so a single get() call computes them together.
        todo.setdefault(self._columns[f]._multi, [])
        todo[self._columns[f]._multi].append(f)
    for key, val in todo.items():
        # [elided in excerpt: presumably `if key:` — multi-field branch]
        res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
        # [elided in excerpt: loops over val/res binding `pos` and `record`]
        if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
        multi_fields = res2.get(record['id'],{})
        # [elided in excerpt: guard on multi_fields]
        record[pos] = multi_fields.get(pos,[])
        # [elided in excerpt: `else:` — single-field branch, loop per field f]
        res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
        # [elided in excerpt: loop over records]
        record[f] = res2[record['id']]

    # Per-field group restrictions: blank out values the user may not read.
    # [elided in excerpt: loop over res binding each dict to `vals`]
    for field in vals.copy():
        # [elided in excerpt: presumably `fobj = None`]
        if field in self._columns:
            fobj = self._columns[field]
        # [elided in excerpt: guard + `groups = fobj.read` + edit flag init]
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
                # [elided in excerpt: presumably mark readable and break]
            elif readonly[0][0] == 0:
                # [elided in excerpt: mark not readable]
            # Replace the value with a type-matching "no permission" marker.
            if type(vals[field]) == type([]):
                # [elided in excerpt: presumably `vals[field] = []`]
            elif type(vals[field]) == type(0.0):
                # [elided in excerpt: presumably `vals[field] = 0`]
            elif type(vals[field]) == type(''):
                vals[field] = '=No Permission='
    # [elided in excerpt: presumably `return res`]
def perm_read(self, cr, user, ids, context=None, details=True):
    """
    Returns some metadata about the given records.

    :param details: if True, \*_uid fields are replaced with the name of the user
    :return: list of ownership dictionaries for each requested record
    :rtype: list of dictionaries with the following keys:

        * create_uid: user who created the record
        * create_date: date when the record was created
        * write_uid: last user who changed the record
        * write_date: date of the last change to the record
        * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
    """
    # [elided in excerpt: context / access setup]
    # Remember whether the caller passed a single id (scalar result expected).
    uniq = isinstance(ids, (int, long))
    # [elided in excerpt: scalar wrap and base field list, presumably `fields = ['id']`]
    if self._log_access:
        fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
    quoted_table = '"%s"' % self._table
    fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
    # LEFT JOIN on ir_model_data picks up the record's XML ID, if any.
    query = '''SELECT %s, __imd.module, __imd.name
               FROM %s LEFT JOIN ir_model_data __imd
               ON (__imd.model = %%s and __imd.res_id = %s.id)
               WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
    cr.execute(query, (self._name, tuple(ids)))
    res = cr.dictfetchall()
    # [elided in excerpt: loops over res/keys binding `r` and `key`]
    r[key] = r[key] or False
    if details and key in ('write_uid', 'create_uid') and r[key]:
        # [elided in excerpt: presumably a `try:` around the name lookup]
        r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
        # [elided in excerpt: presumably the matching `except:`]
        pass # Leave the numeric uid there
    r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
    del r['name'], r['module']
    # [elided in excerpt: presumably `if uniq: return res[0]` then `return res`]
def _check_concurrency(self, cr, ids, context):
    """Raise a ConcurrencyException if any of ``ids`` was modified in the
    database after the timestamp the client captured in
    ``context[CONCURRENCY_CHECK_FIELD]`` (optimistic locking).
    """
    # [elided in excerpt: context guard]
    if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        # [elided in excerpt: presumably `return` — nothing to check]
    check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
    for sub_ids in cr.split_for_in_conditions(ids):
        # [elided in excerpt: `ids_to_check` init and per-id loop]
        id_ref = "%s,%s" % (self._name, id)
        # pop() so each client-provided timestamp is consumed only once.
        update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
        # [elided in excerpt: presumably `if update_date:`]
        ids_to_check.extend([id, update_date])
        if not ids_to_check:
            # [elided in excerpt: presumably `continue`]
        # ids_to_check alternates (id, timestamp) pairs, hence the /2.
        cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
        # [elided in excerpt: fetch + guard binding `res`]
        # mention the first one only to keep the error message readable
        raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
def check_access_rule(self, cr, uid, ids, operation, context=None):
    """Verifies that the operation given by ``operation`` is allowed for the user
       according to ir.rules.

       :param operation: one of ``write``, ``unlink``
       :raise except_orm: * if current ir.rules do not permit this operation.
       :return: None if the operation is allowed
    """
    where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
    # [elided in excerpt: presumably `if where_clause:` guarding the rewrite below]
    where_clause = ' and ' + ' and '.join(where_clause)
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                   ' WHERE ' + self._table + '.id IN %s' + where_clause,
                   [sub_ids] + where_params)
        # Fewer rows than ids means some records are rule-filtered or deleted.
        if cr.rowcount != len(sub_ids):
            raise except_orm(_('AccessError'),
                             _('Operation prohibited by access rules, or performed on an already deleted document (Operation: %s, Document type: %s).')
                             % (operation, self._description))
def unlink(self, cr, uid, ids, context=None):
    """
    Delete records with given ids

    :param cr: database cursor
    :param uid: current user id
    :param ids: id or list of ids
    :param context: (optional) context arguments, like lang, time zone

    :raise AccessError: * if user has no unlink rights on the requested object
                        * if user tries to bypass access rules for unlink on the requested object
    :raise UserError: if the record is default property for other records

    NOTE(review): several lines are elided in this excerpt (marked below).
    """
    # [elided in excerpt: empty-ids / context guards]
    if isinstance(ids, (int, long)):
        # [elided in excerpt: presumably `ids = [ids]`]
    # Snapshot the store triggers BEFORE deleting, while rows still exist.
    result_store = self._store_get_values(cr, uid, ids, None, context)

    self._check_concurrency(cr, ids, context)

    self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)

    # Refuse to delete records that serve as a default ir.property target.
    properties = self.pool.get('ir.property')
    domain = [('res_id', '=', False),
              ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
             ]  # [elided in excerpt: domain closing line]
    if properties.search(cr, uid, domain, context=context):
        raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

    wf_service = netsvc.LocalService("workflow")
    # [elided in excerpt: loop over ids binding `oid`]
    wf_service.trg_delete(uid, self._name, oid, cr)

    self.check_access_rule(cr, uid, ids, 'unlink', context=context)
    pool_model_data = self.pool.get('ir.model.data')
    pool_ir_values = self.pool.get('ir.values')
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('delete from ' + self._table + ' ' \
                   'where id IN %s', (sub_ids,))

        # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
        # as these are not connected with real database foreign keys, and would be dangling references.
        # Step 1. Calling unlink of ir_model_data only for the affected IDS.
        referenced_ids = pool_model_data.search(cr, uid, [('res_id','in',list(sub_ids)),('model','=',self._name)], context=context)
        # Step 2. Marching towards the real deletion of referenced records
        pool_model_data.unlink(cr, uid, referenced_ids, context=context)

        # For the same reason, removing the record relevant to ir_values
        ir_value_ids = pool_ir_values.search(cr, uid,
            ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
            # [elided in excerpt: remaining search args + non-empty guard]
        pool_ir_values.unlink(cr, uid, ir_value_ids, context=context)

    # Recompute stored function fields on other models that referenced us.
    for order, object, store_ids, fields in result_store:
        if object != self._name:
            obj = self.pool.get(object)
            # Only recompute rows that still exist after the delete.
            cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
            rids = map(lambda x: x[0], cr.fetchall())
            # [elided in excerpt: presumably `if rids:`]
            obj._store_set_values(cr, uid, rids, fields, context)
    # [elided in excerpt: presumably `return True`]
def write(self, cr, user, ids, vals, context=None):
    """
    Update records with given ids with the given field values

    :param cr: database cursor
    :param user: current user id
    :param ids: object id or list of object ids to update according to **vals**
    :param vals: field values to update, e.g {'field_name': new_field_value, ...}
    :type vals: dictionary
    :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
    :type context: dictionary

    :raise AccessError: * if user has no write rights on the requested object
                        * if user tries to bypass access rules for write on the requested object
    :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
    :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

    **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:

        + For a many2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
             (3, ID)                cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
             (4, ID)                link to existing record with id = ID (adds a relationship)
             (5)                    unlink all (like using (3,ID) for all linked records)
             (6, 0, [IDs])          replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)

             Example:
                [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]

        + For a one2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)

             Example:
                [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]

        + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
        + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)

    NOTE(review): many lines are elided in this excerpt (marked below).
    """
    # --- per-field group write-permission filter ----------------------
    # Drop from `vals` the fields this user is not allowed to write.
    for field in vals.copy():
        # [elided in excerpt: presumably `fobj = None`]
        if field in self._columns:
            fobj = self._columns[field]
        elif field in self._inherit_fields:
            fobj = self._inherit_fields[field][2]
        # [elided in excerpt: guard + `groups = fobj.write` + edit flag init]
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
                # [elided in excerpt: presumably mark writable and break]
            elif readonly[0][0] == 0:
                # [elided in excerpt: mark not writable / pop the field]

    if isinstance(ids, (int, long)):
        # [elided in excerpt: presumably `ids = [ids]`]
    self._check_concurrency(cr, ids, context)
    self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)

    # Collect store-trigger work before the update touches the rows.
    result = self._store_get_values(cr, user, ids, vals.keys(), context) or []

    # No direct update of parent_left/right
    vals.pop('parent_left', None)
    vals.pop('parent_right', None)

    parents_changed = []
    if self._parent_store and (self._parent_name in vals):
        # The parent_left/right computation may take up to
        # 5 seconds. No need to recompute the values if the
        # parent is the same. Get the current value of the parent
        parent_val = vals[self._parent_name]
        # [elided in excerpt: presumably `if parent_val:`]
        query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL)" % \
                (self._table, self._parent_name, self._parent_name)
        cr.execute(query, (tuple(ids), parent_val))
        # [elided in excerpt: presumably `else:` — parent being cleared]
        query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL)" % \
                (self._table, self._parent_name)
        cr.execute(query, (tuple(ids),))
        parents_changed = map(operator.itemgetter(0), cr.fetchall())

    # --- split vals into direct SQL updates vs set()-based fields -----
    # [elided in excerpt: upd0/upd1/upd_todo/updend/direct inits]
    totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
    # [elided in excerpt: presumably `for field in vals:`]
    if field in self._columns:
        if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
            if (not totranslate) or not self._columns[field].translate:
                # Plain column: build the SET clause fragment directly.
                upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                upd1.append(self._columns[field]._symbol_set[1](vals[field]))
                direct.append(field)
            # [elided in excerpt: presumably `else:`]
            upd_todo.append(field)
        # [elided in excerpt: presumably `else:` — inherited field]
        updend.append(field)
    if field in self._columns \
            and hasattr(self._columns[field], 'selection') \
            # [elided in excerpt: `and vals[field]` condition tail]
        self._check_selection_field_value(cr, user, field, vals[field], context=context)

    if self._log_access:
        upd0.append('write_uid=%s')
        upd0.append('write_date=now()')
        # [elided in excerpt: presumably `upd1.append(user)` and a non-empty guard]

    self.check_access_rule(cr, user, ids, 'write', context=context)
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
                   'where id IN %s', upd1 + [sub_ids])
        if cr.rowcount != len(sub_ids):
            raise except_orm(_('AccessError'),
                             _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)

    # --- translations: write translated value, keep source intact -----
    # [elided in excerpt: `if totranslate:` + loop over direct fields]
    if self._columns[f].translate:
        src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
        # [elided in excerpt: fall back to vals[f] when no source value]
        # Inserting value to DB
        self.write(cr, user, ids, {f: vals[f]})
        self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

    # call the 'set' method of fields which are not classic_write
    upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

    # default element in context must be removed when call a one2many or many2many
    rel_context = context.copy()
    for c in context.items():
        if c[0].startswith('default_'):
            del rel_context[c[0]]

    for field in upd_todo:
        # [elided in excerpt: presumably `for id in ids:`]
        result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

    # --- delegate inherited fields to the _inherits parents -----------
    for table in self._inherits:
        col = self._inherits[table]
        # [elided in excerpt: presumably `nids = []`]
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                       'where id IN %s', (sub_ids,))
            nids.extend([x[0] for x in cr.fetchall()])
        # [elided in excerpt: build `v` from updend fields of this table]
        if self._inherit_fields[val][0] == table:
            # [elided in excerpt: presumably `v[val] = vals[val]` + non-empty guard]
        self.pool.get(table).write(cr, user, nids, v, context)

    self._validate(cr, user, ids, context)

    # TODO: use _order to set dest at the right position and not first node of parent
    # We can't defer parent_store computation because the stored function
    # fields that are computed may refer (directly or indirectly) to
    # parent_left/right (via a child_of domain)
    # [elided in excerpt: `if parents_changed:` and pool-init branch guards]
    self.pool._init_parent[self._name] = True
    # [elided in excerpt: presumably `else:` — recompute intervals now]
    order = self._parent_order or self._order
    parent_val = vals[self._parent_name]
    # [elided in excerpt: presumably `if parent_val:`]
    clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
    # [elided in excerpt: presumably `else:` — moved to the root]
    clause, params = '%s IS NULL' % (self._parent_name,), ()

    for id in parents_changed:
        cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
        pleft, pright = cr.fetchone()
        distance = pright - pleft + 1

        # Positions of current siblings, to locate proper insertion point;
        # this can _not_ be fetched outside the loop, as it needs to be refreshed
        # after each update, in case several nodes are sequentially inserted one
        # next to the other (i.e computed incrementally)
        cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, order), params)
        parents = cr.fetchall()

        # Find Position of the element
        # [elided in excerpt: position init]
        for (parent_pright, parent_id) in parents:
            # [elided in excerpt: stop once we reach the moved node itself]
            position = parent_pright + 1

        # It's the first node of the parent
        # [elided in excerpt: fallback when no sibling found / root case]
        cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
        position = cr.fetchone()[0] + 1

        # Moving a node inside its own subtree would corrupt the tree.
        if pleft < position <= pright:
            raise except_orm(_('UserError'), _('Recursivity Detected.'))

        # Shift the interval, then slide the moved subtree into the gap.
        if pleft < position:
            cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
        # [elided in excerpt: presumably `else:`]
        cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
        cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
        cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))

    # --- recompute stored function fields -----------------------------
    result += self._store_get_values(cr, user, ids, vals.keys(), context)
    # [elided in excerpt: result sort + `done = {}` dedup table init]
    for order, object, ids_to_update, fields_to_recompute in result:
        key = (object, tuple(fields_to_recompute))
        done.setdefault(key, {})
        # avoid to do several times the same computation
        # [elided in excerpt: presumably `todo = []`]
        for id in ids_to_update:
            if id not in done[key]:
                done[key][id] = True
                # [elided in excerpt: presumably `todo.append(id)`]
        self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)

    # Notify the workflow engine of each write.
    wf_service = netsvc.LocalService("workflow")
    # [elided in excerpt: presumably `for id in ids:`]
    wf_service.trg_write(user, self._name, id, cr)
    # [elided in excerpt: presumably `return True`]

# TODO: Should set perm to user.xxx
# create(): inserts one row (plus any _inherits parent rows), fires non-classic
# field setters, maintains the parent_store nested-set and triggers stored
# function recomputation and the workflow 'create' signal.
3570 def create(self, cr, user, vals, context=None):
3572 Create new record with specified value
3574 :param cr: database cursor
3575 :param user: current user id
3577 :param vals: field values for new record, e.g {'field_name': field_value, ...}
3578 :type vals: dictionary
3579 :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
3580 :type context: dictionary
3581 :return: id of new record created
3582 :raise AccessError: * if user has no create rights on the requested object
3583 * if user tries to bypass access rules for create on the requested object
3584 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
3585 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
3587 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
3588 Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
# ACL check first: raises AccessError when the user lacks 'create' permission.
3594 self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)
3596 vals = self._add_missing_default_values(cr, user, vals, context)
# Collect, per _inherits parent table, the values to create/link before this row.
3599 for v in self._inherits:
3600 if self._inherits[v] not in vals:
3603 tocreate[v] = {'id': vals[self._inherits[v]]}
# upd0/upd1/upd2 accumulate the INSERT column list, the value placeholders,
# and the bound parameters respectively.
3604 (upd0, upd1, upd2) = ('', '', [])
3606 for v in vals.keys():
3607 if v in self._inherit_fields:
3608 (table, col, col_detail, original_parent) = self._inherit_fields[v]
3609 tocreate[table][v] = vals[v]
3612 if (v not in self._inherit_fields) and (v not in self._columns):
3615 # Try-except added to filter the creation of those records whose fields are readonly.
3616 # Example : any dashboard which has all the fields readonly.(due to Views(database views))
3618 cr.execute("SELECT nextval('"+self._sequence+"')")
3620 raise except_orm(_('UserError'),
3621 _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
3623 id_new = cr.fetchone()[0]
3624 for table in tocreate:
3625 if self._inherits[table] in vals:
3626 del vals[self._inherits[table]]
3628 record_id = tocreate[table].pop('id', None)
3630 # When linking/creating parent records, force context without 'no_store_function' key that
3631 # defers stored functions computing, as these won't be computed in batch at the end of create().
3632 parent_context = dict(context)
3633 parent_context.pop('no_store_function', None)
3635 if record_id is None or not record_id:
3636 record_id = self.pool.get(table).create(cr, user, tocreate[table], context=parent_context)
3638 self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=parent_context)
3640 upd0 += ',' + self._inherits[table]
3642 upd2.append(record_id)
3644 #Start : Set bool fields to be False if they are not touched(to make search more powerful)
3645 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
3647 for bool_field in bool_fields:
3648 if bool_field not in vals:
3649 vals[bool_field] = False
# Iterate over a copy because readonly-group handling may delete keys from vals.
3651 for field in vals.copy():
3653 if field in self._columns:
3654 fobj = self._columns[field]
3656 fobj = self._inherit_fields[field][2]
3662 for group in groups:
3663 module = group.split(".")[0]
3664 grp = group.split(".")[1]
# NOTE(review): group name, module and uid are interpolated directly into the
# SQL string — prefer a parameterized query (cr.execute(sql, params)) to avoid
# SQL injection, as done elsewhere in this file.
3665 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
3666 (grp, module, 'res.groups', user))
3667 readonly = cr.fetchall()
3668 if readonly[0][0] >= 1:
3671 elif readonly[0][0] == 0:
# Classic (plain SQL) columns go straight into the INSERT; everything else is
# deferred to the field object's set() after the row exists.
3679 if self._columns[field]._classic_write:
3680 upd0 = upd0 + ',"' + field + '"'
3681 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
3682 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
3684 if not isinstance(self._columns[field], fields.related):
3685 upd_todo.append(field)
3686 if field in self._columns \
3687 and hasattr(self._columns[field], 'selection') \
3689 self._check_selection_field_value(cr, user, field, vals[field], context=context)
3690 if self._log_access:
3691 upd0 += ',create_uid,create_date'
3694 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
3695 self.check_access_rule(cr, user, [id_new], 'create', context=context)
# Setter order matters for some field types; sort by declared priority.
3696 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
# Maintain the nested-set (parent_left/parent_right) hierarchy unless deferred.
3698 if self._parent_store and not context.get('defer_parent_store_computation'):
3700 self.pool._init_parent[self._name] = True
3702 parent = vals.get(self._parent_name, False)
3704 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
3706 result_p = cr.fetchall()
3707 for (pleft,) in result_p:
3712 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
3713 pleft_old = cr.fetchone()[0]
# No parent: append the new node after the current maximum parent_right.
3716 cr.execute('select max(parent_right) from '+self._table)
3717 pleft = cr.fetchone()[0] or 0
3718 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
3719 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
3720 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
3722 # 'default_*' keys in the context must be removed when calling a one2many or many2many setter
3723 rel_context = context.copy()
3724 for c in context.items():
3725 if c[0].startswith('default_'):
3726 del rel_context[c[0]]
# Run deferred setters (function/related/o2m/m2m) now that the row exists.
3729 for field in upd_todo:
3730 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
3731 self._validate(cr, user, [id_new], context)
# Recompute stored function fields triggered by this create (batched unless
# the caller opted out via context['no_store_function']).
3733 if not context.get('no_store_function', False):
3734 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
3737 for order, object, ids, fields2 in result:
3738 if not (object, ids, fields2) in done:
3739 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
3740 done.append((object, ids, fields2))
3742 if self._log_create and not (context and context.get('no_store_function', False)):
3743 message = self._description + \
3745 self.name_get(cr, user, [id_new], context=context)[0][1] + \
3746 "' " + _("created.")
3747 self.log(cr, user, id_new, message, True, context=context)
# Notify the workflow engine so any workflow bound to this model starts.
3748 wf_service = netsvc.LocalService("workflow")
3749 wf_service.trg_create(user, self._name, id_new, cr)
3752 def _store_get_values(self, cr, uid, ids, fields, context):
3753 """Returns an ordered list of fields.functions to call due to
3754 an update operation on ``fields`` of records with ``ids``,
3755 obtained by calling the 'store' functions of these fields,
3756 as setup by their 'store' attribute.
3758 :return: [(priority, model_name, [record_ids,], [function_fields,])]
3760 # FIXME: rewrite, cleanup, use real variable names
3761 # e.g.: http://pastie.org/1222060
# Registered store triggers for this model:
# each entry is (model, field_name, ids_fn, trigger_fields, priority[, timeout]).
3763 fncts = self.pool._store_function.get(self._name, [])
3764 for fnct in range(len(fncts)):
# Only keep triggers whose watched fields intersect the written fields.
3769 for f in (fields or []):
3770 if f in fncts[fnct][3]:
3776 result.setdefault(fncts[fnct][0], {})
3778 # uid == 1 for accessing objects having rules defined on store fields
3779 ids2 = fncts[fnct][2](self, cr, 1, ids, context)
3780 for id in filter(None, ids2):
3781 result[fncts[fnct][0]].setdefault(id, [])
3782 result[fncts[fnct][0]][id].append(fnct)
# Regroup: per target model, bucket record ids by the identical tuple of
# triggered functions, then order buckets by trigger priority.
3784 for object in result:
3786 for id, fnct in result[object].items():
3787 k2.setdefault(tuple(fnct), [])
3788 k2[tuple(fnct)].append(id)
3789 for fnct, id in k2.items():
# NOTE(review): 'dict' shadows the builtin here; presumably it was bound to a
# plain {} accumulator earlier in this function — verify before refactoring.
3790 dict.setdefault(fncts[fnct[0]][4], [])
3791 dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4], object, id, map(lambda x: fncts[x][1], fnct)))
3799 def _store_set_values(self, cr, uid, ids, fields, context):
3800 """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
3801 respecting ``multi`` attributes), and stores the resulting values in the database directly."""
# When write dates are tracked, build field_dict: per record id, the list of
# fields whose last write is still inside the trigger's freshness window
# (hours in i[5]); such fields are skipped below to avoid needless recompute.
3806 if self._log_access:
3807 cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
3811 field_dict.setdefault(r[0], [])
3812 res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
3813 write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
3814 for i in self.pool._store_function.get(self._name, []):
3816 up_write_date = write_date + datetime.timedelta(hours=i[5])
3817 if datetime.datetime.now() < up_write_date:
3819 field_dict[r[0]].append(i[1])
# Group multi-fields by their shared _multi key so each group is computed
# with a single call to the field's get().
3825 if self._columns[f]._multi not in keys:
3826 keys.append(self._columns[f]._multi)
3827 todo.setdefault(self._columns[f]._multi, [])
3828 todo[self._columns[f]._multi].append(f)
3832 # uid == 1 for accessing objects having rules defined on store fields
3833 result = self._columns[val[0]].get(cr, self, ids, val, 1, context=context)
3834 for id, value in result.items():
3836 for f in value.keys():
3837 if f in field_dict[id]:
# many2one/one2one getters return (id, name) pairs; store only the id.
3844 if self._columns[v]._type in ('many2one', 'one2one'):
3846 value[v] = value[v][0]
3849 upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
3850 upd1.append(self._columns[v]._symbol_set[1](value[v]))
3853 cr.execute('update "' + self._table + '" set ' + \
3854 ','.join(upd0) + ' where id = %s', upd1)
# Non-multi branch: compute a single field for all ids, then update row by row.
3858 # uid == 1 for accessing objects having rules defined on store fields
3859 result = self._columns[f].get(cr, self, ids, f, 1, context=context)
3860 for r in result.keys():
3862 if r in field_dict.keys():
3863 if f in field_dict[r]:
3865 for id, value in result.items():
3866 if self._columns[f]._type in ('many2one', 'one2one'):
3871 cr.execute('update "' + self._table + '" set ' + \
3872 '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
def perm_write(self, cr, user, ids, fields, context=None):
    """Removed API: permission-level writes are no longer supported.

    Kept only so legacy callers fail loudly instead of silently.

    :raise NotImplementedError: always
    """
    message = _('This method does not exist anymore')
    raise NotImplementedError(message)
3881 # TODO: improve handling of NULL values
3882 def _where_calc(self, cr, user, domain, active_test=True, context=None):
3883 """Computes the WHERE clause needed to implement an OpenERP domain.
3884 :param domain: the domain to compute
3886 :param active_test: whether the default filtering of records with ``active``
3887 field set to ``False`` should be applied.
3888 :return: the query expressing the given domain as provided in domain
3889 :rtype: osv.query.Query
3894 # if the object has a field named 'active', filter out all inactive
3895 # records unless they were explicitly asked for
3896 if 'active' in (self._columns.keys() + self._inherit_fields.keys()) and (active_test and context.get('active_test', True)):
# Only inject the implicit filter when the caller's domain does not already
# mention 'active' anywhere.
3898 active_in_args = False
3900 if a[0] == 'active':
3901 active_in_args = True
3902 if not active_in_args:
3903 domain.insert(0, ('active', '=', 1))
3905 domain = [('active', '=', 1)]
# Non-empty domain: delegate parsing and SQL generation to the expression
# engine; otherwise fall back to a bare query on this model's table.
3909 e = expression.expression(domain)
3910 e.parse(cr, user, self, context)
3911 tables = e.get_tables()
3912 where_clause, where_params = e.to_sql()
3913 where_clause = where_clause and [where_clause] or []
3915 where_clause, where_params, tables = [], [], ['"%s"' % self._table]
3917 return Query(tables, where_clause, where_params)
# Validate an ORDER BY specification against the module-level regex_order
# pattern; raising here prevents SQL injection through the 'order' parameter.
3919 def _check_qorder(self, word):
3920 if not regex_order.match(word):
3921 raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
3924 def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
3925 """Add what's missing in ``query`` to implement all appropriate ir.rules
3926 (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)
3928 :param query: the current query object
# Helper: merge one rule's (clause, params, tables) triple into the query,
# joining up to the parent table first when the rule comes from _inherits.
3930 def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
3932 if parent_model and child_object:
3933 # as inherited rules are being applied, we need to add the missing JOIN
3934 # to reach the parent table (if it was not JOINed yet in the query)
3935 child_object._inherits_join_add(parent_model, query)
3936 query.where_clause += added_clause
3937 query.where_clause_params += added_params
3938 for table in added_tables:
3939 if table not in query.tables:
3940 query.tables.append(table)
3944 # apply main rules on the object
3945 rule_obj = self.pool.get('ir.rule')
3946 apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))
3948 # apply ir.rules from the parents (through _inherits)
3949 for inherited_model in self._inherits:
3950 kwargs = dict(parent_model=inherited_model, child_object=self) #workaround for python2.5
3951 apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
3953 def _generate_m2o_order_by(self, order_field, query):
3955 Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
3956 either native m2o fields or function/related fields that are stored, including
3957 intermediate JOINs for inheritance if required.
3959 :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
# Resolve the column: inherited fields need extra joins to reach the table
# that actually stores the m2o; local fields are qualified on our own table.
3961 if order_field not in self._columns and order_field in self._inherit_fields:
3962 # also add missing joins for reaching the table containing the m2o field
3963 qualified_field = self._inherits_join_calc(order_field, query)
3964 order_field_column = self._inherit_fields[order_field][2]
3966 qualified_field = '"%s"."%s"' % (self._table, order_field)
3967 order_field_column = self._columns[order_field]
3969 assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
# Non-stored function/related m2o fields have no SQL column to sort on:
# log and bail out (sorting silently ignored for this field).
3970 if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
3971 logging.getLogger('orm.search').debug("Many2one function/related fields must be stored " \
3972 "to be used as ordering fields! Ignoring sorting for %s.%s",
3973 self._name, order_field)
3976 # figure out the applicable order_by for the m2o
3977 dest_model = self.pool.get(order_field_column._obj)
3978 m2o_order = dest_model._order
3979 if not regex_order.match(m2o_order):
3980 # _order is complex, can't use it here, so we default to _rec_name
3981 m2o_order = dest_model._rec_name
3983 # extract the field names, to be able to qualify them and add desc/asc
3985 for order_part in m2o_order.split(","):
3986 m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
3987 m2o_order = m2o_order_list
3989 # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
3990 # as we don't want to exclude results that have NULL values for the m2o
3991 src_table, src_field = qualified_field.replace('"','').split('.', 1)
3992 query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
3993 qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
# Returns either a single qualified name or a list of them (multi-field _order).
3994 return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
3997 def _generate_order_by(self, order_spec, query):
3999 Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
4000 a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.
4002 :raise: except_orm in case order_spec is malformed
# Fall back to the model's default _order when no usable spec is provided.
4004 order_by_clause = self._order
4006 order_by_elements = []
# Rejects malformed specs (also guards against SQL injection via 'order').
4007 self._check_qorder(order_spec)
4008 for order_part in order_spec.split(','):
4009 order_split = order_part.strip().split(' ')
4010 order_field = order_split[0].strip()
4011 order_direction = order_split[1].strip() if len(order_split) == 2 else ''
4013 if order_field == 'id':
4014 inner_clause = '"%s"."%s"' % (self._table, order_field)
4015 elif order_field in self._columns:
4016 order_column = self._columns[order_field]
4017 if order_column._classic_read:
4018 inner_clause = '"%s"."%s"' % (self._table, order_field)
4019 elif order_column._type == 'many2one':
4020 inner_clause = self._generate_m2o_order_by(order_field, query)
4022 continue # ignore non-readable or "non-joinable" fields
# Inherited fields: qualify through the parent table (adding joins as needed).
4023 elif order_field in self._inherit_fields:
4024 parent_obj = self.pool.get(self._inherit_fields[order_field][3])
4025 order_column = parent_obj._columns[order_field]
4026 if order_column._classic_read:
4027 inner_clause = self._inherits_join_calc(order_field, query)
4028 elif order_column._type == 'many2one':
4029 inner_clause = self._generate_m2o_order_by(order_field, query)
4031 continue # ignore non-readable or "non-joinable" fields
# _generate_m2o_order_by may return several qualified columns for one field.
4033 if isinstance(inner_clause, list):
4034 for clause in inner_clause:
4035 order_by_elements.append("%s %s" % (clause, order_direction))
4037 order_by_elements.append("%s %s" % (inner_clause, order_direction))
4038 if order_by_elements:
4039 order_by_clause = ",".join(order_by_elements)
4041 return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
4043 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
4045 Private implementation of search() method, allowing specifying the uid to use for the access right check.
4046 This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
4047 by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
4048 This is ok at the security level because this method is private and not callable through XML-RPC.
4050 :param access_rights_uid: optional user ID to use when checking access rights
4051 (not for ir.rules, this is only for ir.model.access)
4055 self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)
# Build the query: domain -> WHERE, then record rules, then ORDER BY.
4057 query = self._where_calc(cr, user, args, context=context)
4058 self._apply_ir_rules(cr, user, query, 'read', context=context)
4059 order_by = self._generate_order_by(order, query)
4060 from_clause, where_clause, where_clause_params = query.get_sql()
4062 limit_str = limit and ' limit %d' % limit or ''
4063 offset_str = offset and ' offset %d' % offset or ''
4064 where_str = where_clause and (" WHERE %s" % where_clause) or ''
# count=True branch. NOTE(review): limit/offset are appended to the COUNT
# query as well, which caps/shifts the count — confirm this is intended.
4067 cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
4070 cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
4072 return [x[0] for x in res]
4074 # returns the different values ever entered for one field
4075 # this is used, for example, in the client when the user hits enter on
4077 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
# Inherited field: delegate to the model that actually owns the column;
# otherwise defer to the column object's own search() implementation.
4080 if field in self._inherit_fields:
4081 return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
4083 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
4085 def copy_data(self, cr, uid, id, default=None, context=None):
4087 Copy given record's data with all its fields values
4089 :param cr: database cursor
4090 :param user: current user id
4091 :param id: id of the record to copy
4092 :param default: field values to override in the original values of the copied record
4093 :type default: dictionary
4094 :param context: context arguments, like lang, time zone
4095 :type context: dictionary
4096 :return: dictionary containing all the field values
4102 # avoid recursion through already copied records in case of circular relationship
4103 seen_map = context.setdefault('__copy_data_seen',{})
4104 if id in seen_map.setdefault(self._name,[]):
4106 seen_map[self._name].append(id)
# Default 'state' comes from the model's defaults unless overridden by caller.
4110 if 'state' not in default:
4111 if 'state' in self._defaults:
4112 if callable(self._defaults['state']):
4113 default['state'] = self._defaults['state'](self, cr, uid, context)
4115 default['state'] = self._defaults['state']
# Read raw (untranslated) values; translations are handled separately by
# copy_translations().
4117 context_wo_lang = context.copy()
4118 if 'lang' in context:
4119 del context_wo_lang['lang']
4120 data = self.read(cr, uid, [id,], context=context_wo_lang)
4124 raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
# Post-process every field according to its type: audit columns dropped,
# m2o reduced to the raw id, o2m/o2o children deep-copied, m2m relinked.
4126 fields = self.fields_get(cr, uid, context=context)
4128 ftype = fields[f]['type']
4130 if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
4134 data[f] = default[f]
4135 elif 'function' in fields[f]:
4137 elif ftype == 'many2one':
4139 data[f] = data[f] and data[f][0]
4142 elif ftype in ('one2many', 'one2one'):
4144 rel = self.pool.get(fields[f]['relation'])
4146 # duplicate following the order of the ids
4147 # because we'll rely on it later for copying
4148 # translations in copy_translation()!
4150 for rel_id in data[f]:
4151 # the lines are first duplicated using the wrong (old)
4152 # parent but then are reassigned to the correct one thanks
4153 # to the (0, 0, ...)
4154 d = rel.copy_data(cr, uid, rel_id, context=context)
4156 res.append((0, 0, d))
4158 elif ftype == 'many2many':
4159 data[f] = [(6, 0, data[f])]
4163 # make sure we don't break the current parent_store structure and
4164 # force a clean recompute!
4165 for parent_column in ['parent_left', 'parent_right']:
4166 data.pop(parent_column, None)
4168 # remove _inherits field's from data recursively, missing parents will
4169 # be created by create() (so that copy() copy everything).
4170 def remove_ids(inherits_dict):
4171 for parent_table in inherits_dict:
4172 del data[inherits_dict[parent_table]]
4173 remove_ids(self.pool.get(parent_table)._inherits)
4174 remove_ids(self._inherits)
# Duplicate the ir.translation rows of old_id onto new_id, recursing into
# one2one/one2many children (whose ordering copy_data() preserved).
4177 def copy_translations(self, cr, uid, old_id, new_id, context=None):
4181 # avoid recursion through already copied records in case of circular relationship
4182 seen_map = context.setdefault('__copy_translations_seen',{})
4183 if old_id in seen_map.setdefault(self._name,[]):
4185 seen_map[self._name].append(old_id)
4187 trans_obj = self.pool.get('ir.translation')
4188 fields = self.fields_get(cr, uid, context=context)
4190 translation_records = []
4191 for field_name, field_def in fields.items():
4192 # we must recursively copy the translations for o2o and o2m
4193 if field_def['type'] in ('one2one', 'one2many'):
4194 target_obj = self.pool.get(field_def['relation'])
4195 old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
4196 # here we rely on the order of the ids to match the translations
4197 # as foreseen in copy_data()
4198 old_children = sorted(old_record[field_name])
4199 new_children = sorted(new_record[field_name])
4200 for (old_child, new_child) in zip(old_children, new_children):
4201 target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
4202 # and for translatable fields we keep them for copy
4203 elif field_def.get('translate'):
# Inherited fields store their translations under the parent model's name.
4205 if field_name in self._columns:
4206 trans_name = self._name + "," + field_name
4207 elif field_name in self._inherit_fields:
4208 trans_name = self._inherit_fields[field_name][0] + "," + field_name
4210 trans_ids = trans_obj.search(cr, uid, [
4211 ('name', '=', trans_name),
4212 ('res_id', '=', old_id)
4214 translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))
# Re-create each collected translation, pointed at the new record.
4216 for record in translation_records:
4218 record['res_id'] = new_id
4219 trans_obj.create(cr, uid, record, context=context)
4222 def copy(self, cr, uid, id, default=None, context=None):
4224 Duplicate record with given id updating it with default values
4226 :param cr: database cursor
4227 :param uid: current user id
4228 :param id: id of the record to copy
4229 :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
4230 :type default: dictionary
4231 :param context: context arguments, like lang, time zone
4232 :type context: dictionary
# Work on a copy of the context: copy_data()/copy_translations() add
# '__copy_*_seen' bookkeeping keys that must not leak to the caller.
4238 context = context.copy()
4239 data = self.copy_data(cr, uid, id, default, context)
4240 new_id = self.create(cr, uid, data, context)
4241 self.copy_translations(cr, uid, id, new_id, context)
# Return True when every id in ``ids`` exists in this model's table.
# Accepts a single int/long id as well as a list.
4244 def exists(self, cr, uid, ids, context=None):
4245 if type(ids) in (int, long):
# NOTE(review): no space before WHERE — PostgreSQL still tokenizes
# '"table"WHERE' correctly after the closing quote, but an explicit space
# would be clearer.
4247 query = 'SELECT count(1) FROM "%s"' % (self._table)
4248 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
4249 return cr.fetchone()[0] == len(ids)
def check_recursion(self, cr, uid, ids, context=None, parent=None):
    """Deprecated public wrapper around :meth:`_check_recursion`.

    Emits a DeprecationWarning, validates ``parent``, then delegates
    unchanged to the private implementation.
    """
    warnings.warn(
        "You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % self._name,
        DeprecationWarning, stacklevel=3)
    valid_parent = parent is None or parent in self._columns or parent in self._inherit_fields
    assert valid_parent, \
        "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
    return self._check_recursion(cr, uid, ids, context, parent)
4258 def _check_recursion(self, cr, uid, ids, context=None, parent=None):
4260 Verifies that there is no loop in a hierarchical structure of records,
4261 by following the parent relationship using the **parent** field until a loop
4262 is detected or until a top-level record is found.
4264 :param cr: database cursor
4265 :param uid: current user id
4266 :param ids: list of ids of records to check
4267 :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
4268 :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
4272 parent = self._parent_name
4274 query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
# Walk up one level per outer iteration, batching the SELECT in chunks of
# cr.IN_MAX ids to stay under the SQL parameter limit.
4277 for i in range(0, len(ids), cr.IN_MAX):
4278 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
4279 cr.execute(query, (tuple(sub_ids_parent),))
4280 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
4281 ids_parent = ids_parent2
# A parent reappearing in the original id set means a cycle was found.
4282 for i in ids_parent:
4287 def _get_xml_ids(self, cr, uid, ids, *args, **kwargs):
4288 """Find out the XML ID(s) of any database record.
4290 **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
4292 :return: map of ids to the list of their fully qualified XML IDs
4293 (empty list when there's none).
# Look up ir.model.data entries pointing at these records.
4295 model_data_obj = self.pool.get('ir.model.data')
4296 data_ids = model_data_obj.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
4297 data_results = model_data_obj.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
4300 # can't use dict.fromkeys() as the list would be shared!
4302 for record in data_results:
4303 result[record['res_id']].append('%(module)s.%(name)s' % record)
4306 def get_xml_id(self, cr, uid, ids, *args, **kwargs):
4307 """Find out the XML ID of any database record, if there
4308 is one. This method works as a possible implementation
4309 for a function field, to be able to add it to any
4310 model object easily, referencing it as ``osv.osv.get_xml_id``.
4312 When multiple XML IDs exist for a record, only one
4313 of them is returned (randomly).
4315 **Synopsis**: ``get_xml_id(cr, uid, ids) -> { 'id': 'module.xml_id' }``
4317 :return: map of ids to their fully qualified XML ID,
4318 defaulting to an empty string when there's none
4319 (to be usable as a function field).
# Flatten the per-id lists returned by _get_xml_ids down to a single value.
4321 results = self._get_xml_ids(cr, uid, ids)
4322 for k, v in results.items():
4329 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: