1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
# Object relational mapping to the postgresql module
#    . Hierarchical structure
#    . Constraints consistency, validations
#    . Object metadata depends on its status
#    . Optimised processing by complex queries (multiple actions at once)
#    . Default field values
#    . Permissions optimisation
#    . Persistent object: DB postgresql
#    . Multi-level caching system
#    . Two different inheritance mechanisms
#    - classical fields (varchar, integer, boolean, ...)
#    - relational fields (one2many, many2one, many2many)
53 from lxml import etree
54 from tools.config import config
55 from tools.translate import _
58 from query import Query
60 from tools.safe_eval import safe_eval as eval
62 # List of etree._Element subclasses that we choose to ignore when parsing XML.
63 from tools import SKIPPED_ELEMENT_TYPES
65 regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
67 POSTGRES_CONFDELTYPES = {
75 def last_day_of_current_month():
76 today = datetime.date.today()
77 last_day = str(calendar.monthrange(today.year, today.month)[1])
78 return time.strftime('%Y-%m-' + last_day)
80 def intersect(la, lb):
81 return filter(lambda x: x in lb, la)
83 class except_orm(Exception):
84 def __init__(self, name, value):
87 self.args = (name, value)
89 class BrowseRecordError(Exception):
92 # Readonly python database object browser
93 class browse_null(object):
98 def __getitem__(self, name):
101 def __getattr__(self, name):
102 return None # XXX: return self ?
110 def __nonzero__(self):
113 def __unicode__(self):
118 # TODO: execute an object method on browse_record_list
120 class browse_record_list(list):
122 def __init__(self, lst, context=None):
125 super(browse_record_list, self).__init__(lst)
126 self.context = context
129 class browse_record(object):
130 logger = netsvc.Logger()
132 def __init__(self, cr, uid, id, table, cache, context=None, list_class=None, fields_process=None):
134 table : the object (inherited from orm)
135 context : dictionary with an optional context
137 if fields_process is None:
141 self._list_class = list_class or browse_record_list
146 self._table_name = self._table._name
147 self.__logger = logging.getLogger(
148 'osv.browse_record.' + self._table_name)
149 self._context = context
150 self._fields_process = fields_process
152 cache.setdefault(table._name, {})
153 self._data = cache[table._name]
155 if not (id and isinstance(id, (int, long,))):
156 raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
157 # if not table.exists(cr, uid, id, context):
158 # raise BrowseRecordError(_('Object %s does not exists') % (self,))
160 if id not in self._data:
161 self._data[id] = {'id': id}
165 def __getitem__(self, name):
169 if name not in self._data[self._id]:
170 # build the list of fields we will fetch
172 # fetch the definition of the field which was asked for
173 if name in self._table._columns:
174 col = self._table._columns[name]
175 elif name in self._table._inherit_fields:
176 col = self._table._inherit_fields[name][2]
177 elif hasattr(self._table, str(name)):
178 attr = getattr(self._table, name)
180 if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
181 return lambda *args, **argv: attr(self._cr, self._uid, [self._id], *args, **argv)
185 self.logger.notifyChannel("browse_record", netsvc.LOG_WARNING,
186 "Field '%s' does not exist in object '%s': \n%s" % (
187 name, self, ''.join(traceback.format_exc())))
188 raise KeyError("Field '%s' does not exist in object '%s'" % (
191 # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
193 # gen the list of "local" (ie not inherited) fields which are classic or many2one
194 fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
195 # gen the list of inherited fields
196 inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
197 # complete the field list with the inherited fields which are classic or many2one
198 fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
199 # otherwise we fetch only that field
201 fields_to_fetch = [(name, col)]
202 ids = filter(lambda id: name not in self._data[id], self._data.keys())
204 field_names = map(lambda x: x[0], fields_to_fetch)
205 field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
207 # TODO: improve this, very slow for reports
208 if self._fields_process:
209 lang = self._context.get('lang', 'en_US') or 'en_US'
210 lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
212 raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
213 lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])
215 for field_name, field_column in fields_to_fetch:
216 if field_column._type in self._fields_process:
217 for result_line in field_values:
218 result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
219 if result_line[field_name]:
220 result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)
223 # Where did those ids come from? Perhaps old entries in ir_model_dat?
224 self.__logger.warn("No field_values found for ids %s in %s", ids, self)
225 raise KeyError('Field %s not found in %s'%(name, self))
226 # create browse records for 'remote' objects
227 for result_line in field_values:
229 for field_name, field_column in fields_to_fetch:
230 if field_column._type in ('many2one', 'one2one'):
231 if result_line[field_name]:
232 obj = self._table.pool.get(field_column._obj)
233 if isinstance(result_line[field_name], (list, tuple)):
234 value = result_line[field_name][0]
236 value = result_line[field_name]
238 # FIXME: this happen when a _inherits object
239 # overwrite a field of it parent. Need
240 # testing to be sure we got the right
241 # object and not the parent one.
242 if not isinstance(value, browse_record):
244 # In some cases the target model is not available yet, so we must ignore it,
245 # which is safe in most cases, this value will just be loaded later when needed.
246 # This situation can be caused by custom fields that connect objects with m2o without
247 # respecting module dependencies, causing relationships to be connected to soon when
248 # the target is not loaded yet.
250 new_data[field_name] = browse_record(self._cr,
251 self._uid, value, obj, self._cache,
252 context=self._context,
253 list_class=self._list_class,
254 fields_process=self._fields_process)
256 new_data[field_name] = value
258 new_data[field_name] = browse_null()
260 new_data[field_name] = browse_null()
261 elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
262 new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
263 elif field_column._type in ('reference'):
264 if result_line[field_name]:
265 if isinstance(result_line[field_name], browse_record):
266 new_data[field_name] = result_line[field_name]
268 ref_obj, ref_id = result_line[field_name].split(',')
269 ref_id = long(ref_id)
271 obj = self._table.pool.get(ref_obj)
272 new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
274 new_data[field_name] = browse_null()
276 new_data[field_name] = browse_null()
278 new_data[field_name] = result_line[field_name]
279 self._data[result_line['id']].update(new_data)
281 if not name in self._data[self._id]:
282 # How did this happen? Could be a missing model due to custom fields used too soon, see above.
283 self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
284 "Fields to fetch: %s, Field values: %s"%(field_names, field_values))
285 self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
286 "Cached: %s, Table: %s"%(self._data[self._id], self._table))
287 raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
288 return self._data[self._id][name]
290 def __getattr__(self, name):
294 raise AttributeError(e)
296 def __contains__(self, name):
297 return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)
299 def __hasattr__(self, name):
306 return "browse_record(%s, %d)" % (self._table_name, self._id)
308 def __eq__(self, other):
309 if not isinstance(other, browse_record):
311 return (self._table_name, self._id) == (other._table_name, other._id)
313 def __ne__(self, other):
314 if not isinstance(other, browse_record):
316 return (self._table_name, self._id) != (other._table_name, other._id)
318 # we need to define __unicode__ even though we've already defined __str__
319 # because we have overridden __getattr__
320 def __unicode__(self):
321 return unicode(str(self))
324 return hash((self._table_name, self._id))
332 (type returned by postgres when the column was created, type expression to create the column)
336 fields.boolean: 'bool',
337 fields.integer: 'int4',
338 fields.integer_big: 'int8',
342 fields.datetime: 'timestamp',
343 fields.binary: 'bytea',
344 fields.many2one: 'int4',
346 if type(f) in type_dict:
347 f_type = (type_dict[type(f)], type_dict[type(f)])
348 elif isinstance(f, fields.float):
350 f_type = ('numeric', 'NUMERIC')
352 f_type = ('float8', 'DOUBLE PRECISION')
353 elif isinstance(f, (fields.char, fields.reference)):
354 f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
355 elif isinstance(f, fields.selection):
356 if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
357 f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
358 elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
361 f_size = getattr(f, 'size', None) or 16
364 f_type = ('int4', 'INTEGER')
366 f_type = ('varchar', 'VARCHAR(%d)' % f_size)
367 elif isinstance(f, fields.function) and eval('fields.'+(f._type), globals()) in type_dict:
368 t = eval('fields.'+(f._type), globals())
369 f_type = (type_dict[t], type_dict[t])
370 elif isinstance(f, fields.function) and f._type == 'float':
372 f_type = ('numeric', 'NUMERIC')
374 f_type = ('float8', 'DOUBLE PRECISION')
375 elif isinstance(f, fields.function) and f._type == 'selection':
376 f_type = ('text', 'text')
377 elif isinstance(f, fields.function) and f._type == 'char':
378 f_type = ('varchar', 'VARCHAR(%d)' % (f.size))
380 logger = netsvc.Logger()
381 logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
386 class orm_template(object):
392 _parent_name = 'parent_id'
393 _parent_store = False
394 _parent_order = False
404 CONCURRENCY_CHECK_FIELD = '__last_update'
405 def log(self, cr, uid, id, message, secondary=False, context=None):
406 return self.pool.get('res.log').create(cr, uid,
409 'res_model': self._name,
410 'secondary': secondary,
416 def view_init(self, cr, uid, fields_list, context=None):
417 """Override this method to do specific things when a view on the object is opened."""
420 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
421 raise NotImplementedError(_('The read_group method is not implemented on this object !'))
423 def _field_create(self, cr, context=None):
426 cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
428 cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
429 model_id = cr.fetchone()[0]
430 cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
432 model_id = cr.fetchone()[0]
433 if 'module' in context:
434 name_id = 'model_'+self._name.replace('.', '_')
435 cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
437 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
438 (name_id, context['module'], 'ir.model', model_id)
443 cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
445 for rec in cr.dictfetchall():
446 cols[rec['name']] = rec
448 for (k, f) in self._columns.items():
450 'model_id': model_id,
453 'field_description': f.string.replace("'", " "),
455 'relation': f._obj or '',
456 'view_load': (f.view_load and 1) or 0,
457 'select_level': tools.ustr(f.select or 0),
458 'readonly': (f.readonly and 1) or 0,
459 'required': (f.required and 1) or 0,
460 'selectable': (f.selectable and 1) or 0,
461 'translate': (f.translate and 1) or 0,
462 'relation_field': (f._type=='one2many' and isinstance(f, fields.one2many)) and f._fields_id or '',
465 # When its a custom field,it does not contain f.select
466 if context.get('field_state', 'base') == 'manual':
467 if context.get('field_name', '') == k:
468 vals['select_level'] = context.get('select', '0')
469 #setting value to let the problem NOT occur next time
471 vals['select_level'] = cols[k]['select_level']
474 cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
475 id = cr.fetchone()[0]
477 cr.execute("""INSERT INTO ir_model_fields (
478 id, model_id, model, name, field_description, ttype,
479 relation,view_load,state,select_level,relation_field, translate
481 %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
483 id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
484 vals['relation'], bool(vals['view_load']), 'base',
485 vals['select_level'], vals['relation_field'], bool(vals['translate'])
487 if 'module' in context:
488 name1 = 'field_' + self._table + '_' + k
489 cr.execute("select name from ir_model_data where name=%s", (name1,))
491 name1 = name1 + "_" + str(id)
492 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
493 (name1, context['module'], 'ir.model.fields', id)
496 for key, val in vals.items():
497 if cols[k][key] != vals[key]:
498 cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
500 cr.execute("""UPDATE ir_model_fields SET
501 model_id=%s, field_description=%s, ttype=%s, relation=%s,
502 view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s
504 model=%s AND name=%s""", (
505 vals['model_id'], vals['field_description'], vals['ttype'],
506 vals['relation'], bool(vals['view_load']),
507 vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['model'], vals['name']
512 def _auto_init(self, cr, context=None):
513 self._field_create(cr, context=context)
515 def __init__(self, cr):
516 if not self._name and not hasattr(self, '_inherit'):
517 name = type(self).__name__.split('.')[0]
518 msg = "The class %s has to have a _name attribute" % name
520 logger = netsvc.Logger()
521 logger.notifyChannel('orm', netsvc.LOG_ERROR, msg)
522 raise except_orm('ValueError', msg)
524 if not self._description:
525 self._description = self._name
527 self._table = self._name.replace('.', '_')
529 def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
530 """Fetch records as objects allowing to use dot notation to browse fields and relations
532 :param cr: database cursor
533 :param user: current user id
534 :param select: id or list of ids
535 :param context: context arguments, like lang, time zone
536 :rtype: object or list of objects requested
539 self._list_class = list_class or browse_record_list
541 # need to accepts ints and longs because ids coming from a method
542 # launched by button in the interface have a type long...
543 if isinstance(select, (int, long)):
544 return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
545 elif isinstance(select, list):
546 return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
550 def __export_row(self, cr, uid, row, fields, context=None):
554 def check_type(field_type):
555 if field_type == 'float':
557 elif field_type == 'integer':
559 elif field_type == 'boolean':
563 def selection_field(in_field):
564 col_obj = self.pool.get(in_field.keys()[0])
565 if f[i] in col_obj._columns.keys():
566 return col_obj._columns[f[i]]
567 elif f[i] in col_obj._inherits.keys():
568 selection_field(col_obj._inherits)
573 data = map(lambda x: '', range(len(fields)))
575 for fpos in range(len(fields)):
584 model_data = self.pool.get('ir.model.data')
585 data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
587 d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
589 r = '%s.%s' % (d['module'], d['name'])
596 # To display external name of selection field when its exported
598 if f[i] in self._columns.keys():
599 cols = self._columns[f[i]]
600 elif f[i] in self._inherit_fields.keys():
601 cols = selection_field(self._inherits)
602 if cols and cols._type == 'selection':
603 sel_list = cols.selection
604 if r and type(sel_list) == type([]):
605 r = [x[1] for x in sel_list if r==x[0]]
606 r = r and r[0] or False
608 if f[i] in self._columns:
609 r = check_type(self._columns[f[i]]._type)
610 elif f[i] in self._inherit_fields:
611 r = check_type(self._inherit_fields[f[i]][2]._type)
612 data[fpos] = r or False
614 if isinstance(r, (browse_record_list, list)):
616 fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
619 if [x for x in fields2 if x]:
623 lines2 = self.__export_row(cr, uid, row2, fields2,
626 for fpos2 in range(len(fields)):
627 if lines2 and lines2[0][fpos2]:
628 data[fpos2] = lines2[0][fpos2]
632 name_relation = self.pool.get(rr._table_name)._rec_name
633 if isinstance(rr[name_relation], browse_record):
634 rr = rr[name_relation]
635 rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
636 rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
637 dt += tools.ustr(rr_name or '') + ','
647 if isinstance(r, browse_record):
648 r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
649 r = r and r[0] and r[0][1] or ''
650 data[fpos] = tools.ustr(r or '')
651 return [data] + lines
653 def export_data(self, cr, uid, ids, fields_to_export, context=None):
655 Export fields for selected objects
657 :param cr: database cursor
658 :param uid: current user id
659 :param ids: list of ids
660 :param fields_to_export: list of fields
661 :param context: context arguments, like lang, time zone
662 :rtype: dictionary with a *datas* matrix
664 This method is used when exporting data via client menu
669 cols = self._columns.copy()
670 for f in self._inherit_fields:
671 cols.update({f: self._inherit_fields[f][2]})
673 if x=='.id': return [x]
674 return x.replace(':id','/id').replace('.id','/.id').split('/')
675 fields_to_export = map(fsplit, fields_to_export)
676 fields_export = fields_to_export + []
680 for row in self.browse(cr, uid, ids, context):
681 datas += self.__export_row(cr, uid, row, fields_to_export, context)
682 return {'datas': datas}
684 def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
686 Import given data in given module
688 :param cr: database cursor
689 :param uid: current user id
690 :param fields: list of fields
691 :param data: data to import
692 :param mode: 'init' or 'update' for record creation
693 :param current_module: module name
694 :param noupdate: flag for record creation
695 :param context: context arguments, like lang, time zone,
696 :param filename: optional file to store partial import state for recovery
699 This method is used when importing data via client menu.
701 Example of fields to import for a sale.order::
704 partner_id, (=name_search)
705 order_line/.id, (=database_id)
707 order_line/product_id/id, (=xml id)
708 order_line/price_unit,
709 order_line/product_uom_qty,
710 order_line/product_uom/id (=xml_id)
714 def _replace_field(x):
715 x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
716 return x.replace(':id','/id').split('/')
717 fields = map(_replace_field, fields)
718 logger = netsvc.Logger()
719 ir_model_data_obj = self.pool.get('ir.model.data')
721 # mode: id (XML id) or .id (database id) or False for name_get
722 def _get_id(model_name, id, current_module=False, mode='id'):
725 obj_model = self.pool.get(model_name)
726 dom = [('id', '=', id)]
727 if obj_model._columns.get('active'):
728 dom.append(('active', 'in', ['True','False']))
729 ids = obj_model.search(cr, uid, dom, context=context)
731 raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, id))
734 module, xml_id = id.rsplit('.', 1)
736 module, xml_id = current_module, id
737 record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
738 ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id'], context=context)
739 if not ir_model_data:
740 raise ValueError('No references to %s.%s' % (module, xml_id))
741 id = ir_model_data[0]['res_id']
743 obj_model = self.pool.get(model_name)
744 ids = obj_model.name_search(cr, uid, id, operator='=', context=context)
746 raise ValueError('No record found for %s' % (id,))
751 # datas: a list of records, each record is defined by a list of values
752 # prefix: a list of prefix fields ['line_ids']
753 # position: the line to process, skip is False if it's the first line of the current record
755 # (res, position, warning, res_id) with
756 # res: the record for the next line to process (including it's one2many)
757 # position: the new position for the next line
758 # res_id: the ID of the record if it's a modification
759 def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0, skip=0):
760 line = datas[position]
768 for i in range(len(fields)):
771 raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
774 if field[:len(prefix)] <> prefix:
779 # ID of the record using a XML ID
780 if field[len(prefix)]=='id':
782 data_res_id = _get_id(model_name, line[i], current_module, 'id')
783 except ValueError, e:
788 # ID of the record using a database ID
789 elif field[len(prefix)]=='.id':
790 data_res_id = _get_id(model_name, line[i], current_module, '.id')
793 # recursive call for getting children and returning [(0,0,{})] or [(1,ID,{})]
794 if fields_def[field[len(prefix)]]['type']=='one2many':
795 if field[len(prefix)] in done:
797 done[field[len(prefix)]] = True
798 relation_obj = self.pool.get(fields_def[field[len(prefix)]]['relation'])
799 newfd = relation_obj.fields_get(cr, uid, context=context)
803 while pos < len(datas):
804 res2 = process_liness(self, datas, prefix + [field[len(prefix)]], current_module, relation_obj._name, newfd, pos, first)
807 (newrow, pos, w2, data_res_id2, xml_id2) = res2
808 nbrmax = max(nbrmax, pos)
811 if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
813 res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )
815 elif fields_def[field[len(prefix)]]['type']=='many2one':
816 relation = fields_def[field[len(prefix)]]['relation']
817 if len(field) == len(prefix)+1:
820 mode = field[len(prefix)+1]
821 res = line[i] and _get_id(relation, line[i], current_module, mode) or False
823 elif fields_def[field[len(prefix)]]['type']=='many2many':
824 relation = fields_def[field[len(prefix)]]['relation']
825 if len(field) == len(prefix)+1:
828 mode = field[len(prefix)+1]
830 # TODO: improve this by using csv.csv_reader
833 for db_id in line[i].split(config.get('csv_internal_sep')):
834 res.append( _get_id(relation, db_id, current_module, mode) )
837 elif fields_def[field[len(prefix)]]['type'] == 'integer':
838 res = line[i] and int(line[i]) or 0
839 elif fields_def[field[len(prefix)]]['type'] == 'boolean':
840 res = line[i].lower() not in ('0', 'false', 'off')
841 elif fields_def[field[len(prefix)]]['type'] == 'float':
842 res = line[i] and float(line[i]) or 0.0
843 elif fields_def[field[len(prefix)]]['type'] == 'selection':
844 for key, val in fields_def[field[len(prefix)]]['selection']:
845 if line[i] in [tools.ustr(key), tools.ustr(val)]:
848 if line[i] and not res:
849 logger.notifyChannel("import", netsvc.LOG_WARNING,
850 _("key '%s' not found in selection field '%s'") % \
851 (line[i], field[len(prefix)]))
852 warning += [_("Key/value '%s' not found in selection field '%s'") % (line[i], field[len(prefix)])]
856 row[field[len(prefix)]] = res or False
858 result = (row, nbrmax, warning, data_res_id, xml_id)
861 fields_def = self.fields_get(cr, uid, context=context)
863 if config.get('import_partial', False) and filename:
864 data = pickle.load(file(config.get('import_partial')))
865 original_value = data.get(filename, 0)
868 while position<len(datas):
871 (res, position, warning, res_id, xml_id) = \
872 process_liness(self, datas, [], current_module, self._name, fields_def, position=position)
875 return (-1, res, 'Line ' + str(position) +' : ' + '!\n'.join(warning), '')
878 id = ir_model_data_obj._update(cr, uid, self._name,
879 current_module, res, mode=mode, xml_id=xml_id,
880 noupdate=noupdate, res_id=res_id, context=context)
882 return (-1, res, 'Line ' + str(position) +' : ' + str(e), '')
884 if config.get('import_partial', False) and filename and (not (position%100)):
885 data = pickle.load(file(config.get('import_partial')))
886 data[filename] = position
887 pickle.dump(data, file(config.get('import_partial'), 'wb'))
888 if context.get('defer_parent_store_computation'):
889 self._parent_store_compute(cr)
892 if context.get('defer_parent_store_computation'):
893 self._parent_store_compute(cr)
894 return (position, 0, 0, 0)
896 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
898 Read records with given ids with the given fields
900 :param cr: database cursor
901 :param user: current user id
902 :param ids: id or list of the ids of the records to read
903 :param fields: optional list of field names to return (default: all fields would be returned)
904 :type fields: list (example ['field_name_1', ...])
905 :param context: optional context dictionary - it may contains keys for specifying certain options
906 like ``context_lang``, ``context_tz`` to alter the results of the call.
907 A special ``bin_size`` boolean flag may also be passed in the context to request the
908 value of all fields.binary columns to be returned as the size of the binary instead of its
909 contents. This can also be selectively overriden by passing a field-specific flag
910 in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
911 Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
912 :return: list of dictionaries((dictionary per record asked)) with requested field values
913 :rtype: [{‘name_of_the_field’: value, ...}, ...]
914 :raise AccessError: * if user has no read rights on the requested object
915 * if user tries to bypass access rules for read on the requested object
918 raise NotImplementedError(_('The read method is not implemented on this object !'))
920 def get_invalid_fields(self, cr, uid):
921 return list(self._invalids)
923 def _validate(self, cr, uid, ids, context=None):
924 context = context or {}
925 lng = context.get('lang', False) or 'en_US'
926 trans = self.pool.get('ir.translation')
928 for constraint in self._constraints:
929 fun, msg, fields = constraint
930 if not fun(self, cr, uid, ids):
931 # Check presence of __call__ directly instead of using
932 # callable() because it will be deprecated as of Python 3.0
933 if hasattr(msg, '__call__'):
934 tmp_msg = msg(self, cr, uid, ids, context=context)
935 if isinstance(tmp_msg, tuple):
936 tmp_msg, params = tmp_msg
937 translated_msg = tmp_msg % params
939 translated_msg = tmp_msg
941 translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
943 _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
945 self._invalids.update(fields)
948 raise except_orm('ValidateError', '\n'.join(error_msgs))
950 self._invalids.clear()
952 def default_get(self, cr, uid, fields_list, context=None):
954 Returns default values for the fields in fields_list.
956 :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
957 :type fields_list: list
958 :param context: optional context dictionary - it may contains keys for specifying certain options
959 like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
960 It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
961 or override a default value for a field.
962 A special ``bin_size`` boolean flag may also be passed in the context to request the
963 value of all fields.binary columns to be returned as the size of the binary instead of its
964 contents. This can also be selectively overriden by passing a field-specific flag
965 in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
966 Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
967 :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
969 # trigger view init hook
970 self.view_init(cr, uid, fields_list, context)
976 # get the default values for the inherited fields
977 for t in self._inherits.keys():
978 defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
981 # get the default values defined in the object
982 for f in fields_list:
983 if f in self._defaults:
984 if callable(self._defaults[f]):
985 defaults[f] = self._defaults[f](self, cr, uid, context)
987 defaults[f] = self._defaults[f]
989 fld_def = ((f in self._columns) and self._columns[f]) \
990 or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
993 if isinstance(fld_def, fields.property):
994 property_obj = self.pool.get('ir.property')
995 prop_value = property_obj.get(cr, uid, f, self._name, context=context)
997 if isinstance(prop_value, (browse_record, browse_null)):
998 defaults[f] = prop_value.id
1000 defaults[f] = prop_value
1002 if f not in defaults:
1005 # get the default values set by the user and override the default
1006 # values defined in the object
1007 ir_values_obj = self.pool.get('ir.values')
1008 res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
1009 for id, field, field_value in res:
1010 if field in fields_list:
1011 fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
1012 if fld_def._type in ('many2one', 'one2one'):
1013 obj = self.pool.get(fld_def._obj)
1014 if not obj.search(cr, uid, [('id', '=', field_value or False)]):
1016 if fld_def._type in ('many2many'):
1017 obj = self.pool.get(fld_def._obj)
1019 for i in range(len(field_value)):
1020 if not obj.search(cr, uid, [('id', '=',
1023 field_value2.append(field_value[i])
1024 field_value = field_value2
1025 if fld_def._type in ('one2many'):
1026 obj = self.pool.get(fld_def._obj)
1028 for i in range(len(field_value)):
1029 field_value2.append({})
1030 for field2 in field_value[i]:
1031 if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'):
1032 obj2 = self.pool.get(obj._columns[field2]._obj)
1033 if not obj2.search(cr, uid,
1034 [('id', '=', field_value[i][field2])]):
1036 elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type in ('many2one', 'one2one'):
1037 obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
1038 if not obj2.search(cr, uid,
1039 [('id', '=', field_value[i][field2])]):
1041 # TODO add test for many2many and one2many
1042 field_value2[i][field2] = field_value[i][field2]
1043 field_value = field_value2
1044 defaults[field] = field_value
1046 # get the default values from the context
1047 for key in context or {}:
1048 if key.startswith('default_') and (key[8:] in fields_list):
1049 defaults[key[8:]] = context[key]
1053 def perm_read(self, cr, user, ids, context=None, details=True):
1054 raise NotImplementedError(_('The perm_read method is not implemented on this object !'))
1056 def unlink(self, cr, uid, ids, context=None):
1057 raise NotImplementedError(_('The unlink method is not implemented on this object !'))
1059 def write(self, cr, user, ids, vals, context=None):
1060 raise NotImplementedError(_('The write method is not implemented on this object !'))
1062 def create(self, cr, user, vals, context=None):
1063 raise NotImplementedError(_('The create method is not implemented on this object !'))
    def fields_get_keys(self, cr, user, context=None):
        """Return the names of all fields of this model: its own columns
        plus the fields of every ``_inherits`` parent (recursively).

        NOTE(review): the final ``return res`` line is not visible in this
        chunk.
        """
        res = self._columns.keys()
        for parent in self._inherits:
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
    # returns the definition of each field in the object
    # the optional fields parameter can limit the result to some fields
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """Describe every field of the model (type, label, help, selection,
        relation, ...), merging in fields inherited through ``_inherits``
        and translating user-visible strings into ``context['lang']``.

        NOTE(review): several original lines (result-dict init, ``continue``
        statements, ``else`` branches, guards) are missing from this chunk;
        the code below is reproduced as-is and is not contiguous.
        """
        translation_obj = self.pool.get('ir.translation')
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

        if self._columns.keys():
            for f in self._columns.keys():
                field_col = self._columns[f]
                if allfields and f not in allfields:
                # NOTE(review): body of the 'if' above (likely 'continue') missing
                res[f] = {'type': field_col._type}
                # This additional attributes for M2M and function field is added
                # because we need to display tooltip with this additional information
                # when client is started in debug mode.
                if isinstance(field_col, fields.function):
                    res[f]['function'] = field_col._fnct and field_col._fnct.func_name or False
                    res[f]['store'] = field_col.store
                    if isinstance(field_col.store, dict):
                        res[f]['store'] = str(field_col.store)
                    res[f]['fnct_search'] = field_col._fnct_search and field_col._fnct_search.func_name or False
                    res[f]['fnct_inv'] = field_col._fnct_inv and field_col._fnct_inv.func_name or False
                    res[f]['fnct_inv_arg'] = field_col._fnct_inv_arg or False
                    res[f]['func_obj'] = field_col._obj or False
                    res[f]['func_method'] = field_col._method
                if isinstance(field_col, fields.many2many):
                    res[f]['related_columns'] = list((field_col._id1, field_col._id2))
                    res[f]['third_table'] = field_col._rel
                # copy the truthy plain attributes straight into the description
                for arg in ('string', 'readonly', 'states', 'size', 'required', 'group_operator',
                        'change_default', 'translate', 'help', 'select', 'selectable'):
                    if getattr(field_col, arg):
                        res[f][arg] = getattr(field_col, arg)
                if not write_access:
                    # user has no write access: force read-only, drop state rules
                    res[f]['readonly'] = True
                    res[f]['states'] = {}
                for arg in ('digits', 'invisible', 'filters'):
                    if getattr(field_col, arg, None):
                        res[f][arg] = getattr(field_col, arg)

                if field_col.string:
                    res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
                    # NOTE(review): 'if res_trans:' guard missing here
                    res[f]['string'] = res_trans
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
                # NOTE(review): 'if help_trans:' guard missing here
                res[f]['help'] = help_trans

                if hasattr(field_col, 'selection'):
                    if isinstance(field_col.selection, (tuple, list)):
                        sel = field_col.selection
                        # translate each selection option
                        for (key, val) in sel:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                            sel2.append((key, val2 or val))
                        res[f]['selection'] = sel
                        # call the 'dynamic selection' function
                        res[f]['selection'] = field_col.selection(self, cr, user, context)
                if res[f]['type'] in ('one2many', 'many2many', 'many2one', 'one2one'):
                    res[f]['relation'] = field_col._obj
                    res[f]['domain'] = field_col._domain
                    res[f]['context'] = field_col._context
        #TODO : read the fields from the database
        # filter out fields which aren't in the fields list
        for r in res.keys():
            if r not in allfields:
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Hook returning a context-dependent window title for a view, or a
        false value to keep the view's own title.

        NOTE(review): the default body (presumably ``return False``) is not
        visible in this chunk.
        """
    def __view_look_dom(self, cr, user, node, view_id, context=None):
        """Walk one node of a view architecture: apply group-based
        visibility, collect the attrs of every referenced field, translate
        user-visible strings, and recurse into children.  Returns the
        accumulated ``fields`` dict.

        NOTE(review): many original lines (helper headers, loop headers,
        ``else`` branches, guards) are missing from this chunk; the code
        below is reproduced as-is and is not contiguous.
        """
        # body of the (not visible) local helper encode(s)
        if isinstance(s, unicode):
            return s.encode('utf8')

        # return True if node can be displayed to current user
        def check_group(node):
            if node.get('groups'):
                groups = node.get('groups').split(',')
                access_pool = self.pool.get('ir.model.access')
                can_see = any(access_pool.check_groups(cr, user, group) for group in groups)
                # NOTE(review): 'if not can_see:' guard missing here
                node.set('invisible', '1')
                if 'attrs' in node.attrib:
                    del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # embedded sub-object view: serialize its field children...
                if f.tag in ('field'):
                    xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}
            if node.get('name'):
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                # NOTE(review): else-branch header missing before next line
                column = self._inherit_fields[node.get('name')][2]
                relation = self.pool.get(column._obj)
                # inline sub-views (form/tree/graph) of a relational field
                if f.tag in ('form', 'tree', 'graph'):
                    ctx = context.copy()
                    ctx['base_model_name'] = self._name
                    xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                    views[str(f.tag)] = {
                attrs = {'views': views}
                if node.get('widget') and node.get('widget') == 'selection':
                    # Prepare the cached selection list for the client. This needs to be
                    # done even when the field is invisible to the current user, because
                    # other events could need to change its value to any of the selectable ones
                    # (such as on_change events, refreshes, etc.)

                    # If domain and context are strings, we keep them for client-side, otherwise
                    # we evaluate them server-side to consider them when generating the list of
                    # TODO: find a way to remove this hack, by allow dynamic domains
                    if column._domain and not isinstance(column._domain, basestring):
                        dom = column._domain
                    dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                    search_context = dict(context)
                    if column._context and not isinstance(column._context, basestring):
                        search_context.update(column._context)
                    attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                    if (node.get('required') and not int(node.get('required'))) or not column.required:
                        attrs['selection'].append((False, ''))
            fields[node.get('name')] = attrs

        elif node.tag in ('form', 'tree'):
            result = self.view_header_get(cr, user, False, node.tag, context)
            # NOTE(review): 'if result:' guard missing here
            node.set('string', result)

        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if 'groups' in node.attrib:
        # NOTE(review): body of the 'if' above (likely check_group(node)) missing
        if ('lang' in context) and not result:
            if node.get('string'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                node.set('string', trans)
            if node.get('confirm'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm'))
                node.set('confirm', trans)
            # NOTE(review): "if node.get('sum'):" guard missing here
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
            node.set('sum', trans)

        # recurse into children (loop header 'for f in node:' not visible)
        if children or (node.tag == 'field' and f.tag in ('filter','separator')):
            fields.update(self.__view_look_dom(cr, user, f, view_id, context))
    def _disable_workflow_buttons(self, cr, user, node):
        """Mark workflow buttons of a view as read-only when the user is in
        none of the groups allowed by the matching workflow transitions.

        NOTE(review): the uid-1 early return, parts of the SQL text and the
        final ``return node`` are not visible in this chunk.
        """
        # admin user can always activate workflow buttons

        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        # only workflow buttons (type != 'object') are restricted this way
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            cr.execute("""SELECT DISTINCT t.group_id
                        INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                        INNER JOIN wkf_transition t ON (t.act_to = a.id)
                          AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # clickable when the transition is unrestricted or groups overlap
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            button.set('readonly', str(int(not can_click)))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        """Post-process a parsed view: resolve field definitions, disable
        unauthorized workflow buttons, and serialize the arch back to a
        string.  Returns (arch, fields) — the return itself is not visible
        in this chunk.

        NOTE(review): several original lines are missing; reproduced as-is.
        """
        fields_def = self.__view_look_dom(cr, user, node, view_id, context=context)
        node = self._disable_workflow_buttons(cr, user, node)
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        if node.tag == 'diagram':
            # diagram views describe their fields on the node/arrow sub-models
            if node.getchildren()[0].tag == 'node':
                node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, fields_def.keys(), context)
            if node.getchildren()[1].tag == 'arrow':
                arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, fields_def.keys(), context)
            for key, value in node_fields.items():
            for key, value in arrow_fields.items():
        # NOTE(review): else-branch header missing before next line
        fields = self.fields_get(cr, user, fields_def.keys(), context)
        for field in fields_def:
            # NOTE(review): "if field == 'id':" guard missing before next lines
            # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
            fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
            # NOTE(review): else-branch header missing — unknown field: build a helpful error
                cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
                res = cr.fetchall()[:]
                res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
                msg = "\n * ".join([r[0] for r in res])
                msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
                netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
                raise except_orm('View error', msg)
    def __get_default_calendar_view(self):
        """Generate a default calendar view (For internal use only).

        Builds the arch string from conventional column names: a start
        date, an optional color field, and a stop date or duration.

        NOTE(review): some lines (loop ``break``/flag assignments, the line
        opening the final concatenation) are missing from this chunk;
        reproduced as-is.
        """
        arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
                '<calendar string="%s"') % (self._description)
        if (self._date_name not in self._columns):
            # fall back to the first conventional date column present
            for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                if dt in self._columns:
                    self._date_name = dt
            # NOTE(review): guard before this raise ('if not date_found:') missing
            raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
        arch += ' date_start="%s"' % (self._date_name)
        for color in ["user_id", "partner_id", "x_user_id", "x_partner_id"]:
            if color in self._columns:
                arch += ' color="' + color + '"'
        dt_stop_flag = False
        for dt_stop in ["date_stop", "date_end", "x_date_stop", "x_date_end"]:
            if dt_stop in self._columns:
                arch += ' date_stop="' + dt_stop + '"'
        if not dt_stop_flag:
            # no stop date available: use a duration column instead
            for dt_delay in ["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"]:
                if dt_delay in self._columns:
                    arch += ' date_delay="' + dt_delay + '"'
        # NOTE(review): the line opening this string concatenation is missing
                ' <field name="%s"/>\n'
                '</calendar>') % (self._rec_name)
    def __get_default_search_view(self, cr, uid, context=None):
        """Build a default ``<search>`` view arch string.

        Searchable fields are every field flagged ``select`` in the model
        definition plus every field marked ``select=1`` in the default form
        and tree views.
        """
        form_view = self.fields_view_get(cr, uid, False, 'form', context=context)
        tree_view = self.fields_view_get(cr, uid, False, 'tree', context=context)
        fields_to_search = set()
        fields = self.fields_get(cr, uid, context=context)
        for field in fields:
            if fields[field].get('select'):
                fields_to_search.add(field)
        for view in (form_view, tree_view):
            view_root = etree.fromstring(view['arch'])
            # Only care about select=1 in xpath below, because select=2 is covered
            # by the custom advanced search in clients
            fields_to_search = fields_to_search.union(view_root.xpath("//field[@select=1]/@name"))
        tree_view_root = view_root # as provided by loop above
        search_view = etree.Element("search", attrib={'string': tree_view_root.get("string", "")})
        field_group = etree.Element("group")
        search_view.append(field_group)
        for field_name in fields_to_search:
            field_group.append(etree.Element("field", attrib={'name': field_name}))
        return etree.tostring(search_view, encoding="utf-8").replace('\t', '')
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Get the detailed composition of the requested view like fields, model, view architecture

        :param cr: database cursor
        :param user: current user id
        :param view_id: id of the view or None
        :param view_type: type of the view to return if view_id is None ('form', 'tree', ...)
        :param context: context arguments, like lang, time zone
        :param toolbar: true to include contextual actions
        :param submenu: example (portal_project module)
        :return: dictionary describing the composition of the requested view (including inherited views and extensions)
        :raise AttributeError:
                * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
                * if some tag other than 'position' is found in parent view
        :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure

        NOTE(review): numerous original lines (helper headers, loop
        headers, guards, ``else`` branches, the final ``return result``)
        are missing from this chunk; the code below is reproduced as-is
        and is not contiguous.
        """
        # body of the (not visible) local helper encode(s)
        if isinstance(s, unicode):
            return s.encode('utf8')

        def raise_view_error(error_msg, child_view_id):
            # resolve xml_ids of the offending views for a readable message
            view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
            raise AttributeError(("View definition error for inherited view '%(xml_id)s' on '%(model)s' model: " + error_msg)
                                 % { 'xml_id': child_view.xml_id,
                                     'parent_xml_id': view.xml_id,
                                     'model': self._name, })

        def _inherit_apply(src, inherit, inherit_id=None):
            # apply one inheritance spec (<xpath>/<field position=...>) to src
            def _find(node, node2):
                if node2.tag == 'xpath':
                    res = node.xpath(node2.get('expr'))
                # NOTE(review): else-branch header missing before next line
                for n in node.getiterator(node2.tag):
                    if node2.tag == 'field':
                        # only compare field names, a field can be only once in a given view
                        # at a given level (and for multilevel expressions, we should use xpath
                        # inheritance spec anyway)
                        if node2.get('name') == n.get('name'):
                    for attr in node2.attrib:
                        if attr == 'position':
                        if n.get(attr) == node2.get(attr):
            # End: _find(node, node2)

            doc_dest = etree.fromstring(encode(inherit))
            toparse = [doc_dest]
            # NOTE(review): 'while len(toparse):' loop header missing
            node2 = toparse.pop(0)
            if isinstance(node2, SKIPPED_ELEMENT_TYPES):
            if node2.tag == 'data':
                toparse += [ c for c in doc_dest ]
            node = _find(src, node2)
            if node is not None:
                if node2.get('position'):
                    pos = node2.get('position')
                if pos == 'replace':
                    parent = node.getparent()
                    src = copy.deepcopy(node2[0])
                    node.addprevious(child)
                    node.getparent().remove(node)
                elif pos == 'attributes':
                    for child in node2.getiterator('attribute'):
                        attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                        node.set(attribute[0], attribute[1])
                        del(node.attrib[attribute[0]])
                sib = node.getnext()
                elif pos == 'after':
                    sib.addprevious(child)
                elif pos == 'before':
                    node.addprevious(child)
                raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
            # NOTE(review): else-branch assembling the "element not found" tag missing
                ' %s="%s"' % (attr, node2.get(attr))
                for attr in node2.attrib
                if attr != 'position'
            tag = "<%s%s>" % (node2.tag, attrs)
            raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
        # End: _inherit_apply(src, inherit)

        result = {'type': view_type, 'model': self._name}
        parent_view_model = None
        # context can force a specific view through e.g. 'form_view_ref'
        view_ref = context.get(view_type + '_view_ref', False)
        if view_ref and not view_id:
            module, view_ref = view_ref.split('.', 1)
            cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
            view_ref_res = cr.fetchone()
            view_id = view_ref_res[0]

        query = "SELECT arch,name,field_parent,id,type,inherit_id,model FROM ir_ui_view WHERE id=%s"
        query += " AND model=%s"
        params += (self._name,)
        cr.execute(query, params)
        # NOTE(review): else-branch: no view_id given, pick highest-priority root view
        cr.execute('''SELECT
                arch,name,field_parent,id,type,inherit_id,model
                ORDER BY priority''', (self._name, view_type))
        sql_res = cr.fetchone()
        view_id = ok or sql_res[3]
        parent_view_model = sql_res[6]

        # if a view was found
        result['type'] = sql_res[4]
        result['view_id'] = sql_res[3]
        result['arch'] = sql_res[0]

        def _inherit_apply_rec(result, inherit_id):
            # get all views which inherit from (ie modify) this view
            cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
            sql_inherit = cr.fetchall()
            for (inherit, id) in sql_inherit:
                result = _inherit_apply(result, inherit, id)
                result = _inherit_apply_rec(result, id)

        inherit_result = etree.fromstring(encode(result['arch']))
        result['arch'] = _inherit_apply_rec(inherit_result, sql_res[3])
        result['name'] = sql_res[1]
        result['field_parent'] = sql_res[2] or False

        # otherwise, build some kind of default view
        if view_type == 'form':
            res = self.fields_get(cr, user, context=context)
            xml = '<?xml version="1.0" encoding="utf-8"?> ' \
                  '<form string="%s">' % (self._description,)
            # NOTE(review): 'for x in res:' loop header missing before next line
            if res[x]['type'] not in ('one2many', 'many2many'):
                xml += '<field name="%s"/>' % (x,)
                if res[x]['type'] == 'text':
        elif view_type == 'tree':
            _rec_name = self._rec_name
            if _rec_name not in self._columns:
                _rec_name = self._columns.keys()[0]
            xml = '<?xml version="1.0" encoding="utf-8"?>' \
                  '<tree string="%s"><field name="%s"/></tree>' \
                  % (self._description, self._rec_name)
        elif view_type == 'calendar':
            xml = self.__get_default_calendar_view()
        elif view_type == 'search':
            xml = self.__get_default_search_view(cr, user, context)
        # NOTE(review): else-branch header missing before next two lines
        xml = '<?xml version="1.0"?>' # what happens here, graph case?
        raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
        result['arch'] = etree.fromstring(encode(xml))
        result['name'] = 'default'
        result['field_parent'] = False
        result['view_id'] = 0

        if parent_view_model != self._name:
            # inherited view from another model: expose it for translations
            ctx = context.copy()
            ctx['base_model_name'] = parent_view_model
        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
        result['arch'] = xarch
        result['fields'] = xfields

        # NOTE(review): 'if submenu:' guard missing before next line
        if context and context.get('active_id', False):
            data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
            act_id = data_menu.id
            data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
            result['submenu'] = getattr(data_action, 'menus', False)

        # toolbar section: body of the (not visible) local helper clean(x),
        # which strips bulky report payloads from the action dicts
        for key in ('report_sxw_content', 'report_rml_content',
                'report_sxw', 'report_rml',
                'report_sxw_content_data', 'report_rml_content_data'):
        ir_values_obj = self.pool.get('ir.values')
        resprint = ir_values_obj.get(cr, user, 'action',
                'client_print_multi', [(self._name, False)], False,
        resaction = ir_values_obj.get(cr, user, 'action',
                'client_action_multi', [(self._name, False)], False,
        resrelate = ir_values_obj.get(cr, user, 'action',
                'client_action_relate', [(self._name, False)], False,
        resprint = map(clean, resprint)
        resaction = map(clean, resaction)
        resaction = filter(lambda x: not x.get('multi', False), resaction)
        resprint = filter(lambda x: not x.get('multi', False), resprint)
        resrelate = map(lambda x: x[2], resrelate)

        for x in resprint + resaction + resrelate:
            x['string'] = x['name']

        result['toolbar'] = {
            'action': resaction,
        # NOTE(review): remainder of the toolbar dict and 'return result' not visible
    # non name-mangled alias so subclasses and other modules can reach the
    # private arch-processing helper
    _view_look_dom_arch = __view_look_dom_arch
    def search_count(self, cr, user, args, context=None):
        """Count the records matching the ``args`` search domain.

        Delegates to :meth:`search` with ``count=True``.

        NOTE(review): the return statements (list length vs. plain count)
        are not visible in this chunk.
        """
        res = self.search(cr, user, args, context=context, count=True)
        if isinstance(res, list):
    def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
        """
        Search for records based on a search domain.

        :param cr: database cursor
        :param user: current user id
        :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
        :param offset: optional number of results to skip in the returned values (default: 0)
        :param limit: optional max number of records to return (default: **None**)
        :param order: optional columns to sort by (default: self._order=id )
        :param context: optional context arguments, like lang, time zone
        :type context: dictionary
        :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
        :return: id or list of ids of records matching the criteria
        :rtype: integer or list of integers
        :raise AccessError: * if user tries to bypass access rules for read on the requested object.

        **Expressing a search domain (args)**

        Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:

            * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
            * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
              The semantics of most of these operators are obvious.
              The ``child_of`` operator will look for records who are children or grand-children of a given record,
              according to the semantics of this model (i.e following the relationship field named by
              ``self._parent_name``, by default ``parent_id``.
            * **value** must be a valid value to compare with the values of **field_name**, depending on its type.

        Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
        These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
        Be very careful about this when you combine them the first time.

        Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::

            [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))

        The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::

            (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
        """
        return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        raise NotImplementedError(_('The search method is not implemented on this object !'))
    def name_get(self, cr, user, ids, context=None):
        """Return the textual representation of the requested records.

        :param cr: database cursor
        :param user: current user id
        :param ids: list of ids
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: tuples with the text representation of requested objects for to-many relationships

        NOTE(review): the empty-``ids`` guard and the wrapping of a scalar
        id into a list are not visible in this chunk; reproduced as-is.
        """
        if isinstance(ids, (int, long)):
        return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
            [self._rec_name], context, load='_classic_write')]
    def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
        """
        Search for records and their display names according to a search domain.

        :param cr: database cursor
        :param user: current user id
        :param name: object name to search
        :param args: list of tuples specifying search criteria [('field_name', 'operator', 'value'), ...]
        :param operator: operator for search criterion
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :param limit: optional max number of records to return
        :return: list of object names matching the search criteria, used to provide completion for to-many relationships

        This method is equivalent of :py:meth:`~osv.osv.osv.search` on **name** + :py:meth:`~osv.osv.osv.name_get` on the result.
        See :py:meth:`~osv.osv.osv.search` for an explanation of the possible values for the search domain specified in **args**.
        """
        return self._name_search(cr, user, name, args, operator, context, limit)
    # private implementation of name_search, allows passing a dedicated user for the name_get part to
    # solve some access rights issues
    def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
        """Search with ``user`` rights but run name_get as ``name_get_uid``
        (when given), so drop-down completion can bypass read restrictions.

        NOTE(review): the usual args/context default guards and the final
        return are not visible in this chunk; reproduced as-is.
        """
        args += [(self._rec_name, operator, name)]
        access_rights_uid = name_get_uid or user
        ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
        res = self.name_get(cr, access_rights_uid, ids, context)
1816 def copy(self, cr, uid, id, default=None, context=None):
1817 raise NotImplementedError(_('The copy method is not implemented on this object !'))
1819 def exists(self, cr, uid, ids, context=None):
1820 raise NotImplementedError(_('The exists method is not implemented on this object !'))
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        """Read the translated labels of ``fields`` for each language in
        ``langs``, including labels of ``_inherits`` parents.

        NOTE(review): result-dict initialisation, several loop headers and
        guards, and the final return are not visible in this chunk;
        reproduced as-is.
        """
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
        fields = self._columns.keys() + self._inherit_fields.keys()
        #FIXME: collect all calls to _get_source into one SQL call.
        res[lang] = {'code': lang}
        if f in self._columns:
            res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
            res[lang][f] = res_trans
            # fallback: the untranslated label from the column definition
            res[lang][f] = self._columns[f].string
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
        # merge the parent model's translations into our result
        res[lang]['code'] = lang
        for f in res2[lang]:
            res[lang][f] = res2[lang][f]
    def write_string(self, cr, uid, id, langs, vals, context=None):
        """Store translated field labels (``vals``) for each language in
        ``langs``, delegating inherited fields to the parent models.

        NOTE(review): the language/field loop headers and the final return
        are not visible in this chunk; reproduced as-is.
        """
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
        #FIXME: try to only call the translation in one SQL
        if field in self._columns:
            src = self._columns[field].string
            self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), vals)
            # NOTE(review): a guard on 'cols' is likely missing here
            self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
1862 def _check_removed_columns(self, cr, log=False):
1863 raise NotImplementedError()
    def _add_missing_default_values(self, cr, uid, values, context=None):
        """Complete ``values`` with default values for every own or
        inherited column the caller did not provide, never overriding
        caller-supplied values.

        NOTE(review): the ``for dv in defaults:`` loop header and the final
        return are not visible in this chunk; reproduced as-is.
        """
        missing_defaults = []
        avoid_tables = [] # avoid overriding inherited values when parent is set
        for tables, parent_field in self._inherits.items():
            if parent_field in values:
                avoid_tables.append(tables)
        for field in self._columns.keys():
            if not field in values:
                missing_defaults.append(field)
        for field in self._inherit_fields.keys():
            if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
                missing_defaults.append(field)

        if len(missing_defaults):
            # override defaults with the provided values, never allow the other way around
            defaults = self.default_get(cr, uid, missing_defaults, context)
            # normalize m2m defaults given as plain id lists into (6,0,ids) commands
            if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
                    and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
                defaults[dv] = [(6, 0, defaults[dv])]
            # normalize o2m defaults given as dicts into (0,0,vals) commands
            if (dv in self._columns and self._columns[dv]._type == 'one2many' \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
                    and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
                defaults[dv] = [(0, 0, x) for x in defaults[dv]]
            defaults.update(values)
class orm_memory(orm_template):
    """Non-persistent model: records live in an in-process dict instead of
    a postgres table.

    NOTE(review): the class body continues past this chunk.
    """

    # method names that function fields / workflows must not override
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
    _inherit_fields = {}
    # expiration limits, read from the server configuration (may be unset)
    _max_count = config.get('osv_memory_count_limit')
    _max_hours = config.get('osv_memory_age_limit')
    def __init__(self, cr):
        """Initialise the in-memory model and purge stale workflow
        instances attached to it.

        NOTE(review): a few original lines (in-memory store setup) between
        the two statements below are not visible in this chunk.
        """
        super(orm_memory, self).__init__(cr)
        cr.execute('delete from wkf_instance where res_type=%s', (self._name,))
1909 def _check_access(self, uid, object_id, mode):
1910 if uid != 1 and self.datas[object_id]['internal.create_uid'] != uid:
1911 raise except_orm(_('AccessError'), '%s access is only allowed on your own records for osv_memory objects except for the super-user' % mode.capitalize())
    def vaccum(self, cr, uid, force=False):
        """Run the vaccuum cleaning system, expiring and removing old records from the
        virtual osv_memory tables if the "max count" or "max age" conditions are enabled
        and have been reached. This method can be called very often (e.g. everytime a record
        is created), but will only actually trigger the cleanup process once out of
        "_check_time" times (by default once out of 20 calls).

        NOTE(review): several original lines (call counter increment, early
        return, ``tounlink`` initialisation, the ``_max_hours`` guard,
        ``tounlink.append``, final return) are not visible in this chunk;
        reproduced as-is.
        """
        # throttle: only do real work once every _check_time calls
        if (not force) and (self.check_id % self._check_time):

        # Age-based expiration
        max = time.time() - self._max_hours * 60 * 60
        for k,v in self.datas.iteritems():
            if v['internal.date_access'] < max:
        self.unlink(cr, 1, tounlink)

        # Count-based expiration
        if self._max_count and len(self.datas) > self._max_count:
            # sort by access time to remove only the first/oldest ones in LRU fashion
            records = self.datas.items()
            records.sort(key=lambda x:x[1]['internal.date_access'])
            self.unlink(cr, 1, [x[0] for x in records[:len(self.datas)-self._max_count]])
1941 def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
1944 if not fields_to_read:
1945 fields_to_read = self._columns.keys()
1949 if isinstance(ids, (int, long)):
1953 for f in fields_to_read:
1954 record = self.datas.get(id)
1956 self._check_access(user, id, 'read')
1957 r[f] = record.get(f, False)
1958 if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
1961 if id in self.datas:
1962 self.datas[id]['internal.date_access'] = time.time()
1963 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
1964 for f in fields_post:
1965 res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
1966 for record in result:
1967 record[f] = res2[record['id']]
1968 if isinstance(ids_orig, (int, long)):
1972 def write(self, cr, user, ids, vals, context=None):
1978 if self._columns[field]._classic_write:
1979 vals2[field] = vals[field]
1981 upd_todo.append(field)
1982 for object_id in ids:
1983 self._check_access(user, object_id, mode='write')
1984 self.datas[object_id].update(vals2)
1985 self.datas[object_id]['internal.date_access'] = time.time()
1986 for field in upd_todo:
1987 self._columns[field].set_memory(cr, self, object_id, field, vals[field], user, context)
1988 self._validate(cr, user, [object_id], context)
1989 wf_service = netsvc.LocalService("workflow")
1990 wf_service.trg_write(user, self._name, object_id, cr)
1993 def create(self, cr, user, vals, context=None):
1994 self.vaccum(cr, user)
1996 id_new = self.next_id
1998 vals = self._add_missing_default_values(cr, user, vals, context)
2003 if self._columns[field]._classic_write:
2004 vals2[field] = vals[field]
2006 upd_todo.append(field)
2007 self.datas[id_new] = vals2
2008 self.datas[id_new]['internal.date_access'] = time.time()
2009 self.datas[id_new]['internal.create_uid'] = user
2011 for field in upd_todo:
2012 self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
2013 self._validate(cr, user, [id_new], context)
2014 if self._log_create and not (context and context.get('no_store_function', False)):
2015 message = self._description + \
2017 self.name_get(cr, user, [id_new], context=context)[0][1] + \
2019 self.log(cr, user, id_new, message, True, context=context)
2020 wf_service = netsvc.LocalService("workflow")
2021 wf_service.trg_create(user, self._name, id_new, cr)
2024 def _where_calc(self, cr, user, args, active_test=True, context=None):
2029 # if the object has a field named 'active', filter out all inactive
2030 # records unless they were explicitely asked for
2031 if 'active' in self._columns and (active_test and context.get('active_test', True)):
2033 active_in_args = False
2035 if a[0] == 'active':
2036 active_in_args = True
2037 if not active_in_args:
2038 args.insert(0, ('active', '=', 1))
2040 args = [('active', '=', 1)]
2043 e = expression.expression(args)
2044 e.parse(cr, user, self, context)
2048 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
2052 # implicit filter on current user except for superuser
2056 args.insert(0, ('internal.create_uid', '=', user))
2058 result = self._where_calc(cr, user, args, context=context)
2060 return self.datas.keys()
2064 #Find the value of dict
2067 for id, data in self.datas.items():
2068 counter = counter + 1
2070 if limit and (counter > int(limit)):
2075 val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
2076 elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
2077 val = eval('data[arg[0]]'+arg[1] +' arg[2]', locals())
2078 elif arg[1] in ['ilike']:
2079 val = (str(data[arg[0]]).find(str(arg[2]))!=-1)
2089 def unlink(self, cr, uid, ids, context=None):
2091 self._check_access(uid, id, 'unlink')
2092 self.datas.pop(id, None)
2094 cr.execute('delete from wkf_instance where res_type=%s and res_id IN %s', (self._name, tuple(ids)))
2097 def perm_read(self, cr, user, ids, context=None, details=True):
2099 credentials = self.pool.get('res.users').name_get(cr, user, [user])[0]
2100 create_date = time.strftime('%Y-%m-%d %H:%M:%S')
2102 self._check_access(user, id, 'read')
2104 'create_uid': credentials,
2105 'create_date': create_date,
2107 'write_date': False,
2113 def _check_removed_columns(self, cr, log=False):
2114 # nothing to check in memory...
2117 def exists(self, cr, uid, ids, context=None):
2118 if isinstance(ids, (int,long)):
2120 return all(( id in self.datas for id in ids ))
# SQL-backed ORM flavour: records persist in PostgreSQL tables.
2122 class orm(orm_template):
# Model-level SQL constraints; unpacked later as (key, constraint_def, _)
# 3-tuples when (re)creating table constraints in _auto_init.
2123 _sql_constraints = []
# ORM entry-point method names reserved by the framework (purpose of the
# list is not visible in this extract).
2125 _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
# Two loggers: general ORM messages vs. DDL/schema-change reporting.
2126 __logger = logging.getLogger('orm')
2127 __schema = logging.getLogger('orm.schema')
# read_group(): grouped "list view" read -- builds and executes an aggregated
# SELECT (GROUP BY + group_operator aggregates), then post-processes rows into
# group dicts carrying __domain/__context for drill-down.
# NOTE(review): this extract skips original source lines (gaps in the embedded
# numbering); docstring fences, try/except and some loop headers are missing
# below -- verify control flow against the complete source.
2128 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
2130 Get the list of records in list view grouped by the given ``groupby`` fields
2132 :param cr: database cursor
2133 :param uid: current user id
2134 :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
2135 :param fields: list of fields present in the list view specified on the object
2136 :param groupby: list of fields on which to groupby the records
2137 :type fields_list: list (example ['field_name_1', ...])
2138 :param offset: optional number of records to skip
2139 :param limit: optional max number of records to return
2140 :param context: context arguments, like lang, time zone
2141 :param order: optional ``order by`` specification, for overriding the natural
2142 sort ordering of the groups, see also :py:meth:`~osv.osv.osv.search`
2143 (supported only for many2one fields currently)
2144 :return: list of dictionaries(one dictionary for each record) containing:
2146 * the values of fields grouped by the fields in ``groupby`` argument
2147 * __domain: list of tuples specifying the search criteria
2148 * __context: dictionary with argument like ``groupby``
2149 :rtype: [{'field_name_1': value, ...]
2150 :raise AccessError: * if user has no read rights on the requested object
2151 * if user tries to bypass access rules for read on the requested object
# ACL check first, then build the WHERE from the domain + record rules.
2154 context = context or {}
2155 self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
2157 fields = self._columns.keys()
2159 query = self._where_calc(cr, uid, domain, context=context)
2160 self._apply_ir_rules(cr, uid, query, 'read', context=context)
2162 # Take care of adding join(s) if groupby is an '_inherits'ed field
2163 groupby_list = groupby
2164 qualified_groupby_field = groupby
2166 if isinstance(groupby, list):
# only the first groupby level is computed here; the rest goes in __context
2167 groupby = groupby[0]
2168 qualified_groupby_field = self._inherits_join_calc(groupby, query)
2171 assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
2172 groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
2173 assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
2175 fget = self.fields_get(cr, uid, fields, context=context)
2176 float_int_fields = filter(lambda x: fget[x]['type'] in ('float', 'integer'), fields)
2178 group_count = group_by = groupby
# build the SELECT expression for the groupby column, normalizing
# date/datetime values to year-month and booleans to non-NULL.
2180 if fget.get(groupby):
2181 groupby_type = fget[groupby]['type']
2182 if groupby_type in ('date', 'datetime'):
2183 qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
2184 flist = "%s as %s " % (qualified_groupby_field, groupby)
2185 elif groupby_type == 'boolean':
2186 qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
2187 flist = "%s as %s " % (qualified_groupby_field, groupby)
2189 flist = qualified_groupby_field
2191 # Don't allow arbitrary values, as this would be a SQL injection vector!
2192 raise except_orm(_('Invalid group_by'),
2193 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
# aggregate every numeric stored column with its group_operator (default sum)
2196 fields_pre = [f for f in float_int_fields if
2197 f == self.CONCURRENCY_CHECK_FIELD
2198 or (f in self._columns and getattr(self._columns[f], '_classic_write'))]
2199 for f in fields_pre:
2200 if f not in ['id', 'sequence']:
2201 group_operator = fget[f].get('group_operator', 'sum')
2204 qualified_field = '"%s"."%s"' % (self._table, f)
2205 flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
2207 gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
2209 from_clause, where_clause, where_clause_params = query.get_sql()
2210 where_clause = where_clause and ' WHERE ' + where_clause
2211 limit_str = limit and ' limit %d' % limit or ''
2212 offset_str = offset and ' offset %d' % offset or ''
2213 if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
2215 cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
# normalize SQL NULLs to the ORM's False convention
2218 for r in cr.dictfetchall():
2219 for fld, val in r.items():
2220 if val == None: r[fld] = False
2221 alldata[r['id']] = r
# re-run an ordered search so groups come back in the model's sort order
2224 data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=orderby or groupby, context=context)
2225 # the IDS of records that have groupby field value = False or '' should be sorted too
2226 data_ids += filter(lambda x:x not in data_ids, alldata.keys())
2227 data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
2228 # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
2229 data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
# decorate each group dict with the drill-down domain/context
2233 d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
2234 if not isinstance(groupby_list, (str, unicode)):
2235 if groupby or not context.get('group_by_no_leaf', False):
2236 d['__context'] = {'group_by': groupby_list[1:]}
2237 if groupby and groupby in fget:
# date groups: display as "Month Year" and expand the domain to the
# whole calendar month covered by the yyyy-mm group key.
2238 if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
2239 dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
2240 days = calendar.monthrange(dt.year, dt.month)[1]
2242 d[groupby] = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d').strftime('%B %Y')
2243 d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
2244 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
2245 del alldata[d['id']][groupby]
2246 d.update(alldata[d['id']])
2250 def _inherits_join_add(self, parent_model_name, query):
2252 Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
2254 :param parent_model_name: name of the parent model for which the clauses should be added
2255 :param query: query object on which the JOIN should be added
2257 inherits_field = self._inherits[parent_model_name]
2258 parent_model = self.pool.get(parent_model_name)
2259 parent_table_name = parent_model._table
2260 quoted_parent_table_name = '"%s"' % parent_table_name
2261 if quoted_parent_table_name not in query.tables:
2262 query.tables.append(quoted_parent_table_name)
2263 query.where_clause.append('("%s".%s = %s.id)' % (self._table, inherits_field, parent_table_name))
2265 def _inherits_join_calc(self, field, query):
2267 Adds missing table select and join clause(s) to ``query`` for reaching
2268 the field coming from an '_inherits' parent table (no duplicates).
2270 :param field: name of inherited field to reach
2271 :param query: query object on which the JOIN should be added
2272 :return: qualified name of field, to be used in SELECT clause
2274 current_table = self
2275 while field in current_table._inherit_fields and not field in current_table._columns:
2276 parent_model_name = current_table._inherit_fields[field][0]
2277 parent_table = self.pool.get(parent_model_name)
2278 self._inherits_join_add(parent_model_name, query)
2279 current_table = parent_table
2280 return '"%s".%s' % (current_table._table, field)
# _parent_store_compute(): (re)compute the nested-set parent_left/parent_right
# columns for the whole table by depth-first traversal from the roots.
# NOTE(review): extract has gaps (missing return/fetch lines per the embedded
# numbering) -- verify control flow against the complete source.
2282 def _parent_store_compute(self, cr):
# models without _parent_store enabled are skipped
2283 if not self._parent_store:
2285 logger = netsvc.Logger()
2286 logger.notifyChannel('data', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
# recursive DFS: assigns left index on entry, right index after children
2287 def browse_rec(root, pos=0):
2289 where = self._parent_name+'='+str(root)
2291 where = self._parent_name+' IS NULL'
2292 if self._parent_order:
2293 where += ' order by '+self._parent_order
2294 cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
2296 for id in cr.fetchall():
2297 pos2 = browse_rec(id[0], pos2)
2298 cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
# start from all root records (no parent), in configured order
2300 query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
2301 if self._parent_order:
2302 query += ' order by ' + self._parent_order
2305 for (root,) in cr.fetchall():
2306 pos = browse_rec(root, pos)
# _update_store(): backfill the stored column `k` of a stored fields.function
# `f` by recomputing its value for every row (processed in chunks of 40 ids
# in the full source; the while/slice lines are missing from this extract).
2309 def _update_store(self, cr, f, k):
2310 logger = netsvc.Logger()
2311 logger.notifyChannel('data', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
# _symbol_set = (sql_placeholder, python->sql adapter) for this column
2312 ss = self._columns[k]._symbol_set
2313 update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
2314 cr.execute('select id from '+self._table)
2315 ids_lst = map(lambda x: x[0], cr.fetchall())
2318 ids_lst = ids_lst[40:]
2319 res = f.get(cr, self, iids, k, 1, {})
2320 for key, val in res.items():
2323 # if val is a many2one, just write the ID
2324 if type(val) == tuple:
# skip writing plain False (no value); other falsy values are written
2326 if (val<>False) or (type(val)<>bool):
2327 cr.execute(update_query, (ss[1](val), key))
# NOTE(review): extract has gaps (try/except and the assignment of `val`
# are missing per the embedded numbering) -- verify against the full source.
2329 def _check_selection_field_value(self, cr, uid, field, value, context=None):
2330 """Raise except_orm if value is not among the valid values for the selection field"""
# reference fields hold 'model.name,id' strings: validate the id part
2331 if self._columns[field]._type == 'reference':
2332 val_model, val_id_str = value.split(',', 1)
2335 val_id = long(val_id_str)
2339 raise except_orm(_('ValidateError'),
2340 _('Invalid value for reference field "%s" (last part must be a non-zero integer): "%s"') % (field, value))
# selection may be a static (tuple/list) of pairs or a callable returning one
2344 if isinstance(self._columns[field].selection, (tuple, list)):
2345 if val in dict(self._columns[field].selection):
2347 elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
2349 raise except_orm(_('ValidateError'),
2350 _('The value "%s" for the field "%s" is not in the selection') % (value, field))
2352 def _check_removed_columns(self, cr, log=False):
2353 # iterate on the database columns to drop the NOT NULL constraints
2354 # of fields which were required but have been removed (or will be added by another module)
# columns still known to the model (stored ones only); anything else found
# in pg_attribute below is a leftover from a removed field
2355 columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
2356 columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
# query PostgreSQL's catalog for live, non-system columns of this table
# that the model no longer declares
2357 cr.execute("SELECT a.attname, a.attnotnull"
2358 " FROM pg_class c, pg_attribute a"
2359 " WHERE c.relname=%s"
2360 " AND c.oid=a.attrelid"
2361 " AND a.attisdropped=%s"
2362 " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
2363 " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
2365 for column in cr.dictfetchall():
2367 self.__logger.debug("column %s is in the table %s but not in the corresponding object %s",
2368 column['attname'], self._table, self._name)
# only the NOT NULL constraint is relaxed; the data itself is kept
2369 if column['attnotnull']:
2370 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
2371 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2372 self._table, column['attname'])
2374 def _auto_init(self, cr, context=None):
2377 store_compute = False
2380 self._field_create(cr, context=context)
2381 if getattr(self, '_auto', True):
2382 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2384 cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
2385 cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''")))
2387 self.__schema.debug("Table '%s': created", self._table)
2390 if self._parent_store:
2391 cr.execute("""SELECT c.relname
2392 FROM pg_class c, pg_attribute a
2393 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
2394 """, (self._table, 'parent_left'))
2396 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
2397 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
2398 if 'parent_left' not in self._columns:
2399 self.__logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
2401 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2402 self._table, 'parent_left', 'INTEGER')
2403 elif not self._columns['parent_left'].select:
2404 self.__logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
2406 if 'parent_right' not in self._columns:
2407 self.__logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
2409 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2410 self._table, 'parent_right', 'INTEGER')
2411 elif not self._columns['parent_right'].select:
2412 self.__logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
2414 if self._columns[self._parent_name].ondelete != 'cascade':
2415 self.__logger.error("The column %s on object %s must be set as ondelete='cascade'",
2416 self._parent_name, self._name)
2419 store_compute = True
2421 if self._log_access:
2423 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
2424 'create_date': 'TIMESTAMP',
2425 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
2426 'write_date': 'TIMESTAMP'
2431 FROM pg_class c, pg_attribute a
2432 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
2433 """, (self._table, k))
2435 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
2437 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2438 self._table, k, logs[k])
2440 self._check_removed_columns(cr, log=False)
2442 # iterate on the "object columns"
2443 todo_update_store = []
2444 update_custom_fields = context.get('update_custom_fields', False)
2446 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2447 "FROM pg_class c,pg_attribute a,pg_type t " \
2448 "WHERE c.relname=%s " \
2449 "AND c.oid=a.attrelid " \
2450 "AND a.atttypid=t.oid", (self._table,))
2451 col_data = dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
2454 for k in self._columns:
2455 if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
2457 #Not Updating Custom fields
2458 if k.startswith('x_') and not update_custom_fields:
2461 f = self._columns[k]
2463 if isinstance(f, fields.one2many):
2464 cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,))
2466 if self.pool.get(f._obj):
2467 if f._fields_id not in self.pool.get(f._obj)._columns.keys():
2468 if not self.pool.get(f._obj)._inherits or (f._fields_id not in self.pool.get(f._obj)._inherit_fields.keys()):
2469 raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
2472 cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id))
2473 res = cr.fetchone()[0]
2475 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY (%s) REFERENCES "%s" ON DELETE SET NULL' % (self._obj, f._fields_id, f._table))
2476 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE SET NULL",
2477 self._obj, f._fields_id, f._table)
2478 elif isinstance(f, fields.many2many):
2479 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (f._rel,))
2480 if not cr.dictfetchall():
2481 if not self.pool.get(f._obj):
2482 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2483 ref = self.pool.get(f._obj)._table
2484 # ref = f._obj.replace('.', '_')
2485 cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, UNIQUE("%s","%s")) WITH OIDS' % (f._rel, f._id1, self._table, f._id2, ref, f._id1, f._id2))
2486 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
2487 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
2488 cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref))
2490 self.__schema.debug("Create table '%s': relation between '%s' and '%s'",
2491 f._rel, self._table, ref)
2493 res = col_data.get(k, [])
2494 res = res and [res] or []
2495 if not res and hasattr(f, 'oldname'):
2496 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2497 "FROM pg_class c,pg_attribute a,pg_type t " \
2498 "WHERE c.relname=%s " \
2499 "AND a.attname=%s " \
2500 "AND c.oid=a.attrelid " \
2501 "AND a.atttypid=t.oid", (self._table, f.oldname))
2502 res_old = cr.dictfetchall()
2503 if res_old and len(res_old) == 1:
2504 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
2506 res[0]['attname'] = k
2507 self.__schema.debug("Table '%s': renamed column '%s' to '%s'",
2508 self._table, f.oldname, k)
2512 f_pg_type = f_pg_def['typname']
2513 f_pg_size = f_pg_def['size']
2514 f_pg_notnull = f_pg_def['attnotnull']
2515 if isinstance(f, fields.function) and not f.store and\
2516 not getattr(f, 'nodrop', False):
2517 self.__logger.info('column %s (%s) in table %s removed: converted to a function !\n',
2518 k, f.string, self._table)
2519 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
2521 self.__schema.debug("Table '%s': dropped column '%s' with cascade",
2525 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
2530 ('text', 'char', 'VARCHAR(%d)' % (f.size or 0,), '::VARCHAR(%d)'%(f.size or 0,)),
2531 ('varchar', 'text', 'TEXT', ''),
2532 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2533 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
2534 ('timestamp', 'date', 'date', '::date'),
2535 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2536 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2538 if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
2539 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2540 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
2541 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size))
2542 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2544 self.__schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
2545 self._table, k, f_pg_size, f.size)
2547 if (f_pg_type==c[0]) and (f._type==c[1]):
2548 if f_pg_type != f_obj_type:
2550 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2551 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
2552 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
2553 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2555 self.__schema.debug("Table '%s': column '%s' changed type from %s to %s",
2556 self._table, k, c[0], c[1])
2559 if f_pg_type != f_obj_type:
2563 newname = k + '_moved' + str(i)
2564 cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
2565 "WHERE c.relname=%s " \
2566 "AND a.attname=%s " \
2567 "AND c.oid=a.attrelid ", (self._table, newname))
2568 if not cr.fetchone()[0]:
2572 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2573 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
2574 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2575 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2576 self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
2577 self._table, k, f_pg_type, f._type, newname)
2579 # if the field is required and hasn't got a NOT NULL constraint
2580 if f.required and f_pg_notnull == 0:
2581 # set the field to the default value if any
2582 if k in self._defaults:
2583 if callable(self._defaults[k]):
2584 default = self._defaults[k](self, cr, 1, context)
2586 default = self._defaults[k]
2588 if (default is not None):
2589 ss = self._columns[k]._symbol_set
2590 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
2591 cr.execute(query, (ss[1](default),))
2592 # add the NOT NULL constraint
2595 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2597 self.__schema.debug("Table '%s': column '%s': added NOT NULL constraint",
2600 msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
2601 "If you want to have it, you should update the records and execute manually:\n"\
2602 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2603 self.__schema.warn(msg, self._table, k, self._table, k)
2605 elif not f.required and f_pg_notnull == 1:
2606 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2608 self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2611 indexname = '%s_%s_index' % (self._table, k)
2612 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
2613 res2 = cr.dictfetchall()
2614 if not res2 and f.select:
2615 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2617 if f._type == 'text':
2618 # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
2619 msg = "Table '%s': Adding (b-tree) index for text column '%s'."\
2620 "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
2621 " because there is a length limit for indexable btree values!\n"\
2622 "Use a search view instead if you simply want to make the field searchable."
2623 self.__schema.warn(msg, self._table, k, f._type)
2624 if res2 and not f.select:
2625 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
2627 msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
2628 self.__schema.debug(msg, self._table, k, f._type)
2630 if isinstance(f, fields.many2one):
2631 ref = self.pool.get(f._obj)._table
2632 if ref != 'ir_actions':
2633 cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
2634 'pg_attribute as att1, pg_attribute as att2 '
2635 'WHERE con.conrelid = cl1.oid '
2636 'AND cl1.relname = %s '
2637 'AND con.confrelid = cl2.oid '
2638 'AND cl2.relname = %s '
2639 'AND array_lower(con.conkey, 1) = 1 '
2640 'AND con.conkey[1] = att1.attnum '
2641 'AND att1.attrelid = cl1.oid '
2642 'AND att1.attname = %s '
2643 'AND array_lower(con.confkey, 1) = 1 '
2644 'AND con.confkey[1] = att2.attnum '
2645 'AND att2.attrelid = cl2.oid '
2646 'AND att2.attname = %s '
2647 "AND con.contype = 'f'", (self._table, ref, k, 'id'))
2648 res2 = cr.dictfetchall()
2650 if res2[0]['confdeltype'] != POSTGRES_CONFDELTYPES.get(f.ondelete.upper(), 'a'):
2651 cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"')
2652 cr.execute('ALTER TABLE "' + self._table + '" ADD FOREIGN KEY ("' + k + '") REFERENCES "' + ref + '" ON DELETE ' + f.ondelete)
2654 self.__schema.debug("Table '%s': column '%s': XXX",
2657 netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, "Programming error, column %s->%s has multiple instances !" % (self._table, k))
2659 if not isinstance(f, fields.function) or f.store:
2660 # add the missing field
2661 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2662 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
2663 self.__schema.debug("Table '%s': added column '%s' with definition=%s",
2664 self._table, k, get_pg_type(f)[1])
2667 if not create and k in self._defaults:
2668 if callable(self._defaults[k]):
2669 default = self._defaults[k](self, cr, 1, context)
2671 default = self._defaults[k]
2673 ss = self._columns[k]._symbol_set
2674 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
2675 cr.execute(query, (ss[1](default),))
2677 netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
2679 if isinstance(f, fields.function):
2681 if f.store is not True:
2682 order = f.store[f.store.keys()[0]][2]
2683 todo_update_store.append((order, f, k))
2685 # and add constraints if needed
2686 if isinstance(f, fields.many2one):
2687 if not self.pool.get(f._obj):
2688 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2689 ref = self.pool.get(f._obj)._table
2690 # ref = f._obj.replace('.', '_')
2691 # ir_actions is inherited so foreign key doesn't work on it
2692 if ref != 'ir_actions':
2693 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete))
2694 self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2695 self._table, k, ref, f.ondelete)
2697 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2701 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
2702 self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
2705 msg = "WARNING: unable to set column %s of table %s not null !\n"\
2706 "Try to re-run: openerp-server.py --update=module\n"\
2707 "If it doesn't work, update records and execute manually:\n"\
2708 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
2709 self.__logger.warn(msg, k, self._table, self._table, k)
2711 for order, f, k in todo_update_store:
2712 todo_end.append((order, self._update_store, (f, k)))
2715 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
2716 create = not bool(cr.fetchone())
2718 cr.commit() # start a new transaction
2720 for (key, con, _) in self._sql_constraints:
2721 conname = '%s_%s' % (self._table, key)
2723 cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
2724 existing_constraints = cr.dictfetchall()
2729 'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
2730 'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
2731 self._table, conname, con),
2732 'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
2737 'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
2738 'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
2739 'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
2745 if not existing_constraints:
2746 # constraint does not exists:
2747 sql_actions['add']['execute'] = True
2748 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
2749 elif con.lower() not in [item['condef'].lower() for item in existing_constraints]:
2750 # constraint exists but its definition has changed:
2751 sql_actions['drop']['execute'] = True
2752 sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
2753 sql_actions['add']['execute'] = True
2754 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
2756 # we need to add the constraint:
2757 sql_actions = [item for item in sql_actions.values()]
2758 sql_actions.sort(key=lambda x: x['order'])
2759 for sql_action in [action for action in sql_actions if action['execute']]:
2761 cr.execute(sql_action['query'])
2763 self.__schema.debug(sql_action['msg_ok'])
2765 self.__schema.warn(sql_action['msg_err'])
2769 if hasattr(self, "_sql"):
2770 for line in self._sql.split(';'):
2771 line2 = line.replace('\n', '').strip()
2776 self._parent_store_compute(cr)
    def __init__(self, cr):
        """Register this model in the pool.

        Sets up: _log_access default, per-model copy of _columns, stored
        function-field triggers, SQL-constraint error messages, columns defined
        manually through ir.model.fields, _inherits bookkeeping and sequence
        name.  NOTE(review): several guard/else lines are elided in this view;
        comments below describe only what the visible code shows.

        :param cr: database cursor, used to load manual field definitions
        """
        super(orm, self).__init__(cr)

        if not hasattr(self, '_log_access'):
            # If _log_access is not specified, it takes the same value as _auto.
            self._log_access = getattr(self, "_auto", True)

        # Per-model copy so mutations below do not leak into sibling registries.
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if hasattr(f, 'digits_change'):
            # Drop any previously registered store trigger for this exact
            # (model, field) pair before (re)registering it below.
            def not_this_field(stored_func):
                x, y, z, e, f, l = stored_func
                return x != self._name or y != store_field
            self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
            if not isinstance(f, fields.function):
            # store=True shorthand: recompute on this model itself, default
            # priority 10, no time-window restriction.
            sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
            for object, aa in sm.items():
                (fnct, fields2, order, length) = aa
                (fnct, fields2, order) = aa
                raise except_orm('Error',
                    ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                self.pool._store_function.setdefault(object, [])
                self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
                # Keep triggers ordered by priority (element 4 of the tuple).
                self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))

        for (key, _, msg) in self._sql_constraints:
            # Map constraint name -> user message, for friendly integrity errors.
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields
        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
        cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
        for field in cr.dictfetchall():
            if field['name'] in self._columns:
                # Column attributes rebuilt from the ir.model.fields row.
                'string': field['field_description'],
                'required': bool(field['required']),
                'readonly': bool(field['readonly']),
                # NOTE(review): domain/selection come from the database and are
                # eval'ed below — assumed admin-defined/trusted; confirm.
                'domain': eval(field['domain']) if field['domain'] else None,
                'size': field['size'],
                'ondelete': field['on_delete'],
                'translate': (field['translate']),
                #'select': int(field['select_level'])
            if field['ttype'] == 'selection':
                self._columns[field['name']] = getattr(fields, field['ttype'])(eval(field['selection']), **attrs)
            elif field['ttype'] == 'reference':
                self._columns[field['name']] = getattr(fields, field['ttype'])(selection=eval(field['selection']), **attrs)
            elif field['ttype'] == 'many2one':
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], **attrs)
            elif field['ttype'] == 'one2many':
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], field['relation_field'], **attrs)
            elif field['ttype'] == 'many2many':
                # Auto-generated relation table name for manual m2m fields.
                _rel1 = field['relation'].replace('.', '_')
                _rel2 = field['model'].replace('.', '_')
                _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
        self._inherits_check()
        self._inherits_reload()
        if not self._sequence:
            # Default PostgreSQL sequence name for the id column.
            self._sequence = self._table + '_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        for f in self._columns:
            self._columns[f].restart()
    # Update objects that use this one to refresh their _inherits fields
2867 def _inherits_reload_src(self):
2868 for obj in self.pool.obj_pool.values():
2869 if self._name in obj._inherits:
2870 obj._inherits_reload()
2872 def _inherits_reload(self):
2874 for table in self._inherits:
2875 res.update(self.pool.get(table)._inherit_fields)
2876 for col in self.pool.get(table)._columns.keys():
2877 res[col] = (table, self._inherits[table], self.pool.get(table)._columns[col])
2878 for col in self.pool.get(table)._inherit_fields.keys():
2879 res[col] = (table, self._inherits[table], self.pool.get(table)._inherit_fields[col][2])
2880 self._inherit_fields = res
2881 self._inherits_reload_src()
2883 def _inherits_check(self):
2884 for table, field_name in self._inherits.items():
2885 if field_name not in self._columns:
2886 logging.getLogger('init').info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.' % (field_name, self._name))
2887 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
2888 required=True, ondelete="cascade")
2889 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() != "cascade":
2890 logging.getLogger('init').warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.' % (field_name, self._name))
2891 self._columns[field_name].required = True
2892 self._columns[field_name].ondelete = "cascade"
2894 #def __getattr__(self, name):
2896 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
2897 # (though inherits doesn't use Python inheritance).
2898 # Handles translating between local ids and remote ids.
2899 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
2900 # when you have inherits.
2902 # for model, field in self._inherits.iteritems():
2903 # proxy = self.pool.get(model)
2904 # if hasattr(proxy, name):
2905 # attribute = getattr(proxy, name)
2906 # if not hasattr(attribute, '__call__'):
2910 # return super(orm, self).__getattr__(name)
2912 # def _proxy(cr, uid, ids, *args, **kwargs):
2913 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
2914 # lst = [obj[field].id for obj in objects if obj[field]]
2915 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
    def fields_get(self, cr, user, fields=None, context=None):
        """
        Get the description of list of fields

        :param cr: database cursor
        :param user: current user id
        :param fields: list of fields
        :param context: context arguments, like lang, time zone
        :return: dictionary of field dictionaries, each one describing a field of the business object
        :raise AccessError: * if user has no create/write rights on the requested object
        """
        ira = self.pool.get('ir.model.access')
        # Field definitions are rendered read-only unless the user can write
        # OR create on the model; that flag is forwarded to the parent impl.
        write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
                       ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
        return super(orm, self).fields_get(cr, user, fields, context, write_access)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """Read the requested fields for the records in ``ids``.

        Accepts either a single id or a list of ids; returns a single dict in
        the former case, a list of dicts otherwise.  Checks 'read' model
        access, then delegates the heavy lifting to ``_read_flat``.
        NOTE(review): several guard lines are elided in this view.
        """
        self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
            # No explicit field list: read every own and inherited field.
            fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
        if isinstance(ids, (int, long)):
        # Callers may pass dicts holding an 'id' key; normalize to plain ids.
        select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
        result = self._read_flat(cr, user, select, fields, context, load)
            for key, v in r.items():
        if isinstance(ids, (int, long, dict)):
            # Single-record call: unwrap the one-element result list.
            return result and result[0] or False
    def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
        """Low-level read implementation.

        Fetches the "classic" (stored) columns with a single SQL SELECT
        (including ir.rule security clauses), then fills in translated values,
        delegated ``_inherits`` fields, symbol_get post-processing, and
        function/non-classic fields.  Returns a list of dicts, one per id.
        NOTE(review): several branch/guard lines are elided in this view.
        """
        if fields_to_read == None:
            fields_to_read = self._columns.keys()

        # Construct a clause for the security rules.
        # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
        # or will at least contain self._table.
        rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

        # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
        fields_pre = [f for f in fields_to_read if
                      f == self.CONCURRENCY_CHECK_FIELD
                      or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                      ] + self._inherits.values()

            def convert_field(f):
                # Build the SELECT expression for one stored column.
                f_qual = "%s.%s" % (self._table, f) # need fully-qualified references in case len(tables) > 1
                if f in ('create_date', 'write_date'):
                    # Truncate audit timestamps to whole seconds.
                    return "date_trunc('second', %s) as %s" % (f_qual, f)
                if f == self.CONCURRENCY_CHECK_FIELD:
                    if self._log_access:
                        return "COALESCE(%s.write_date, %s.create_date, now())::timestamp AS %s" % (self._table, self._table, f,)
                    return "now()::timestamp AS %s" % (f,)
                if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                    # bin_size mode: return the blob's length, not its content.
                    return 'length(%s) as "%s"' % (f_qual, f)

            fields_pre2 = map(convert_field, fields_pre)
            order_by = self._parent_order or self._order
            select_fields = ','.join(fields_pre2 + [self._table + '.id'])
            query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
                query += " AND " + (' OR '.join(rule_clause))
            query += " ORDER BY " + order_by
            for sub_ids in cr.split_for_in_conditions(ids):
                    cr.execute(query, [tuple(sub_ids)] + rule_params)
                    if cr.rowcount != len(sub_ids):
                        # Row-count mismatch: some ids were filtered out by an
                        # ir.rule, or the record was already deleted.
                        raise except_orm(_('AccessError'),
                                         _('Operation prohibited by access rules, or performed on an already deleted document (Operation: read, Document type: %s).')
                                         % (self._description,))
                    cr.execute(query, (tuple(sub_ids),))
                res.extend(cr.dictfetchall())
            # No stored column requested: synthesize id-only result rows.
            res = map(lambda x: {'id': x}, ids)

        for f in fields_pre:
            if f == self.CONCURRENCY_CHECK_FIELD:
            if self._columns[f].translate:
                ids = [x['id'] for x in res]
                #TODO: optimize out of this loop
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
                    # Fall back to the stored (source) value when untranslated.
                    r[f] = res_trans.get(r['id'], False) or r[f]

        for table in self._inherits:
            col = self._inherits[table]
            # Inherited fields requested here that are not shadowed locally.
            cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
            res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
                if not record[col]: # if the record is deleted from _inherits table?
                record.update(res3[record[col]])
                if col not in fields_to_read:

        # all fields which need to be post-processed by a simple function (symbol_get)
        fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
                for f in fields_post:
                    r[f] = self._columns[f]._symbol_get(r[f])
        ids = [x['id'] for x in res]

        # all non inherited fields for which the attribute whose name is in load is False
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

        # Compute POST fields
        for f in fields_post:
            # Group multi-fields so a single get() computes the whole group.
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key, val in todo.items():
                res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
                        if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
                        multi_fields = res2.get(record['id'],{})
                            record[pos] = multi_fields.get(pos,[])
                    res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
                            record[f] = res2[record['id']]

        # Field-level group access: blank out values the user may not read.
            for field in vals.copy():
                if field in self._columns:
                    fobj = self._columns[field]
                    for group in groups:
                        module = group.split(".")[0]
                        grp = group.split(".")[1]
                        # Does the user belong to this "module.group" group?
                        cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                                   (grp, module, 'res.groups', user))
                        readonly = cr.fetchall()
                        if readonly[0][0] >= 1:
                        elif readonly[0][0] == 0:
                        # Replace forbidden values with a type-matching blank.
                        if type(vals[field]) == type([]):
                        elif type(vals[field]) == type(0.0):
                        elif type(vals[field]) == type(''):
                            vals[field] = '=No Permission='
    def perm_read(self, cr, user, ids, context=None, details=True):
        """
        Returns some metadata about the given records.

        :param details: if True, \*_uid fields are replaced with the name of the user
        :return: list of ownership dictionaries for each requested record
        :rtype: list of dictionaries with the following keys:

                    * create_uid: user who created the record
                    * create_date: date when the record was created
                    * write_uid: last user who changed the record
                    * write_date: date of the last change to the record
                    * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
        """
        uniq = isinstance(ids, (int, long))
        if self._log_access:
            # Audit columns only exist when _log_access is enabled.
            fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
        quoted_table = '"%s"' % self._table
        fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
        # LEFT JOIN against ir_model_data to recover the record's XML id, if any.
        query = '''SELECT %s, __imd.module, __imd.name
                   FROM %s LEFT JOIN ir_model_data __imd
                       ON (__imd.model = %%s and __imd.res_id = %s.id)
                   WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
        cr.execute(query, (self._name, tuple(ids)))
        res = cr.dictfetchall()
                # Normalize SQL NULLs to False.
                r[key] = r[key] or False
                if details and key in ('write_uid', 'create_uid') and r[key]:
                        r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
                        pass # Leave the numeric uid there
            r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
            del r['name'], r['module']
    def _check_concurrency(self, cr, ids, context):
        """Optimistic-locking check for write/unlink.

        Compares client-supplied __last_update timestamps (stored in the
        context under CONCURRENCY_CHECK_FIELD, keyed "model,id") against the
        database write/create dates and raises a ConcurrencyException if any
        record changed since the client last read it.
        """
        if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        # One (id, timestamp) pair per record; pairs are OR-ed together below.
        check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
        for sub_ids in cr.split_for_in_conditions(ids):
                id_ref = "%s,%s" % (self._name, id)
                # pop(): each client timestamp is consumed exactly once.
                update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
                    ids_to_check.extend([id, update_date])
            if not ids_to_check:
            cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
                # mention the first one only to keep the error message readable
                raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """Verifies that the operation given by ``operation`` is allowed for the user
           according to ir.rules.

           :param operation: one of ``write``, ``unlink``
           :raise except_orm: * if current ir.rules do not permit this operation.
           :return: None if the operation is allowed
        """
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
            where_clause = ' and ' + ' and '.join(where_clause)
            for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                           ' WHERE ' + self._table + '.id IN %s' + where_clause,
                           [sub_ids] + where_params)
                if cr.rowcount != len(sub_ids):
                    # Fewer rows than ids: a rule filtered some out, or they
                    # were already deleted — both are reported as AccessError.
                    raise except_orm(_('AccessError'),
                                     _('Operation prohibited by access rules, or performed on an already deleted document (Operation: %s, Document type: %s).')
                                     % (operation, self._description))
    def unlink(self, cr, uid, ids, context=None):
        """
        Delete records with given ids

        :param cr: database cursor
        :param uid: current user id
        :param ids: id or list of ids
        :param context: (optional) context arguments, like lang, time zone
        :raise AccessError: * if user has no unlink rights on the requested object
                            * if user tries to bypass access rules for unlink on the requested object
        :raise UserError: if the record is default property for other records
        """
        if isinstance(ids, (int, long)):

        # Collect store-trigger work *before* deleting, while rows still exist.
        result_store = self._store_get_values(cr, uid, ids, None, context)

        self._check_concurrency(cr, ids, context)

        self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)

        # Refuse to delete records used as a default property value.
        properties = self.pool.get('ir.property')
        domain = [('res_id', '=', False),
                  ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
                 ]
        if properties.search(cr, uid, domain, context=context):
            raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

        # Cancel any running workflow instances on the deleted records.
        wf_service = netsvc.LocalService("workflow")
            wf_service.trg_delete(uid, self._name, oid, cr)

        self.check_access_rule(cr, uid, ids, 'unlink', context=context)
        pool_model_data = self.pool.get('ir.model.data')
        pool_ir_values = self.pool.get('ir.values')
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('delete from ' + self._table + ' ' \
                       'where id IN %s', (sub_ids,))

            # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
            # as these are not connected with real database foreign keys, and would be dangling references.
            # Step 1. Calling unlink of ir_model_data only for the affected IDS.
            referenced_ids = pool_model_data.search(cr, uid, [('res_id','in',list(sub_ids)),('model','=',self._name)], context=context)
            # Step 2. Marching towards the real deletion of referenced records
            pool_model_data.unlink(cr, uid, referenced_ids, context=context)

            # For the same reason, removing the record relevant to ir_values
            ir_value_ids = pool_ir_values.search(cr, uid,
                ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
                pool_ir_values.unlink(cr, uid, ir_value_ids, context=context)

        # Recompute stored function fields on OTHER models that referenced
        # the deleted records (own-model triggers are pointless now).
        for order, object, store_ids, fields in result_store:
            if object != self._name:
                obj = self.pool.get(object)
                # Keep only target ids that still exist.
                cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
                rids = map(lambda x: x[0], cr.fetchall())
                    obj._store_set_values(cr, uid, rids, fields, context)
    def write(self, cr, user, ids, vals, context=None):
        """
        Update records with given ids with the given field values

        :param cr: database cursor
        :param user: current user id
        :param ids: object id or list of object ids to update according to **vals**
        :param vals: field values to update, e.g {'field_name': new_field_value, ...}
        :type vals: dictionary
        :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
        :type context: dictionary
        :raise AccessError: * if user has no write rights on the requested object
                            * if user tries to bypass access rules for write on the requested object
        :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
        :raise UserError: if a loop would be created in a hierarchy of objects as a result of the operation (such as setting an object as its own parent)

        **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:

            + For a many2many field, a list of tuples is expected.
              Here is the list of tuple that are accepted, with the corresponding semantics ::

                 (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
                 (1, ID, { values })    update the linked record with id = ID (write *values* on it)
                 (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
                 (3, ID)                cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
                 (4, ID)                link to existing record with id = ID (adds a relationship)
                 (5)                    unlink all (like using (3,ID) for all linked records)
                 (6, 0, [IDs])          replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)

                 Example:
                    [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]

            + For a one2many field, a list of tuples is expected.
              Here is the list of tuple that are accepted, with the corresponding semantics ::

                 (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
                 (1, ID, { values })    update the linked record with id = ID (write *values* on it)
                 (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)

                 Example:
                    [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]

            + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
            + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
        """
        # Field-level group access: silently drop values the user may not write.
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
            elif field in self._inherit_fields:
                fobj = self._inherit_fields[field][2]
                for group in groups:
                    module = group.split(".")[0]
                    grp = group.split(".")[1]
                    # Does the user belong to this "module.group" group?
                    cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                               (grp, module, 'res.groups', user))
                    readonly = cr.fetchall()
                    if readonly[0][0] >= 1:
                    elif readonly[0][0] == 0:

        if isinstance(ids, (int, long)):

        self._check_concurrency(cr, ids, context)
        self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)

        # Store-trigger work computed up front, completed again after the write.
        result = self._store_get_values(cr, user, ids, vals.keys(), context) or []

        # No direct update of parent_left/right
        vals.pop('parent_left', None)
        vals.pop('parent_right', None)

        parents_changed = []
        if self._parent_store and (self._parent_name in vals):
            # The parent_left/right computation may take up to
            # 5 seconds. No need to recompute the values if the
            # parent is the same. Get the current value of the parent
            parent_val = vals[self._parent_name]
                query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL)" % \
                                (self._table, self._parent_name, self._parent_name)
                cr.execute(query, (tuple(ids), parent_val))
                query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL)" % \
                                (self._table, self._parent_name)
                cr.execute(query, (tuple(ids),))
            parents_changed = map(operator.itemgetter(0), cr.fetchall())

        # Translated fields are only written directly for the source language.
        totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
            if field in self._columns:
                if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
                    if (not totranslate) or not self._columns[field].translate:
                        # Directly updatable column: build SET clause + params.
                        upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                        upd1.append(self._columns[field]._symbol_set[1](vals[field]))
                    direct.append(field)
                    upd_todo.append(field)
                # Inherited field: handled via the parent model further below.
                updend.append(field)
            if field in self._columns \
                    and hasattr(self._columns[field], 'selection') \
                self._check_selection_field_value(cr, user, field, vals[field], context=context)

        if self._log_access:
            upd0.append('write_uid=%s')
            upd0.append('write_date=now()')

            self.check_access_rule(cr, user, ids, 'write', context=context)
            for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
                           'where id IN %s', upd1 + [sub_ids])
                if cr.rowcount != len(sub_ids):
                    raise except_orm(_('AccessError'),
                                     _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)

                    if self._columns[f].translate:
                        src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
                            # Inserting value to DB
                            self.write(cr, user, ids, {f: vals[f]})
                        self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

        # call the 'set' method of fields which are not classic_write
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

        # default element in context must be removed when call a one2many or many2many
        rel_context = context.copy()
        for c in context.items():
            if c[0].startswith('default_'):
                del rel_context[c[0]]

        for field in upd_todo:
                result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

        # Forward values of inherited fields to the _inherits parent models.
        for table in self._inherits:
            col = self._inherits[table]
            for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                           'where id IN %s', (sub_ids,))
                nids.extend([x[0] for x in cr.fetchall()])

                if self._inherit_fields[val][0] == table:
                self.pool.get(table).write(cr, user, nids, v, context)

        self._validate(cr, user, ids, context)

        # TODO: use _order to set dest at the right position and not first node of parent
        # We can't defer parent_store computation because the stored function
        # fields that are computer may refer (directly or indirectly) to
        # parent_left/right (via a child_of domain)
                # During module init, defer the whole parent-store rebuild.
                self.pool._init_parent[self._name] = True
                order = self._parent_order or self._order
                parent_val = vals[self._parent_name]
                    clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
                    clause, params = '%s IS NULL' % (self._parent_name,), ()

                for id in parents_changed:
                    cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
                    pleft, pright = cr.fetchone()
                    # Width of the moved subtree in the nested-set encoding.
                    distance = pright - pleft + 1

                    # Positions of current siblings, to locate proper insertion point;
                    # this can _not_ be fetched outside the loop, as it needs to be refreshed
                    # after each update, in case several nodes are sequentially inserted one
                    # next to the other (i.e computed incrementally)
                    cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, order), params)
                    parents = cr.fetchall()

                    # Find Position of the element
                    for (parent_pright, parent_id) in parents:
                            position = parent_pright + 1

                    # It's the first node of the parent
                            cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
                            position = cr.fetchone()[0] + 1

                    if pleft < position <= pright:
                        # Target position lies inside the moved subtree itself.
                        raise except_orm(_('UserError'), _('Recursivity Detected.'))

                    if pleft < position:
                        # Shift right, then slide the subtree rightwards.
                        cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                        cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                        cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
                        # Shift right, then slide the subtree leftwards.
                        cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                        cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                        cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))

        result += self._store_get_values(cr, user, ids, vals.keys(), context)
        for order, object, ids_to_update, fields_to_recompute in result:
            key = (object, tuple(fields_to_recompute))
            done.setdefault(key, {})
            # avoid to do several times the same computation
            for id in ids_to_update:
                if id not in done[key]:
                    done[key][id] = True
            self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)

        # Notify the workflow engine that each record changed.
        wf_service = netsvc.LocalService("workflow")
            wf_service.trg_write(user, self._name, id, cr)
3534 # TODO: Should set perm to user.xxx
    def create(self, cr, user, vals, context=None):
        """
        Create a new record with the specified values.

        :param cr: database cursor
        :param user: current user id
        :param vals: field values for new record, e.g {'field_name': field_value, ...}
        :type vals: dictionary
        :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
        :type context: dictionary
        :return: id of new record created
        :raise AccessError: * if user has no create rights on the requested object
                            * if user tries to bypass access rules for create on the requested object
        :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
        :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

        **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
        Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
        to use them.
        """
        # check create permission before doing anything else
        self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)

        # complete vals with values from the _defaults machinery for missing columns
        vals = self._add_missing_default_values(cr, user, vals, context)

        # split values belonging to _inherits parents: either a new parent
        # record is created, or an existing one (given by its id) is reused
        for v in self._inherits:
            if self._inherits[v] not in vals:
                tocreate[v] = {'id': vals[self._inherits[v]]}
        # upd0 accumulates column names, upd1 value placeholders, upd2 the
        # actual values for the final INSERT statement
        (upd0, upd1, upd2) = ('', '', [])
        for v in vals.keys():
            if v in self._inherit_fields:
                # value targets a parent table: route it to that table's dict
                (table, col, col_detail) = self._inherit_fields[v]
                tocreate[table][v] = vals[v]
            if (v not in self._inherit_fields) and (v not in self._columns):
        # Try-except added to filter the creation of those records whose fields are read-only.
        # Example: any dashboard which has all the fields read-only (due to database views).
        cr.execute("SELECT nextval('"+self._sequence+"')")
        raise except_orm(_('UserError'),
            _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))

        id_new = cr.fetchone()[0]
        # create or update the _inherits parent records and link them through
        # the relation column on this table
        for table in tocreate:
            if self._inherits[table] in vals:
                del vals[self._inherits[table]]

            record_id = tocreate[table].pop('id', None)

            if record_id is None or not record_id:
                record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
                self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)

            upd0 += ',' + self._inherits[table]
            upd2.append(record_id)

        # Start: set boolean fields to False when not provided, so that
        # searching on False also matches freshly created records
        bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
        for bool_field in bool_fields:
            if bool_field not in vals:
                vals[bool_field] = False
        # iterate over a copy because entries may be removed while looping
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
                fobj = self._inherit_fields[field][2]
            # readonly fields restricted to groups: keep the value only when
            # the user belongs to one of the listed groups
            for group in groups:
                module = group.split(".")[0]
                grp = group.split(".")[1]
                # NOTE(review): values are interpolated into this SQL; they come
                # from model metadata, not user input, but parameterized
                # queries would still be safer — confirm before changing
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                elif readonly[0][0] == 0:
            # classic (stored) columns go straight into the INSERT; other
            # columns (function/relational) are deferred to upd_todo
            if self._columns[field]._classic_write:
                upd0 = upd0 + ',"' + field + '"'
                upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
                upd2.append(self._columns[field]._symbol_set[1](vals[field]))
                if not isinstance(self._columns[field], fields.related):
                    upd_todo.append(field)
            if field in self._columns \
                    and hasattr(self._columns[field], 'selection') \
                self._check_selection_field_value(cr, user, field, vals[field], context=context)
        if self._log_access:
            upd0 += ',create_uid,create_date'
        cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
        # record-level access rules are checked after the INSERT, on the new id
        self.check_access_rule(cr, user, [id_new], 'create', context=context)
        # compute deferred (function) fields in dependency order
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

        if self._parent_store and not context.get('defer_parent_store_computation'):
                # during module init the whole parent_left/right tree is
                # recomputed later; just flag the model
                self.pool._init_parent[self._name] = True
                parent = vals.get(self._parent_name, False)
                    cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
                    result_p = cr.fetchall()
                    for (pleft,) in result_p:
                        cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
                        pleft_old = cr.fetchone()[0]
                    # no parent: append the new node at the far right of the tree
                    cr.execute('select max(parent_right) from '+self._table)
                    pleft = cr.fetchone()[0] or 0
                # shift everything right of the insertion point by 2 and give
                # the new node the freed (pleft+1, pleft+2) interval
                cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
                cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
                cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))

        # 'default_*' keys must be removed from the context before calling a
        # one2many or many2many setter, otherwise the defaults would leak into
        # the related records
        rel_context = context.copy()
        for c in context.items():
            if c[0].startswith('default_'):
                del rel_context[c[0]]

        for field in upd_todo:
            result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
        self._validate(cr, user, [id_new], context)

        if not context.get('no_store_function', False):
            result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
            # trigger stored function-field recomputation, skipping duplicates
            for order, object, ids, fields2 in result:
                if not (object, ids, fields2) in done:
                    self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
                    done.append((object, ids, fields2))

        if self._log_create and not (context and context.get('no_store_function', False)):
            message = self._description + \
                self.name_get(cr, user, [id_new], context=context)[0][1] + \
                "' " + _("created.")
            self.log(cr, user, id_new, message, True, context=context)
        # fire the workflow creation trigger for the new record
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_create(user, self._name, id_new, cr)
    def _store_get_values(self, cr, uid, ids, fields, context):
        """Returns an ordered list of fields.functions to call due to
        an update operation on ``fields`` of records with ``ids``,
        obtained by calling the 'store' functions of these fields,
        as setup by their 'store' attribute.

        :return: [(priority, model_name, [record_ids,], [function_fields,])]
        """
        # FIXME: rewrite, cleanup, use real variable names
        # e.g.: http://pastie.org/1222060
        # each _store_function entry looks like
        # (model, field_name, ids-getter, trigger-field-list, priority)
        fncts = self.pool._store_function.get(self._name, [])
        for fnct in range(len(fncts)):
            # a store function is triggered when one of the written fields
            # appears in its trigger list
            for f in (fields or []):
                if f in fncts[fnct][3]:
                    result.setdefault(fncts[fnct][0], {})

                    # uid == 1 for accessing objects having rules defined on store fields
                    ids2 = fncts[fnct][2](self, cr, 1, ids, context)
                    for id in filter(None, ids2):
                        result[fncts[fnct][0]].setdefault(id, [])
                        result[fncts[fnct][0]][id].append(fnct)
        # regroup ids sharing the same set of functions, then bucket the
        # resulting work items by priority
        # NOTE(review): the accumulator named 'dict' (defined on a line not
        # visible here) shadows the builtin — renaming it would be a
        # behavior-neutral cleanup
        for object in result:
            for id, fnct in result[object].items():
                k2.setdefault(tuple(fnct), [])
                k2[tuple(fnct)].append(id)
            for fnct, id in k2.items():
                dict.setdefault(fncts[fnct[0]][4], [])
                dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4], object, id, map(lambda x: fncts[x][1], fnct)))
    def _store_set_values(self, cr, uid, ids, fields, context):
        """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
        respecting ``multi`` attributes), and stores the resulting values in the database directly."""
        if self._log_access:
            # skip recomputation for records whose write_date pushed past
            # 'now' by a store trigger delay (i[5], expressed in hours)
            cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
                    field_dict.setdefault(r[0], [])
                    res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
                    write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
                    for i in self.pool._store_function.get(self._name, []):
                            up_write_date = write_date + datetime.timedelta(hours=i[5])
                            if datetime.datetime.now() < up_write_date:
                                    field_dict[r[0]].append(i[1])
        # group fields by their 'multi' attribute so that multi-fields are
        # computed together in a single get() call
            if self._columns[f]._multi not in keys:
                keys.append(self._columns[f]._multi)
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
                # uid == 1 for accessing objects having rules defined on store fields
                result = self._columns[val[0]].get(cr, self, ids, val, 1, context=context)
                for id, value in result.items():
                        # drop fields excluded above by the write_date check
                        for f in value.keys():
                            if f in field_dict[id]:
                        if self._columns[v]._type in ('many2one', 'one2one'):
                                # m2o get() results are (id, name) pairs; keep the id
                                value[v] = value[v][0]
                        upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
                        upd1.append(self._columns[v]._symbol_set[1](value[v]))
                    cr.execute('update "' + self._table + '" set ' + \
                        ','.join(upd0) + ' where id = %s', upd1)
                    # uid == 1 for accessing objects having rules defined on store fields
                    result = self._columns[f].get(cr, self, ids, f, 1, context=context)
                    for r in result.keys():
                        if r in field_dict.keys():
                            if f in field_dict[r]:
                    for id, value in result.items():
                        if self._columns[f]._type in ('many2one', 'one2one'):
                        cr.execute('update "' + self._table + '" set ' + \
                            '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
    def perm_write(self, cr, user, ids, fields, context=None):
        """Removed API: writing permission metadata is no longer supported; always raises."""
        raise NotImplementedError(_('This method does not exist anymore'))
    # TODO: improve the handling of NULL values here
    def _where_calc(self, cr, user, domain, active_test=True, context=None):
        """Computes the WHERE clause needed to implement an OpenERP domain.

        :param domain: the domain to compute
        :type domain: list
        :param active_test: whether the default filtering of records with ``active``
                            field set to ``False`` should be applied.
        :return: the query expressing the given domain as provided in domain
        :rtype: osv.query.Query
        """
        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitly asked for
        if 'active' in (self._columns.keys() + self._inherit_fields.keys()) and (active_test and context.get('active_test', True)):
                # only add the implicit ('active', '=', 1) leaf when the caller
                # did not already constrain 'active' in the domain
                active_in_args = False
                    if a[0] == 'active':
                        active_in_args = True
                if not active_in_args:
                    domain.insert(0, ('active', '=', 1))
                domain = [('active', '=', 1)]

            # let the expression parser translate the domain into SQL pieces
            e = expression.expression(domain)
            e.parse(cr, user, self, context)
            tables = e.get_tables()
            where_clause, where_params = e.to_sql()
            where_clause = where_clause and [where_clause] or []
            # empty domain: no WHERE needed, select from this model's table
            where_clause, where_params, tables = [], [], ['"%s"' % self._table]

        return Query(tables, where_clause, where_params)
    def _check_qorder(self, word):
        """Validate an ORDER BY specification against ``regex_order``.

        :param word: comma-separated list of field names, each optionally
                     followed by asc/desc
        :raise except_orm: if ``word`` is not a valid order specification
        """
        if not regex_order.match(word):
            raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
    def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
        """Add what's missing in ``query`` to implement all appropriate ir.rules
        (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

        :param query: the current query object
        """
        def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
            # merge one rule's (clause, params, tables) triple into the query
                if parent_model and child_object:
                    # as inherited rules are being applied, we need to add the missing JOIN
                    # to reach the parent table (if it was not JOINed yet in the query)
                    child_object._inherits_join_add(parent_model, query)
                query.where_clause += added_clause
                query.where_clause_params += added_params
                for table in added_tables:
                    if table not in query.tables:
                        query.tables.append(table)

        # apply main rules on the object
        rule_obj = self.pool.get('ir.rule')
        apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))

        # apply ir.rules from the parents (through _inherits)
        for inherited_model in self._inherits:
            kwargs = dict(parent_model=inherited_model, child_object=self) # workaround for python2.5
            apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
    def _generate_m2o_order_by(self, order_field, query):
        """
        Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
        either native m2o fields or function/related fields that are stored, including
        intermediate JOINs for inheritance if required.

        :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
        """
        if order_field not in self._columns and order_field in self._inherit_fields:
            # also add missing joins for reaching the table containing the m2o field
            qualified_field = self._inherits_join_calc(order_field, query)
            order_field_column = self._inherit_fields[order_field][2]
            # field lives directly on this model's own table
            qualified_field = '"%s"."%s"' % (self._table, order_field)
            order_field_column = self._columns[order_field]

        assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
        if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
            # non-stored function/related fields have no SQL column to sort on
            logging.getLogger('orm.search').debug("Many2one function/related fields must be stored " \
                "to be used as ordering fields! Ignoring sorting for %s.%s",
                self._name, order_field)

        # figure out the applicable order_by for the m2o
        dest_model = self.pool.get(order_field_column._obj)
        m2o_order = dest_model._order
        if not regex_order.match(m2o_order):
            # _order is complex, can't use it here, so we default to _rec_name
            m2o_order = dest_model._rec_name

            # extract the field names, to be able to qualify them and add desc/asc
            for order_part in m2o_order.split(","):
                m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
            m2o_order = m2o_order_list

        # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
        # as we don't want to exclude results that have NULL values for the m2o
        src_table, src_field = qualified_field.replace('"','').split('.', 1)
        query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
        qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
        return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
    def _generate_order_by(self, order_spec, query):
        """
        Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
        a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

        :raise except_orm: in case order_spec is malformed
        """
        # default to the model's own _order when no usable spec is given
        order_by_clause = self._order
            order_by_elements = []
            self._check_qorder(order_spec)
            for order_part in order_spec.split(','):
                order_split = order_part.strip().split(' ')
                order_field = order_split[0].strip()
                order_direction = order_split[1].strip() if len(order_split) == 2 else ''
                if order_field == 'id':
                    # 'id' is always directly sortable on our own table
                    order_by_clause = '"%s"."%s"' % (self._table, order_field)
                elif order_field in self._columns:
                    order_column = self._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = '"%s"."%s"' % (self._table, order_field)
                    elif order_column._type == 'many2one':
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                        continue # ignore non-readable or "non-joinable" fields
                elif order_field in self._inherit_fields:
                    parent_obj = self.pool.get(self._inherit_fields[order_field][0])
                    order_column = parent_obj._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = self._inherits_join_calc(order_field, query)
                    elif order_column._type == 'many2one':
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                        continue # ignore non-readable or "non-joinable" fields
                if isinstance(inner_clause, list):
                    # m2o ordering may expand into several qualified columns
                    for clause in inner_clause:
                        order_by_elements.append("%s %s" % (clause, order_direction))
                    order_by_elements.append("%s %s" % (inner_clause, order_direction))
            if order_by_elements:
                order_by_clause = ",".join(order_by_elements)

        return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
        This is ok at the security level because this method is private and not callable through XML-RPC.

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)

        # build the SQL pieces: domain -> WHERE, then ir.rules, then ORDER BY
        query = self._where_calc(cr, user, args, context=context)
        self._apply_ir_rules(cr, user, query, 'read', context=context)
        order_by = self._generate_order_by(order, query)
        from_clause, where_clause, where_clause_params = query.get_sql()

        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        where_str = where_clause and (" WHERE %s" % where_clause) or ''

            # count mode: return the number of matching records, not their ids
            cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)

        cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
        return [x[0] for x in res]
    # Returns the distinct values ever entered for one field.
    # This is used, for example, in the client when the user hits enter on
    # a char field, to offer completions from previously entered values.
    def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
        """Return the distinct values already stored for ``field`` (used for
        client-side completion).

        Delegates to the parent model when the field is inherited through
        _inherits, otherwise to the column object's own search().
        """
        if field in self._inherit_fields:
            return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
            return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
    def copy_data(self, cr, uid, id, default=None, context=None):
        """
        Copy given record's data with all its fields values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: field values to override in the original values of the copied record
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: dictionary containing all the field values
        """
        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_data_seen',{})
        if id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(id)

        # 'state' is special-cased: a copy always restarts from the default state
        if 'state' not in default:
            if 'state' in self._defaults:
                if callable(self._defaults['state']):
                    default['state'] = self._defaults['state'](self, cr, uid, context)
                    default['state'] = self._defaults['state']

        # read the source record without 'lang' so translatable fields come
        # back in their original (untranslated) form
        context_wo_lang = context.copy()
        if 'lang' in context:
            del context_wo_lang['lang']
        data = self.read(cr, uid, [id,], context=context_wo_lang)
            raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))

        fields = self.fields_get(cr, uid, context=context)
            ftype = fields[f]['type']

            # audit-trail columns must never be copied over
            if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):

                data[f] = default[f]
            elif 'function' in fields[f]:
            elif ftype == 'many2one':
                    # read() returns (id, name) pairs for m2o: keep only the id
                    data[f] = data[f] and data[f][0]
            elif ftype in ('one2many', 'one2one'):
                rel = self.pool.get(fields[f]['relation'])
                    # duplicate following the order of the ids
                    # because we'll rely on it later for copying
                    # translations in copy_translation()!
                    for rel_id in data[f]:
                        # the lines are first duplicated using the wrong (old)
                        # parent but then are reassigned to the correct one thanks
                        # to the (0, 0, ...)
                        d = rel.copy_data(cr, uid, rel_id, context=context)
                        res.append((0, 0, d))
            elif ftype == 'many2many':
                # (6, 0, ids) replaces the whole set of linked records
                data[f] = [(6, 0, data[f])]

        # make sure we don't break the current parent_store structure and
        # force a clean recompute!
        for parent_column in ['parent_left', 'parent_right']:
            data.pop(parent_column, None)

        # the _inherits parents will be duplicated by create(), so their link
        # columns must not be carried over
        for v in self._inherits:
            del data[self._inherits[v]]
    def copy_translations(self, cr, uid, old_id, new_id, context=None):
        """Duplicate the ir.translation entries of ``old_id`` onto ``new_id``,
        recursing into one2one/one2many children (copy_data() duplicated them
        in the same order, so old and new children can be zipped together).
        """
        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_translations_seen',{})
        if old_id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(old_id)

        trans_obj = self.pool.get('ir.translation')
        fields = self.fields_get(cr, uid, context=context)

        translation_records = []
        for field_name, field_def in fields.items():
            # we must recursively copy the translations for o2o and o2m
            if field_def['type'] in ('one2one', 'one2many'):
                target_obj = self.pool.get(field_def['relation'])
                old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
                # here we rely on the order of the ids to match the translations
                # as foreseen in copy_data()
                old_children = sorted(old_record[field_name])
                new_children = sorted(new_record[field_name])
                for (old_child, new_child) in zip(old_children, new_children):
                    target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
            # and for translatable fields we keep them for copy
            elif field_def.get('translate'):
                if field_name in self._columns:
                    trans_name = self._name + "," + field_name
                elif field_name in self._inherit_fields:
                    trans_name = self._inherit_fields[field_name][0] + "," + field_name
                    trans_ids = trans_obj.search(cr, uid, [
                            ('name', '=', trans_name),
                            ('res_id', '=', old_id)
                    translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))

        for record in translation_records:
            # re-point each collected translation at the new record
            record['res_id'] = new_id
            trans_obj.create(cr, uid, record, context=context)
    def copy(self, cr, uid, id, default=None, context=None):
        """
        Duplicate record with given id updating it with default values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: id of the newly created record
        """
        # work on a private copy of the context so copy_data()'s recursion
        # markers ('__copy_data_seen', ...) do not leak back to the caller
        context = context.copy()
        data = self.copy_data(cr, uid, id, default, context)
        new_id = self.create(cr, uid, data, context)
        self.copy_translations(cr, uid, id, new_id, context)
4200 def exists(self, cr, uid, ids, context=None):
4201 if type(ids) in (int, long):
4203 query = 'SELECT count(1) FROM "%s"' % (self._table)
4204 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
4205 return cr.fetchone()[0] == len(ids)
4207 def check_recursion(self, cr, uid, ids, context=None, parent=None):
4208 warnings.warn("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
4209 self._name, DeprecationWarning, stacklevel=3)
4210 assert parent is None or parent in self._columns or parent in self._inherit_fields,\
4211 "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
4212 return self._check_recursion(cr, uid, ids, context, parent)
    def _check_recursion(self, cr, uid, ids, context=None, parent=None):
        """
        Verifies that there is no loop in a hierarchical structure of records,
        by following the parent relationship using the **parent** field until a loop
        is detected or until a top-level record is found.

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids of records to check
        :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
        :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
        """
            parent = self._parent_name
        query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
            # climb one generation per pass; chunk by cr.IN_MAX to keep the
            # IN clause within the cursor's parameter limit
            # NOTE(review): range() iterates over len(ids) but the slice reads
            # ids_parent — looks like it should be len(ids_parent); confirm
            # against upstream before changing
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
                cr.execute(query, (tuple(sub_ids_parent),))
                ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
            ids_parent = ids_parent2
            # a loop exists if any ancestor is one of the starting records
            for i in ids_parent:
    def _get_xml_ids(self, cr, uid, ids, *args, **kwargs):
        """Find out the XML ID(s) of any database record.

        **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``

        :return: map of ids to the list of their fully qualified XML IDs
                 (empty list when there's none).
        """
        model_data_obj = self.pool.get('ir.model.data')
        data_ids = model_data_obj.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
        data_results = model_data_obj.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
            # can't use dict.fromkeys() as the list would be shared!
        for record in data_results:
            result[record['res_id']].append('%(module)s.%(name)s' % record)
    def get_xml_id(self, cr, uid, ids, *args, **kwargs):
        """Find out the XML ID of any database record, if there
        is one. This method works as a possible implementation
        for a function field, to be able to add it to any
        model object easily, referencing it as ``osv.osv.get_xml_id``.

        When multiple XML IDs exist for a record, only one
        of them is returned (randomly).

        **Synopsis**: ``get_xml_id(cr, uid, ids) -> { 'id': 'module.xml_id' }``

        :return: map of ids to their fully qualified XML ID,
                 defaulting to an empty string when there's none
                 (to be usable as a function field).
        """
        results = self._get_xml_ids(cr, uid, ids)
        # collapse each id's list of XML IDs down to a single (arbitrary) one
        for k, v in results.items():
4285 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: