1 # -*- encoding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 ##############################################################################
24 # Object relational mapping to postgresql module
25 # . Hierarchical structure
26 # . Constraints consistency, validations
27 # . Object meta Data depends on its status
28 # . Optimised processing by complex query (multiple actions at once)
29 # . Default fields value
30 # . Permissions optimisation
31 # . Persistent object: DB postgresql
33 # . Multi-level caching system
34 # . 2 different inheritance mechanisms
36 # - classicals (varchar, integer, boolean, ...)
37 # - relations (one2many, many2one, many2many)
54 from tools.translate import _
59 from lxml import etree
61 sys.stderr.write("ERROR: Import lxml module\n")
62 sys.stderr.write("ERROR: Try to install the python-lxml package\n")
65 from tools.config import config
67 regex_order = re.compile('^([a-z0-9_]+( *desc| *asc)?( *, *|))+$', re.I)
def last_day_of_current_month():
    """Return the last day of the current month as a 'YYYY-MM-DD' string.

    :return: e.g. '2009-02-28'
    """
    today = datetime.date.today()
    last_day = calendar.monthrange(today.year, today.month)[1]
    # BUGFIX: the original concatenated the day number into a strftime
    # format string and called time.strftime() separately, which could
    # disagree with `today` if the call straddled midnight on the last
    # day of a month; format the single date object instead.
    return today.replace(day=last_day).strftime('%Y-%m-%d')
def intersect(la, lb):
    """Return the items of ``la`` that also occur in ``lb``.

    Order and duplicates of ``la`` are preserved.
    """
    return [item for item in la if item in lb]
class except_orm(Exception):
    """Exception raised by the ORM layer, carrying an error name and a
    descriptive value in ``args``."""
    def __init__(self, name, value):
        # Exception.__init__ stores its positional arguments in self.args,
        # which is exactly what the original assignment did by hand.
        super(except_orm, self).__init__(name, value)
# Raised when a browse_record is built with an invalid or missing id
# (see browse_record.__init__). NOTE(review): class body not visible
# in this extract.
class BrowseRecordError(Exception):
# Readonly python database object browser
class browse_null(object):
    # Null placeholder returned in place of a browse_record when a relation
    # is empty or unset: attribute access degrades to a falsy value rather
    # than raising. NOTE(review): several method bodies are missing from
    # this extract.
    def __getitem__(self, name):
    def __getattr__(self, name):
        return False  # XXX: return self ?
    def __nonzero__(self):
    def __unicode__(self):
115 # TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """A list of browse records that also remembers the context it was
    created with (exposed as the ``context`` attribute)."""
    def __init__(self, lst, context=None):
        list.__init__(self, lst)
        self.context = context
class browse_record(object):
    # Lazy, cached proxy over one database record: field values are fetched
    # on first access and shared with sibling records through a per-model
    # cache dict. NOTE(review): many original lines are missing from this
    # extract; only the visible statements are documented below.
    def __init__(self, cr, uid, id, table, cache, context=None, list_class = None, fields_process={}):
        """
        table : the object (inherited from orm)
        context : a dictionary with an optional context
        """
        # NOTE(review): the assignments of self._cr, self._uid, self._id,
        # self._table and self._cache are on lines missing from this
        # extract; the methods below rely on them.
        self._list_class = list_class or browse_record_list
        self._table_name = self._table._name
        self._context = context
        self._fields_process = fields_process
        # one shared cache dict per model name
        cache.setdefault(table._name, {})
        self._data = cache[table._name]
        if not (id and isinstance(id, (int, long,))):
            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))
        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # Return the value of field `name`, reading it (and prefetching
        # sibling fields/records) from the database on a cache miss.
        if name not in self._data[self._id]:
            # build the list of fields we will fetch
            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, name):
                # model attribute: methods are re-bound with cr/uid/current id
                if isinstance(getattr(self._table, name), (types.MethodType, types.LambdaType, types.FunctionType)):
                    return lambda *args, **argv: getattr(self._table, name)(self._cr, self._uid, [self._id], *args, **argv)
                return getattr(self._table, name)
            # unknown field: log a programming error
            # NOTE(review): the original 'else:' / raise lines are missing
            # from this extract.
            logger = netsvc.Logger()
            logger.notifyChannel('orm', netsvc.LOG_ERROR, "Programming error: field '%s' does not exist in object '%s' !" % (name, self._table._name))
            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            # gen the list of "local" (ie not inherited) fields which are classic or many2one
            ffields = filter(lambda x: x[1]._classic_write, self._table._columns.items())
            # gen the list of inherited fields
            inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
            # complete the field list with the inherited fields which are classic or many2one
            ffields += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
            ffields = [(name, col)]
            # read, in one call, every cached record still missing this field
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            fffields = map(lambda x: x[0], ffields)
            datas = self._table.read(self._cr, self._uid, ids, fffields, context=self._context, load="_classic_write")
            if self._fields_process:
                # apply per-type post-processing hooks; presumably used by
                # the report engine -- TODO confirm
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid,self.pool.get('res.lang').search(self._cr, self._uid,[('code','=',lang)])[0])
                # NOTE(review): the loops binding f, n and d are missing
                # from this extract.
                if f._type in self._fields_process:
                    d[n] = self._fields_process[f._type](d[n])
                    d[n].set_value(self._cr, self._uid, d[n], self, f, lang_obj)
            # create browse records for 'remote' objects
            if f._type in ('many2one', 'one2one'):
                obj = self._table.pool.get(f._obj)
                if type(data[n]) in (type([]),type( (1,) )):
                    data[n] = browse_record(self._cr, self._uid, ids2, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                # empty relation -> null placeholder (surrounding
                # if/else headers are missing from this extract)
                data[n] = browse_null()
                data[n] = browse_null()
            elif f._type in ('one2many', 'many2many') and len(data[n]):
                data[n] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(f._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in data[n]], self._context)
            self._data[data['id']].update(data)
        return self._data[self._id][name]

    def __getattr__(self, name):
        # Attribute access delegates to __getitem__ (body missing from this
        # extract); on failure it should
        # raise an AttributeError exception.

    def __contains__(self, name):
        # `name in record` is true for real, inherited and model-level fields
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

    def __hasattr__(self, name):
        # NOTE(review): '__hasattr__' is not a real Python protocol method;
        # its body is missing from this extract.  The return below belongs
        # to __repr__/__str__, whose 'def' line is also missing.
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # records compare equal when model name and id match; assumes
        # `other` is a browse_record -- TODO confirm callers never pass
        # other types
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

        # NOTE(review): the following return belongs to __hash__, whose
        # 'def' line is missing from this extract.
        return hash((self._table_name, self._id))
# NOTE(review): the enclosing 'def get_pg_type(f):' line, the docstring
# opening and the 'type_dict = {' opener are missing from this extract;
# the fragment maps an orm field object to a pair:
    """
    (type returned by postgres when the column was created, type expression to create the column)
    """
    # orm field class -> postgres column type
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.integer_big: 'int8',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    if type(f) in type_dict:
        f_type = (type_dict[type(f)], type_dict[type(f)])
    elif isinstance(f, fields.float):
        # explicit digits -> NUMERIC(precision, scale); otherwise a plain
        # double (the 'if f.digits:'/'else:' headers are missing here)
        f_type = ('numeric', 'NUMERIC(%d,%d)' % (f.digits[0], f.digits[1]))
        f_type = ('float8', 'DOUBLE PRECISION')
    elif isinstance(f, (fields.char, fields.reference)):
        f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
    elif isinstance(f, fields.selection):
        # column width = longest selection key, at least 16
        if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
            f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
        elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
            f_size = (hasattr(f, 'size') and f.size) or 16
            f_type = ('int4', 'INTEGER')
        f_type = ('varchar', 'VARCHAR(%d)' % f_size)
    elif isinstance(f, fields.function) and eval('fields.'+(f._type)) in type_dict:
        # function fields map through the type of the field they emulate
        t = eval('fields.'+(f._type))
        f_type = (type_dict[t], type_dict[t])
    elif isinstance(f, fields.function) and f._type == 'float':
        f_type = ('float8', 'DOUBLE PRECISION')
    elif isinstance(f, fields.function) and f._type == 'selection':
        f_type = ('text', 'text')
    elif isinstance(f, fields.function) and f._type == 'char':
        f_type = ('varchar', 'VARCHAR(%d)' % (f.size))
        # unsupported field type: warn and (presumably) return None
        logger = netsvc.Logger()
        logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
class orm_template(object):
    # Abstract base class of all models; concrete subclasses define
    # _name, _columns, etc. NOTE(review): most class-level defaults are
    # missing from this extract.
    _parent_name = 'parent_id'      # many2one field used for hierarchies
    _parent_store = False           # enable nested-set (parent_left/right) storage
    _parent_order = False
    # pseudo-field used by clients for optimistic concurrency checking
    CONCURRENCY_CHECK_FIELD = '__last_update'
    def _field_create(self, cr, context={}):
        # Synchronise this model's description into ir_model and
        # ir_model_fields; when context['module'] is set, also record
        # xml-ids in ir_model_data for the created rows.
        # NOTE(review): 'context={}' is a shared mutable default; several
        # lines ('if not cr.rowcount:', 'vals = {', closing parentheses,
        # if/else headers) are missing from this extract.
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
        # model not registered yet: insert it with a fresh sequence id
        cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
        model_id = cr.fetchone()[0]
        cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
        model_id = cr.fetchone()[0]
        if 'module' in context:
            # make sure the 'model_<name>' xml-id exists for this model
            name_id = 'model_'+self._name.replace('.','_')
            cr.execute('select * from ir_model_data where name=%s and res_id=%s', (name_id,model_id))
            cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                (name_id, context['module'], 'ir.model', model_id)
        # load the columns already registered for this model
        # NOTE(review): the 'cols = {}' initialisation is missing here
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec
        for (k, f) in self._columns.items():
            # description of one field ('vals = {' opener missing here)
                'model_id': model_id,
                'field_description': f.string.replace("'", " "),
                'relation': f._obj or 'NULL',
                'view_load': (f.view_load and 1) or 0,
                'select_level': str(f.select or 0),
                'readonly':(f.readonly and 1) or 0,
                'required':(f.required and 1) or 0,
                'selectable' : (f.selectable and 1) or 0,
            # unknown field: allocate an id and insert the row
            cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
            id = cr.fetchone()[0]
            cr.execute("""INSERT INTO ir_model_fields (
                id, model_id, model, name, field_description, ttype,
                relation,view_load,state,select_level
                %s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                vals['relation'], bool(vals['view_load']), 'base',
            if 'module' in context:
                # register an xml-id for the new field, de-duplicating the
                # name with the field id when it already exists
                name1 = 'field_' + self._table + '_' + k
                cr.execute("select name from ir_model_data where name=%s", (name1,))
                name1 = name1 + "_" + str(id)
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                    (name1, context['module'], 'ir.model.fields', id)
            # known field: push any changed attribute back to the database
            for key, val in vals.items():
                if cols[k][key] != vals[key]:
                    cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                    cr.execute("""UPDATE ir_model_fields SET
                        model_id=%s, field_description=%s, ttype=%s, relation=%s,
                        view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s
                        model=%s AND name=%s""", (
                        vals['model_id'], vals['field_description'], vals['ttype'],
                        vals['relation'], bool(vals['view_load']),
                        vals['select_level'], bool(vals['readonly']),bool(vals['required']),bool(vals['selectable']),vals['model'], vals['name']
408 def _auto_init(self, cr, context={}):
409 self._field_create(cr, context)
    def __init__(self, cr):
        # Validate the model's configuration at instantiation time and
        # derive defaults. NOTE(review): a few lines are missing from this
        # extract.
        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name
            logger = netsvc.Logger()
            logger.notifyChannel('orm', netsvc.LOG_ERROR, msg )
            raise except_orm('ValueError', msg )
        if not self._description:
            self._description = self._name
        # default SQL table name: model name with dots replaced by underscores
        self._table = self._name.replace('.', '_')
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process={}):
        # Return a browse_record for an id, or a browse_record_list for a
        # list of ids. NOTE(review): the 'cache = {}' initialisation used
        # below is on a line missing from this extract, as is the fallback
        # return for other `select` types.
        self._list_class = list_class or browse_record_list
        # need to accepts ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context)
    def __export_row(self, cr, uid, row, fields, context=None):
        # Build the export line(s) for one browse_record `row`; one2many
        # sub-records produce extra lines. NOTE(review): many lines
        # (loop/if headers, return statements, variable initialisations)
        # are missing from this extract.
        def check_type(field_type):
            # placeholder value used for an empty cell of the given type
            # (the return statements are missing from this extract)
            if field_type == 'float':
            elif field_type == 'integer':
            elif field_type == 'boolean':
        data = map(lambda x: '', range(len(fields)))
        for fpos in range(len(fields)):
            # when exporting an 'id' column, resolve the record's xml-id
            # from ir.model.data
            model_data = self.pool.get('ir.model.data')
            data_ids = model_data.search(cr, uid, [('model','=',r._table_name),('res_id','=',r['id'])])
            d = model_data.read(cr, uid, data_ids, ['name','module'])[0]
            r = '%s.%s'%(d['module'],d['name'])
            if f[i] in self._columns:
                r = check_type(self._columns[f[i]]._type)
            elif f[i] in self._inherit_fields:
                r = check_type(self._inherit_fields[f[i]][2]._type)
            if isinstance(r, (browse_record_list, list)):
                # relation list: recurse into sub-rows with the remaining
                # path components
                fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                lines2 = self.__export_row(cr, uid, row2, fields2,
                for fpos2 in range(len(fields)):
                    if lines2 and lines2[0][fpos2]:
                        data[fpos2] = lines2[0][fpos2]
            # many2many rendered as a comma-separated list of names
            if isinstance(rr.name, browse_record):
                dt += rr.name or '' + ','
            if isinstance(r, browse_record):
                data[fpos] = tools.ustr(r or '')
        return [data] + lines
    def export_data(self, cr, uid, ids, fields_to_export, context=None):
        # Export the '/'-separated field paths of `fields_to_export` for the
        # given ids. Returns {'datas': rows} or, in import-compatible mode,
        # {'warning': msg} listing the columns that cannot be exported.
        # NOTE(review): the initialisations of warning_fields and datas are
        # on lines missing from this extract.
        imp_comp = context.get('import_comp',False)
        cols = self._columns.copy()
        for f in self._inherit_fields:
            cols.update({f: self._inherit_fields[f][2]})
        fields_to_export = map(lambda x: x.split('/'), fields_to_export)
        fields_export = fields_to_export+[]  # shallow copy
        for field in fields_export:
            # import-compatible mode refuses sub-field paths and
            # non-stored function / related / one2many columns
            if imp_comp and len(field)>1:
                warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
            elif len (field) <=1:
                if imp_comp and cols.get(field and field[0],False):
                    if ((isinstance(cols[field[0]], fields.function) and not cols[field[0]].store) \
                            or isinstance(cols[field[0]], fields.related)\
                            or isinstance(cols[field[0]], fields.one2many)):
                        warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
        if imp_comp and len(warning_fields):
            warning = 'Following columns cannot be exported since you select to be import compatible.\n%s' %('\n'.join(warning_fields))
            return {'warning' : warning}
        for row in self.browse(cr, uid, ids, context):
            datas += self.__export_row(cr, uid, row, fields_to_export, context)
        return {'datas':datas}
    def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
        # Import rows of `datas` (list of value lists) matching the
        # '/'-separated column paths in `fields`. NOTE(review): large parts
        # of this method are missing from this extract.
        fields = map(lambda x: x.split('/'), fields)
        logger = netsvc.Logger()
        ir_model_data_obj = self.pool.get('ir.model.data')
        def _check_db_id(self, model_name, db_id):
            # Ensure `db_id` exists in `model_name`.
            # NOTE(review): the guarding 'if not len(ids):' line is missing
            # from this extract, making the raise look unconditional.
            obj_model = self.pool.get(model_name)
            ids = obj_model.search(cr, uid, [('id','=',int(db_id))])
            raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, db_id))
        def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0):
            # Convert one CSV-like row (plus its one2many continuation rows)
            # into a values dict suitable for create(); understands the
            # ':db_id', ':id' and ':lang=<code>' column suffixes and nested
            # o2m column prefixes. Returns
            # (row, nbrmax, warning, translate, data_id, data_res_id).
            # NOTE(review): many lines (variable initialisations, try/except
            # and if/else headers) are missing from this extract.
            line = datas[position]
            ir_model_data_obj = self.pool.get('ir.model.data')
            # Import normal fields
            for i in range(len(fields)):
                # row shorter than the header -> hard error
                raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
                # database-id column: "<field>:db_id"
                if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':db_id'):
                    field_name = field[0].split(':')[0]
                    model_rel = fields_def[field_name]['relation']
                    if fields_def[field[len(prefix)][:-6]]['type']=='many2many':
                        for db_id in line[i].split(config.get('csv_internal_sep')):
                            _check_db_id(self, model_rel, db_id)
                            # (except-handler lines; the 'except' header is
                            # missing from this extract)
                            warning += [tools.exception_to_unicode(e)]
                            logger.notifyChannel("import", netsvc.LOG_ERROR,
                                    tools.exception_to_unicode(e))
                        # many2many write command: replace with this id list
                        res = [(6, 0, res_id)]
                    _check_db_id(self, model_rel, line[i])
                    warning += [tools.exception_to_unicode(e)]
                    logger.notifyChannel("import", netsvc.LOG_ERROR,
                            tools.exception_to_unicode(e))
                    row[field_name] = res or False
                # xml-id column: "<field>:id"
                if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':id'):
                    if fields_def[field[len(prefix)][:-3]]['type']=='many2many':
                        for word in line[i].split(config.get('csv_internal_sep')):
                            # fully-qualified 'module.xml_id', else assume
                            # the current module
                            module, xml_id = word.rsplit('.', 1)
                            module, xml_id = current_module, word
                            id = ir_model_data_obj._get_id(cr, uid, module,
                            res_id2 = ir_model_data_obj.read(cr, uid, [id],
                                    ['res_id'])[0]['res_id']
                            res_id.append(res_id2)
                        res_id = [(6, 0, res_id)]
                    module, xml_id = line[i].rsplit('.', 1)
                    module, xml_id = current_module, line[i]
                    id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
                    res_id = ir_model_data_obj.read(cr, uid, [id],
                            ['res_id'])[0]['res_id']
                    row[field[0][:-3]] = res_id or False
                # translated column: "<field>:lang=<code>"
                if (len(field) == len(prefix)+1) and \
                        len(field[len(prefix)].split(':lang=')) == 2:
                    f, lang = field[len(prefix)].split(':lang=')
                    translate.setdefault(lang, {})[f]=line[i] or False
                # plain column at the current prefix depth
                if (len(field) == len(prefix)+1) and \
                        (prefix == field[0:len(prefix)]):
                    if field[len(prefix)] == "id":
                        # resolve or create the ir_model_data entry
                        is_xml_id = data_id = line[i]
                        d = data_id.split('.')
                        module = len(d)>1 and d[0] or ''
                        name = len(d)>1 and d[1] or d[0]
                        data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('name','=',name)])
                        d = ir_model_data_obj.read(cr, uid, data_ids, ['res_id'])[0]
                        if is_db_id and not db_id:
                            data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('res_id','=',is_db_id)])
                            if not len(data_ids):
                                ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id})
                        if is_db_id and int(db_id) != int(is_db_id):
                            warning += [_("Id is not the same than existing one: %s")%(is_db_id)]
                            logger.notifyChannel("import", netsvc.LOG_ERROR,
                                    _("Id is not the same than existing one: %s")%(is_db_id))
                    if field[len(prefix)] == "db_id":
                        # explicit database id column
                        _check_db_id(self, model_name, line[i])
                        data_res_id = is_db_id = int(line[i])
                        warning += [tools.exception_to_unicode(e)]
                        logger.notifyChannel("import", netsvc.LOG_ERROR,
                                tools.exception_to_unicode(e))
                        data_ids = ir_model_data_obj.search(cr, uid, [('model','=',model_name),('res_id','=',line[i])])
                        d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0]
                        data_id = '%s.%s'%(d['module'],d['name'])
                        if is_xml_id and not data_id:
                        if is_xml_id and is_xml_id!=data_id:
                            warning += [_("Id is not the same than existing one: %s")%(line[i])]
                            logger.notifyChannel("import", netsvc.LOG_ERROR,
                                    _("Id is not the same than existing one: %s")%(line[i]))
                    # convert the raw cell according to the field type
                    if fields_def[field[len(prefix)]]['type'] == 'integer':
                        res = line[i] and int(line[i])
                    elif fields_def[field[len(prefix)]]['type'] == 'boolean':
                        res = line[i].lower() not in ('0', 'false', 'off')
                    elif fields_def[field[len(prefix)]]['type'] == 'float':
                        res = line[i] and float(line[i])
                    elif fields_def[field[len(prefix)]]['type'] == 'selection':
                        # selection may be a static list or a callable
                        if isinstance(fields_def[field[len(prefix)]]['selection'],
                            sel = fields_def[field[len(prefix)]]['selection']
                            sel = fields_def[field[len(prefix)]]['selection'](self,
                        if line[i] in [tools.ustr(key),tools.ustr(val)]: # accepting key or value for selection field
                        if line[i] and not res:
                            logger.notifyChannel("import", netsvc.LOG_WARNING,
                                    _("key '%s' not found in selection field '%s'") % \
                                    (line[i], field[len(prefix)]))
                            warning += [_("Key/value '%s' not found in selection field '%s'")%(line[i],field[len(prefix)])]
                    elif fields_def[field[len(prefix)]]['type']=='many2one':
                        # resolve the target record by exact name
                        relation = fields_def[field[len(prefix)]]['relation']
                        res2 = self.pool.get(relation).name_search(cr, uid,
                                line[i], [], operator='=', context=context)
                        res = (res2 and res2[0][0]) or False
                        warning += [_("Relation not found: %s on '%s'")%(line[i],relation)]
                        logger.notifyChannel("import", netsvc.LOG_WARNING,
                                _("Relation not found: %s on '%s'")%(line[i],relation))
                    elif fields_def[field[len(prefix)]]['type']=='many2many':
                        relation = fields_def[field[len(prefix)]]['relation']
                        for word in line[i].split(config.get('csv_internal_sep')):
                            res2 = self.pool.get(relation).name_search(cr,
                                    uid, word, [], operator='=', context=context)
                            res3 = (res2 and res2[0][0]) or False
                            warning += [_("Relation not found: %s on '%s'")%(line[i],relation)]
                            logger.notifyChannel("import",
                                    _("Relation not found: %s on '%s'")%(line[i],relation))
                        # default: keep the raw value (falsy cells -> False)
                        res = line[i] or False
                    row[field[len(prefix)]] = res
                elif (prefix==field[0:len(prefix)]):
                    # deeper path: remember the o2m field for the second pass
                    if field[0] not in todo:
                        todo.append(field[len(prefix)])
            # Import one2many, many2many fields
            # NOTE(review): the surrounding 'for field in todo:' loop header
            # is missing from this extract.
            relation_obj = self.pool.get(fields_def[field]['relation'])
            newfd = relation_obj.fields_get(
                    cr, uid, context=context)
            res = process_liness(self, datas, prefix + [field], current_module, relation_obj._name, newfd, position)
            (newrow, max2, w2, translate2, data_id2, data_res_id2) = res
            nbrmax = max(nbrmax, max2)
            warning = warning + w2
            reduce(lambda x, y: x and y, newrow)
            row[field] = (reduce(lambda x, y: x or y, newrow.values()) and \
                    [(0, 0, newrow)]) or []
            # consume the continuation lines that belong to this o2m block
            while (position+i)<len(datas):
                for j in range(len(fields)):
                    if (len(field2) <= (len(prefix)+1)) and datas[position+i][j]:
                (newrow, max2, w2, translate2, data_id2, data_res_id2) = process_liness(
                        self, datas, prefix+[field], current_module, relation_obj._name, newfd, position+i)
                if reduce(lambda x, y: x or y, newrow.values()):
                    row[field].append((0, 0, newrow))
                nbrmax = max(nbrmax, i)
            for i in range(max(nbrmax, 1)):
            result = (row, nbrmax, warning, translate, data_id, data_res_id)
        # Main import loop. NOTE(review): the loop header, counter
        # initialisation and try/except lines are missing from this extract.
        fields_def = self.fields_get(cr, uid, context=context)
        initial_size = len(datas)
        if config.get('import_partial', False) and filename:
            # resumable imports: remember how many lines were already done
            data = pickle.load(file(config.get('import_partial')))
            original_value = data.get(filename, 0)
        (res, other, warning, translate, data_id, res_id) = \
                process_liness(self, datas, [], current_module, self._name, fields_def)
        # abort on parse warnings, reporting the offending line number
        return (-1, res, 'Line ' + str(counter) +' : ' + '!\n'.join(warning), '')
        # create/update the record through ir.model.data to keep xml-ids in sync
        id = ir_model_data_obj._update(cr, uid, self._name,
                current_module, res, xml_id=data_id, mode=mode,
                noupdate=noupdate, res_id=res_id)
        # (exception handler; the 'except' header binding `e` is missing)
        if isinstance(e,psycopg2.IntegrityError):
            msg= _('Insertion Failed!')
            for key in self.pool._sql_error.keys():
                # map known constraint names to friendly messages
                msg = self.pool._sql_error[key]
            return (-1, res,'Line ' + str(counter) +' : ' + msg,'' )
        for lang in translate:
            # write the translated values, one write() per language
            context2 = context.copy()
            context2['lang'] = lang
            self.write(cr, uid, [id], translate[lang], context2)
        if config.get('import_partial', False) and filename and (not (counter%100)) :
            # checkpoint progress every 100 lines
            data = pickle.load(file(config.get('import_partial')))
            data[filename] = initial_size - len(datas) + original_value
            pickle.dump(data, file(config.get('import_partial'),'wb'))
        #except Exception, e:
        #    logger.notifyChannel("import", netsvc.LOG_ERROR, e)
        #    return (-1, res, e[0], warning)
        #    return (-1, res, e[0], '')
        # TODO: Send a request with the result and multi-thread !
        return (done, 0, 0, 0)
845 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
846 raise _('The read method is not implemented on this object !')
848 def get_invalid_fields(self,cr,uid):
849 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        # Run every check in self._constraints over `ids`; collect the
        # translated failure messages, remember the offending field names in
        # self._invalids, and raise a ValidateError when any check failed.
        # NOTE(review): the error_msgs initialisation/append and the
        # 'if error_msgs:' header are missing from this extract, which is
        # why the raise below looks unconditional.
        context = context or {}
        lng = context.get('lang', False) or 'en_US'
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # prefer a translated constraint message when one exists
                translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
                _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                self._invalids.update(fields)
        raise except_orm('ValidateError', '\n'.join(error_msgs))
        self._invalids.clear()
    # Return default values for the fields in `fields_list`.
    # NOTE(review): body missing from this extract.
    def default_get(self, cr, uid, fields_list, context=None):
873 def perm_read(self, cr, user, ids, context=None, details=True):
874 raise _('The perm_read method is not implemented on this object !')
876 def unlink(self, cr, uid, ids, context=None):
877 raise _('The unlink method is not implemented on this object !')
879 def write(self, cr, user, ids, vals, context=None):
880 raise _('The write method is not implemented on this object !')
882 def create(self, cr, user, vals, context=None):
883 raise _('The create method is not implemented on this object !')
885 # returns the definition of each field in the object
886 # the optional fields parameter can limit the result to some fields
887 def fields_get_keys(self, cr, user, context=None, read_access=True):
890 res = self._columns.keys()
891 for parent in self._inherits:
892 res.extend(self.pool.get(parent).fields_get_keys(cr, user, fields, context))
    def fields_get(self, cr, user, fields=None, context=None, read_access=True):
        # Return {field_name: description dict} for this model, merging the
        # _inherits parents and applying ir.translation for labels, help
        # texts and selection values. NOTE(review): several lines (the
        # `res` initialisation, some if-headers, `continue` statements and
        # the final return) are missing from this extract.
        translation_obj = self.pool.get('ir.translation')
        model_access_obj = self.pool.get('ir.model.access')
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, fields, context))
        if self._columns.keys():
            for f in self._columns.keys():
                # honour the optional `fields` filter
                if fields and f not in fields:
                res[f] = {'type': self._columns[f]._type}
                # copy simple attributes that are set on the column object
                for arg in ('string', 'readonly', 'states', 'size', 'required',
                        'change_default', 'translate', 'help', 'select', 'selectable'):
                    if getattr(self._columns[f], arg):
                        res[f][arg] = getattr(self._columns[f], arg)
                # without write access the field is forced readonly
                # (the access-check header is missing from this extract)
                res[f]['readonly'] = True
                res[f]['states'] = {}
                for arg in ('digits', 'invisible','filters'):
                    if hasattr(self._columns[f], arg) \
                            and getattr(self._columns[f], arg):
                        res[f][arg] = getattr(self._columns[f], arg)
                # translated label and tooltip
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
                res[f]['string'] = res_trans
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
                res[f]['help'] = help_trans
                if hasattr(self._columns[f], 'selection'):
                    if isinstance(self._columns[f].selection, (tuple, list)):
                        sel = self._columns[f].selection
                        # translate each selection option
                        for (key, val) in sel:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                            sel2.append((key, val2 or val))
                        res[f]['selection'] = sel
                        # call the 'dynamic selection' function
                        res[f]['selection'] = self._columns[f].selection(self, cr,
                if res[f]['type'] in ('one2many', 'many2many', 'many2one', 'one2one'):
                    # relational fields expose their co-model and defaults
                    res[f]['relation'] = self._columns[f]._obj
                    res[f]['domain'] = self._columns[f]._domain
                    res[f]['context'] = self._columns[f]._context
        #TODO : read the fields from the database
        # filter out fields which aren't in the fields list
    # Overload this method if you need a window title which depends on the context
    # NOTE(review): the default body (returning False) is missing from this
    # extract.
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
    def __view_look_dom(self, cr, user, node, view_id, context=None):
        # Walk one node of a view architecture, collect the field names it
        # references, and enrich <field> nodes (inline sub-views, selection
        # widget choices, group-based visibility, translations). Returns
        # {field_name: attrs}. NOTE(review): several lines (variable
        # initialisations, loop and else headers) are missing from this
        # extract.
        if node.tag == 'field':
            if node.get('name') in self._columns:
                column = self._columns[node.get('name')]
                # fallback: inherited field (the 'else:' header is missing)
                column = self._inherit_fields[node.get('name')][2]
            relation = column._obj
            # inline sub-views are rendered against the co-model
            if f.tag in ('form', 'tree', 'graph'):
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(relation).__view_look_dom_arch(cr, user, f, view_id, ctx)
                views[str(f.tag)] = {
            attrs = {'views': views}
            if node.get('widget') and node.get('widget') == 'selection':
                # We cannot use the 'string' domain as it is defined according to the record !
                if column._domain and not isinstance(column._domain, (str, unicode)):
                    dom = column._domain
                attrs['selection'] = self.pool.get(relation).name_search(cr, user, '', dom, context=context)
                # optional fields also offer an empty choice
                if (node.get('required') and not int(node.get('required'))) or not column.required:
                    attrs['selection'].append((False,''))
            fields[node.get('name')] = attrs
        elif node.tag in ('form', 'tree'):
            # window title may be provided by view_header_get
            result = self.view_header_get(cr, user, False, node.tag, context)
            node.set('string', result)
        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}
        if 'groups' in node.attrib:
            # hide the node unless the user belongs to one of the groups
            if node.get('groups'):
                groups = node.get('groups').split(',')
                access_pool = self.pool.get('ir.model.access')
                for group in groups:
                    readonly = readonly or access_pool.check_groups(cr, user, group)
                node.set('invisible', '1')
            del(node.attrib['groups'])
        if ('lang' in context) and not result:
            # translate the 'string' and 'sum' attributes of the node
            if node.get('string'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string').encode('utf8'))
                if not trans and ('base_model_name' in context):
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string').encode('utf8'))
                node.set('string', trans)
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum').encode('utf8'))
            node.set('sum', trans)
        # recurse into child nodes (the loop header binding `f` is missing)
        fields.update(self.__view_look_dom(cr, user, f, view_id, context))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        # Render an architecture node to its XML string plus the matching
        # fields description, applying workflow-role based button access.
        # NOTE(review): several lines (if/else headers, return statement)
        # are missing from this extract.
        fields_def = self.__view_look_dom(cr, user, node, view_id, context=context)
        rolesobj = self.pool.get('res.roles')
        usersobj = self.pool.get('res.users')
        # only workflow (signal) buttons are role-checked
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            if user != 1: # admin user has all roles
                user_roles = usersobj.read(cr, user, [user], ['roles_id'])[0]['roles_id']
                cr.execute("select role_id from wkf_transition where signal=%s", (button.get('name'),))
                roles = cr.fetchall()
                ok = ok and rolesobj.check(cr, user, user_roles, role[0])
                # readonly flag reflects whether the user holds the role
                button.set('readonly', '1')
                button.set('readonly', '0')
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        fields = self.fields_get(cr, user, fields_def.keys(), context)
        for field in fields_def:
            # (the "if field == 'id':" header is missing from this extract)
            # sometimes, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
            fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
            # unknown field: build a detailed error from the view parts
            cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
            res = cr.fetchall()[:]
            res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
            msg = "\n * ".join([r[0] for r in res])
            msg += "\n\nEither you wrongly customised this view, or some modules bringing those views are not compatible with your current data model"
            netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
            raise except_orm('View error', msg)
    def __get_default_calendar_view(self):
        """Generate a default calendar view (For internal use only).

        Picks conventional start/stop (or delay) date fields and a color
        field from the model's columns; raises when no usable date field
        exists. NOTE(review): several lines (break statements, the return,
        part of the arch assembly) are missing from this extract.
        """
        arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
                '<calendar string="%s"') % (self._description)
        if (self._date_name not in self._columns):
            # fall back to a conventional date field name
            for dt in ['date','date_start','x_date','x_date_start']:
                if dt in self._columns:
                    self._date_name = dt
            # no date field at all: a calendar view is impossible
            raise except_orm(_('Invalid Object Architecture!'),_("Insufficient fields for Calendar View!"))
        arch +=' date_start="%s"' % (self._date_name)
        for color in ["user_id","partner_id","x_user_id","x_partner_id"]:
            if color in self._columns:
                arch += ' color="' + color + '"'
        dt_stop_flag = False
        for dt_stop in ["date_stop","date_end","x_date_stop","x_date_end"]:
            if dt_stop in self._columns:
                arch += ' date_stop="' + dt_stop + '"'
        if not dt_stop_flag:
            # no stop date available: fall back to a duration (delay) field
            for dt_delay in ["date_delay","planned_hours","x_date_delay","x_planned_hours"]:
                if dt_delay in self._columns:
                    arch += ' date_delay="' + dt_delay + '"'
        # closing of the arch string (intermediate lines missing here)
            ' <field name="%s"/>\n'
            '</calendar>') % (self._rec_name)
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """Return the requested view (arch + fields description) for this model.

        Looks the view up in ir_ui_view (or generates a default form/tree/
        calendar arch when none exists), applies all inheriting views, then
        post-processes the arch with __view_look_dom_arch. Optionally attaches
        toolbar actions (print/action/relate) and submenu data to the result.
        NOTE(review): many interior lines are missing from this dump; only the
        visible statements are reproduced.
        """
        # encode(): force unicode to utf-8 bytes before lxml parsing
        if isinstance(s, unicode):
            return s.encode('utf8')

        def _inherit_apply(src, inherit):
            # apply one inheriting view specification onto the source tree
            def _find(node, node2):
                # <xpath expr="..."> matches by XPath; otherwise match the
                # first node with the same tag and identical attributes
                if node2.tag == 'xpath':
                    res = node.xpath(node2.get('expr'))
                    return res and res[0]
                for n in node.getiterator(node2.tag):
                    for attr in node2.attrib:
                        if attr == 'position':
                        if n.get(attr) == node2.get(attr):
            # End: _find(node, node2)

            doc_dest = etree.fromstring(encode(inherit))
            toparse = [ doc_dest ]
            node2 = toparse.pop(0)
            # a <data> wrapper just contributes its children to the work list
            if node2.tag == 'data':
                toparse += [ c for c in doc_dest ]
            node = _find(src, node2)
            if node is not None:
                if node2.get('position'):
                    pos = node2.get('position')
                    if pos == 'replace':
                        node.addprevious(child)
                        node.getparent().remove(node)
                        sib = node.getnext()
                    elif pos == 'after':
                        sib.addprevious(child)
                    elif pos == 'before':
                        node.addprevious(child)
                    # any other position value is a view-definition error
                    raise AttributeError(_('Unknown position in inherited view %s !') % pos)
            # target not found: rebuild the tag spec for the error message
                ' %s="%s"' % (attr, node2.get(attr))
                for attr in node2.attrib
                if attr != 'position'
            tag = "<%s%s>" % (node2.tag, attrs)
            raise AttributeError(_("Couldn't find tag '%s' in parent view !") % tag)
        # End: _inherit_apply(src, inherit)

        result = {'type': view_type, 'model': self._name}
        # explicit view_id: fetch that exact view (optionally model-checked)
        where = (model and (" and model='%s'" % (self._name,))) or ''
        cr.execute('SELECT arch,name,field_parent,id,type,inherit_id FROM ir_ui_view WHERE id=%s'+where, (view_id,))
        # no view_id: pick the highest-priority root view of this type
        cr.execute('''SELECT
                arch,name,field_parent,id,type,inherit_id
            ORDER BY priority''', (self._name, view_type))
        sql_res = cr.fetchone()
        # a missing 'search' view falls back to the form view definition
        if not sql_res and view_type == 'search':
            cr.execute('''SELECT
                    arch,name,field_parent,id,type,inherit_id
                ORDER BY priority''', (self._name, 'form'))
            sql_res = cr.fetchone()
        view_id = ok or sql_res[3]

        # if a view was found
        result['type'] = sql_res[4]
        result['view_id'] = sql_res[3]
        result['arch'] = sql_res[0]

        def _inherit_apply_rec(result, inherit_id):
            # get all views which inherit from (ie modify) this view
            cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
            sql_inherit = cr.fetchall()
            for (inherit, id) in sql_inherit:
                result = _inherit_apply(result, inherit)
                # recurse: children of an inheriting view may themselves be inherited
                result = _inherit_apply_rec(result, id)

        inherit_result = etree.fromstring(encode(result['arch']))
        result['arch'] = _inherit_apply_rec(inherit_result, sql_res[3])
        result['name'] = sql_res[1]
        result['field_parent'] = sql_res[2] or False
        # otherwise, build some kind of default view
        if view_type == 'form':
            res = self.fields_get(cr, user, context=context)
            xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
                '''<form string="%s">''' % (self._description,)
            # relational o2m/m2m fields are left out of the default form
            if res[x]['type'] not in ('one2many', 'many2many'):
                xml += '<field name="%s"/>' % (x,)
                if res[x]['type'] == 'text':
        elif view_type == 'tree':
            _rec_name = self._rec_name
            if _rec_name not in self._columns:
                _rec_name = self._columns.keys()[0]
            xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
                '''<tree string="%s"><field name="%s"/></tree>''' \
                % (self._description, self._rec_name)
        elif view_type == 'calendar':
            xml = self.__get_default_calendar_view()
            # unknown view type: nothing sensible can be generated
            raise except_orm(_('Invalid Architecture!'),_("There is no view of type '%s' defined for the structure!") % view_type)
            result['arch'] = etree.fromstring(encode(xml))
            result['name'] = 'default'
            result['field_parent'] = False
            result['view_id'] = 0

        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=context)
        result['arch'] = xarch
        result['fields'] = xfields
        # submenu support: resolve the window action behind the active menu item
        if context and context.get('active_id',False):
            data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
            act_id = int(data_menu.split(',')[1])
            data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
            result['submenu'] = hasattr(data_action,'menus') and data_action.menus or False
        # toolbar: gather print/action/relate bindings, stripping heavy report
        # payload keys so the result stays lightweight
        for key in ('report_sxw_content', 'report_rml_content',
                'report_sxw', 'report_rml',
                'report_sxw_content_data', 'report_rml_content_data'):
        ir_values_obj = self.pool.get('ir.values')
        resprint = ir_values_obj.get(cr, user, 'action',
            'client_print_multi', [(self._name, False)], False,
        resaction = ir_values_obj.get(cr, user, 'action',
            'client_action_multi', [(self._name, False)], False,
        resrelate = ir_values_obj.get(cr, user, 'action',
            'client_action_relate', [(self._name, False)], False,
        resprint = map(clean, resprint)
        resaction = map(clean, resaction)
        # 'multi' actions are only for list selections, not single-record toolbars
        resaction = filter(lambda x: not x.get('multi', False), resaction)
        resprint = filter(lambda x: not x.get('multi', False), resprint)
        resrelate = map(lambda x: x[2], resrelate)
        for x in resprint+resaction+resrelate:
            x['string'] = x['name']
        result['toolbar'] = {
            'action': resaction,

    # public alias kept for external callers (name-mangling bypass)
    _view_look_dom_arch = __view_look_dom_arch
    def search_count(self, cr, user, args, context=None):
        """Return the number of records matching the domain ``args``.

        Delegates to search(count=True); some implementations return the count
        directly, others a list of ids (hence the isinstance check).
        """
        res = self.search(cr, user, args, context=context, count=True)
        if isinstance(res, list):
1354 def search(self, cr, user, args, offset=0, limit=None, order=None,
1355 context=None, count=False):
1356 raise _('The search method is not implemented on this object !')
1358 def name_get(self, cr, user, ids, context=None):
1359 raise _('The name_get method is not implemented on this object !')
1361 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=None):
1362 raise _('The name_search method is not implemented on this object !')
1364 def copy(self, cr, uid, id, default=None, context=None):
1365 raise _('The copy method is not implemented on this object !')
1367 def exists(self, cr, uid, id, context=None):
1368 raise _('The exists method is not implemented on this object !')
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        """Read the translated field labels of this model, per language.

        Returns a dict keyed by language code; each value maps field name to
        its translated label (falling back to the column's default string).
        NOTE(review): loop headers are missing from this dump.
        """
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
        # no explicit field list: use all own + inherited columns
        fields = self._columns.keys() + self._inherit_fields.keys()
        res[lang] = {'code': lang}
        if f in self._columns:
            res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
            res[lang][f] = res_trans
            # no translation found: fall back to the column's declared label
            res[lang][f] = self._columns[f].string
        # merge labels coming from _inherits parent models
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
            res[lang]['code'] = lang
            for f in res2[lang]:
                res[lang][f] = res2[lang][f]
    def write_string(self, cr, uid, id, langs, vals, context=None):
        """Write translated field labels for this model, per language.

        NOTE(review): the per-language / per-field loop headers are missing
        from this dump.
        """
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
        if field in self._columns:
            # res_id [0] is used for field-label translations (not record data)
            self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field])
        # propagate label writes to _inherits parent models
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), vals)
            self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
    def _check_removed_columns(self, cr, log=False):
        # Abstract: schema maintenance only makes sense for SQL-backed models;
        # orm overrides this, orm_memory overrides it as a no-op.
        raise NotImplementedError()
class orm_memory(orm_template):
    """Non-persistent model: records live in the in-process dict self.datas,
    keyed by integer id; entries carry an 'internal.date_access' timestamp
    used for garbage collection.
    NOTE(review): many interior lines are missing from this dump; loop/if
    bodies below are incomplete and reproduced as-is.
    """
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
    _inherit_fields = {}

    def __init__(self, cr):
        super(orm_memory, self).__init__(cr)
        # stale workflow instances of previous in-memory records are useless
        cr.execute('delete from wkf_instance where res_type=%s', (self._name,))

    def vaccum(self, cr, uid):
        # Garbage-collect: drop records idle longer than _max_hours, then trim
        # the oldest entries when the store exceeds _max_count. Only runs once
        # every _check_time calls (modulo check on check_id).
        if self.check_id % self._check_time:
        max = time.time() - self._max_hours * 60 * 60
        for id in self.datas:
            if self.datas[id]['internal.date_access'] < max:
        self.unlink(cr, uid, tounlink)
        if len(self.datas)>self._max_count:
            # oldest-first by access timestamp; evict the surplus
            sorted = map(lambda x: (x[1]['internal.date_access'], x[0]), self.datas.items())
            ids = map(lambda x: x[1], sorted[:len(self.datas)-self._max_count])
            self.unlink(cr, uid, ids)

    def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
        # Read field values straight from self.datas; refreshes the access
        # timestamp of every record touched.
        if not fields_to_read:
            fields_to_read = self._columns.keys()
        # a scalar id is normalized to a list, and unwrapped again on return
        if isinstance(ids, (int, long)):
        for f in fields_to_read:
            if id in self.datas:
                r[f] = self.datas[id].get(f, False)
                # bin_size: callers want the size of binary data, not the data
                if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
            if id in self.datas:
                self.datas[id]['internal.date_access'] = time.time()
        # non-classic fields (functions, relations) are computed afterwards
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
        for f in fields_post:
            res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
            for record in result:
                record[f] = res2[record['id']]
        if isinstance(ids_orig, (int, long)):

    def write(self, cr, user, ids, vals, context=None):
        # Update records in place; classic-write values go straight into the
        # dict, others are delegated to the column's set_memory.
        if self._columns[field]._classic_write:
            vals2[field] = vals[field]
            upd_todo.append(field)
        self.datas[id_new].update(vals2)
        self.datas[id_new]['internal.date_access'] = time.time()
        for field in upd_todo:
            self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
        self._validate(cr, user, [id_new], context)
        # notify the workflow engine of the write
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_write(user, self._name, id_new, cr)

    def create(self, cr, user, vals, context=None):
        # Create a new record: GC first, allocate the next id, fill defaults,
        # store values, validate, and fire the workflow create trigger.
        self.vaccum(cr, user)
        id_new = self.next_id
        for f in self._columns.keys():
        vals.update(self.default_get(cr, user, default, context))
        if self._columns[field]._classic_write:
            vals2[field] = vals[field]
            upd_todo.append(field)
        self.datas[id_new] = vals2
        self.datas[id_new]['internal.date_access'] = time.time()
        for field in upd_todo:
            self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
        self._validate(cr, user, [id_new], context)
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_create(user, self._name, id_new, cr)

    def default_get(self, cr, uid, fields_list, context=None):
        # Compute default values: model-declared defaults, then user-defined
        # ir.values defaults (with existence checks for relational targets),
        # then 'default_*' keys from the context (highest priority).
        # get the default values for the inherited fields
        for f in fields_list:
            if f in self._defaults:
                value[f] = self._defaults[f](self, cr, uid, context)
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \

        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type in ('many2one', 'one2one'):
                    obj = self.pool.get(fld_def._obj)
                    # drop the default when the target record no longer exists
                    if not obj.search(cr, uid, [('id', '=', field_value)]):
                if fld_def._type in ('many2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type in ('one2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                value[field] = field_value

        # get the default values from the context
        for key in context or {}:
            if key.startswith('default_'):
                value[key[8:]] = context[key]

    def _where_calc(self, cr, user, args, active_test=True, context=None):
        # Parse a search domain into an expression object's internal form.
        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitely asked for
        if 'active' in self._columns and (active_test and context.get('active_test', True)):
            active_in_args = False
            if a[0] == 'active':
                active_in_args = True
            if not active_in_args:
                args.insert(0, ('active', '=', 1))
            args = [('active', '=', 1)]
        e = expression.expression(args)
        e.parse(cr, user, self, context)
        # reach into the name-mangled attribute holding the parsed expression
        res=e.__dict__['_expression__exp']

    def search(self, cr, user, args, offset=0, limit=None, order=None,
            context=None, count=False):
        # Evaluate the domain against every in-memory record.
        result = self._where_calc(cr, user, args, context=context)
        # empty domain: every record matches
        return self.datas.keys()
        #Find the value of dict
        for id, data in self.datas.items():
            if limit and (counter >int(limit)):
            # NOTE(review): eval on domain operands — assumes domains come
            # from trusted (server-side) callers only; confirm before exposing
            val =eval('data[arg[0]]'+'==' +' arg[2]')
            elif arg[1] in ['<','>','in','not in','<=','>=','<>']:
                val =eval('data[arg[0]]'+arg[1] +' arg[2]')
            elif arg[1] in ['ilike']:
                if str(data[arg[0]]).find(str(arg[2]))!=-1:

    def unlink(self, cr, uid, ids, context=None):
        # Remove records from the store and purge their workflow instances.
        if id in self.datas:
        # ids interpolated directly: they are ints from self.datas, not user text
        cr.execute('delete from wkf_instance where res_type=%s and res_id in ('+','.join(map(str, ids))+')', (self._name, ))

    def perm_read(self, cr, user, ids, context=None, details=True):
        # In-memory records have no real audit trail: report the current user
        # as creator and "now" as creation date.
        'create_uid': (user, 'Root'),
        'create_date': time.strftime('%Y-%m-%d %H:%M:%S'),
        'write_date': False,

    def _check_removed_columns(self, cr, log=False):
        # nothing to check in memory...

    def exists(self, cr, uid, id, context=None):
        # a record exists iff its id is a key of the in-memory store
        return id in self.datas
class orm(orm_template):
    """PostgreSQL-backed model implementation (persistent ORM)."""
    # SQL-level constraints: list of (key, constraint_sql, error_message)
    _sql_constraints = []
    # method names that must never be overridden by function fields
    _protected = ['read','write','create','default_get','perm_read','unlink','fields_get','fields_view_get','search','name_get','distinct_field_get','name_search','copy','import_data','search_count', 'exists']
    def _parent_store_compute(self, cr):
        # Recompute the nested-set values (parent_left/parent_right) for the
        # whole table, walking depth-first from the root records.
        # NOTE(review): the recursion scaffolding (pos2 init, child loop
        # header, return) is missing from this dump.
        logger = netsvc.Logger()
        logger.notifyChannel('orm', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
        def browse_rec(root, pos=0):
            where = self._parent_name+'='+str(root)
            # root==falsy case: select top-level records (NULL parent)
            where = self._parent_name+' IS NULL'
            if self._parent_order:
                where += ' order by '+self._parent_order
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            childs = cr.fetchall()
            pos2 = browse_rec(id[0], pos2)
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos,pos2,root))
        # seed the walk with every root (NULL-parent) record
        query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
        if self._parent_order:
            query += ' order by '+self._parent_order
        for (root,) in cr.fetchall():
            pos = browse_rec(root, pos)
    def _update_store(self, cr, f, k):
        # Backfill the stored values of a fields.function column k by calling
        # its compute function over all record ids (in batches of 40).
        # NOTE(review): the batching loop header is missing from this dump.
        logger = netsvc.Logger()
        logger.notifyChannel('orm', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
        # consume the next batch of 40 ids
        ids_lst = ids_lst[40:]
        res = f.get(cr, self, iids, k, 1, {})
        for key,val in res.items():
            # if val is a many2one, just write the ID
            if type(val)==tuple:
            # skip genuine False booleans (unset), write everything else
            if (val<>False) or (type(val)<>bool):
                cr.execute(update_query, (ss[1](val), key))
    def _check_removed_columns(self, cr, log=False):
        """Drop NOT NULL constraints from DB columns that no longer exist on
        the model (removed fields, or fields another module will re-add)."""
        logger = netsvc.Logger()
        # iterate on the database columns to drop the NOT NULL constraints
        # of fields which were required but have been removed (or will be added by another module)
        columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
        columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
        # %%s placeholders survive the %-formatting used for the IN (...) list
        cr.execute("SELECT a.attname, a.attnotnull"
                   " FROM pg_class c, pg_attribute a"
                   " WHERE c.relname=%%s"
                   " AND c.oid=a.attrelid"
                   " AND a.attisdropped=%%s"
                   " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
                   " AND a.attname NOT IN (%s)" % ",".join(['%s']*len(columns)),
                   [self._table, False] + columns)
        for column in cr.dictfetchall():
            logger.notifyChannel("orm", netsvc.LOG_DEBUG, "column %s is in the table %s but not in the corresponding object %s" % (column['attname'], self._table, self._name))
            if column['attnotnull']:
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
    def _auto_init(self, cr, context={}):
        """Synchronize the PostgreSQL schema with the model definition.

        Creates the table if missing, adds/alters/drops columns, installs
        foreign keys, indexes, NOT NULL and SQL constraints, and schedules
        recomputation of stored function fields and the parent store.
        NOTE(review): many interior lines (try/else branches, loop bodies)
        are missing from this dump; only visible statements are reproduced.
        """
        store_compute = False
        logger = netsvc.Logger()
        self._field_create(cr, context=context)
        if not hasattr(self, "_auto") or self._auto:
            # does the table already exist?
            cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname='%s'" % self._table)
            cr.execute("CREATE TABLE \"%s\" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITH OIDS" % self._table)
            if self._parent_store:
                # check for the nested-set support columns
                cr.execute("""SELECT c.relname
                    FROM pg_class c, pg_attribute a
                    WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                    """, (self._table, 'parent_left'))
                if 'parent_left' not in self._columns:
                    logger.notifyChannel('orm', netsvc.LOG_ERROR, 'create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)' % (self._table, ))
                if 'parent_right' not in self._columns:
                    logger.notifyChannel('orm', netsvc.LOG_ERROR, 'create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)' % (self._table, ))
                if self._columns[self._parent_name].ondelete<>'cascade':
                    logger.notifyChannel('orm', netsvc.LOG_ERROR, "the columns %s on object must be set as ondelete='cascasde'" % (self._name, self._parent_name))
                cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
                cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
                store_compute = True

            if self._log_access:
                # audit-trail columns added to every logged table
                logs = {
                    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
                    'create_date': 'TIMESTAMP',
                    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
                    'write_date': 'TIMESTAMP'
                }
                cr.execute("""
                    FROM pg_class c, pg_attribute a
                    WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                    """, (self._table, k))
                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))

            self._check_removed_columns(cr, log=False)

            # iterate on the "object columns"
            todo_update_store = []
            for k in self._columns:
                if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
                    #raise _('Can not define a column %s. Reserved keyword !') % (k,)
                f = self._columns[k]
                if isinstance(f, fields.one2many):
                    # ensure the co-model table and its fields_id column exist
                    cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,))
                    cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id))
                    res = cr.fetchone()[0]
                    cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY (%s) REFERENCES "%s" ON DELETE SET NULL' % (self._obj, f._fields_id, f._table))
                elif isinstance(f, fields.many2many):
                    # create the relation table and its two indexes on demand
                    cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (f._rel,))
                    if not cr.dictfetchall():
                        #FIXME: Remove this try/except
                        ref = self.pool.get(f._obj)._table
                        except AttributeError:
                            ref = f._obj.replace('.', '_')
                        cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE) WITH OIDS' % (f._rel, f._id1, self._table, f._id2, ref))
                        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
                        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
                else:
                    # fetch the current DB definition of this column, if any
                    cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
                       "FROM pg_class c,pg_attribute a,pg_type t " \
                       "WHERE c.relname=%s " \
                       "AND a.attname=%s " \
                       "AND c.oid=a.attrelid " \
                       "AND a.atttypid=t.oid", (self._table, k))
                    res = cr.dictfetchall()
                    if not isinstance(f, fields.function) or f.store:
                        # add the missing field
                        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                        # on an existing table, initialise the new column with
                        # the model default value
                        if not create and k in self._defaults:
                            default = self._defaults[k](self, cr, 1, {})
                            ss = self._columns[k]._symbol_set
                            query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
                            cr.execute(query, (ss[1](default),))
                            logger.notifyChannel('orm', netsvc.LOG_DEBUG, 'setting default value of new column %s of table %s'% (k, self._table))
                        logger.notifyChannel('orm', netsvc.LOG_DEBUG, 'creating new column %s of table %s'% (k, self._table))
                        if isinstance(f, fields.function):
                            # stored function fields: schedule a recomputation
                            if f.store is not True:
                                order = f.store[f.store.keys()[0]][2]
                            todo_update_store.append((order, f,k))
                        # and add constraints if needed
                        if isinstance(f, fields.many2one):
                            #FIXME: Remove this try/except
                            ref = self.pool.get(f._obj)._table
                            except AttributeError:
                                ref = f._obj.replace('.', '_')
                            # ir_actions is inherited so foreign key doesn't work on it
                            if ref != 'ir_actions':
                                cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete))
                            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k))
                        except Exception, e:
                            logger.notifyChannel('orm', netsvc.LOG_WARNING, 'WARNING: unable to set column %s of table %s not null !\nTry to re-run: openerp-server.py --update=module\nIf it doesn\'t work, update records and execute manually:\nALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % (k, self._table, self._table, k))
                    else:
                        # the column exists: reconcile DB type with model type
                        f_pg_type = f_pg_def['typname']
                        f_pg_size = f_pg_def['size']
                        f_pg_notnull = f_pg_def['attnotnull']
                        if isinstance(f, fields.function) and not f.store:
                            logger.notifyChannel('orm', netsvc.LOG_INFO, 'column %s (%s) in table %s removed: converted to a function !\n' % (k, f.string, self._table))
                            cr.execute('ALTER TABLE %s DROP COLUMN %s'% (self._table, k))
                        f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
                        # supported automatic migrations: (db type, model type,
                        # new SQL type, value cast suffix)
                        casts = [
                            ('text', 'char', 'VARCHAR(%d)' % (f.size or 0,), '::VARCHAR(%d)'%(f.size or 0,)),
                            ('varchar', 'text', 'TEXT', ''),
                            ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                            ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
                        ]
                        # !!! Avoid reduction of varchar field !!!
                        if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
                        # if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size != f.size:
                            logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed size" % (k, self._table))
                            # widen via a temp column so existing data is kept
                            cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
                            cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size))
                            cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size' % (self._table,))
                        if (f_pg_type==c[0]) and (f._type==c[1]):
                            logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed type to %s." % (k, self._table, c[1]))
                            cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
                            cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
                            cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                        if f_pg_type != f_obj_type:
                            logger.notifyChannel('orm', netsvc.LOG_WARNING, "column '%s' in table '%s' has changed type (DB = %s, def = %s) but unable to migrate this change !" % (k, self._table, f_pg_type, f._type))

                        # if the field is required and hasn't got a NOT NULL constraint
                        if f.required and f_pg_notnull == 0:
                            # set the field to the default value if any
                            if k in self._defaults:
                                default = self._defaults[k](self, cr, 1, {})
                                if (default is not None):
                                    ss = self._columns[k]._symbol_set
                                    query = 'UPDATE "%s" SET "%s"=%s WHERE %s is NULL' % (self._table, k, ss[0], k)
                                    cr.execute(query, (ss[1](default),))
                            # add the NOT NULL constraint
                            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k))
                            except Exception, e:
                                logger.notifyChannel('orm', netsvc.LOG_WARNING, 'unable to set a NOT NULL constraint on column %s of the %s table !\nIf you want to have it, you should update the records and execute manually:\nALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % (k, self._table, self._table, k))
                        elif not f.required and f_pg_notnull == 1:
                            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                        # keep the column's index in sync with f.select
                        indexname = '%s_%s_index' % (self._table, k)
                        cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
                        res = cr.dictfetchall()
                        if not res and f.select:
                            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                        if res and not f.select:
                            cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
                        if isinstance(f, fields.many2one):
                            ref = self.pool.get(f._obj)._table
                            if ref != 'ir_actions':
                                # fetch the existing FK and its ON DELETE rule
                                cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
                                            'pg_attribute as att1, pg_attribute as att2 '
                                            'WHERE con.conrelid = cl1.oid '
                                            'AND cl1.relname = %s '
                                            'AND con.confrelid = cl2.oid '
                                            'AND cl2.relname = %s '
                                            'AND array_lower(con.conkey, 1) = 1 '
                                            'AND con.conkey[1] = att1.attnum '
                                            'AND att1.attrelid = cl1.oid '
                                            'AND att1.attname = %s '
                                            'AND array_lower(con.confkey, 1) = 1 '
                                            'AND con.confkey[1] = att2.attnum '
                                            'AND att2.attrelid = cl2.oid '
                                            'AND att2.attname = %s '
                                            "AND con.contype = 'f'", (self._table, ref, k, 'id'))
                                res = cr.dictfetchall()
                                # ON DELETE rule changed in the model: recreate the FK
                                if res[0]['confdeltype'] != confdeltype.get(f.ondelete.upper(), 'a'):
                                    cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res[0]['conname'] + '"')
                                    cr.execute('ALTER TABLE "' + self._table + '" ADD FOREIGN KEY ("' + k + '") REFERENCES "' + ref + '" ON DELETE ' + f.ondelete)
                        logger.notifyChannel('orm', netsvc.LOG_ERROR, "Programming error !")
            for order,f,k in todo_update_store:
                todo_end.append((order, self._update_store, (f, k)))
        else:
            # _auto is False: only verify the table/view exists
            cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (self._table,))
            create = not bool(cr.fetchone())

        # install missing SQL constraints declared on the model
        for (key, con, _) in self._sql_constraints:
            conname = '%s_%s' % (self._table, key)
            cr.execute("SELECT conname FROM pg_constraint where conname=%s", (conname,))
            if not cr.dictfetchall():
                cr.execute('alter table "%s" add constraint "%s_%s" %s' % (self._table, self._table, key, con,))
                logger.notifyChannel('orm', netsvc.LOG_WARNING, 'unable to add \'%s\' constraint on table %s !\n If you want to have it, you should update the records and execute manually:\nALTER table %s ADD CONSTRAINT %s_%s %s' % (con, self._table, self._table, self._table, key, con,))

        # execute arbitrary setup SQL attached to the model, one statement at a time
        if hasattr(self, "_sql"):
            for line in self._sql.split(';'):
                line2 = line.replace('\n', '').strip()

        self._parent_store_compute(cr)
    def __init__(self, cr):
        """Register this model in the pool: wire up stored function fields,
        SQL-constraint error messages, user-defined ('manual') columns from
        ir_model_fields, inherited fields and default-value sanity checks.
        NOTE(review): several interior lines are missing from this dump.
        """
        super(orm, self).__init__(cr)

        if not hasattr(self, '_log_access'):
            # if not access is not specify, it is the same value as _auto
            self._log_access = not hasattr(self, "_auto") or self._auto

        # copy so per-instance column tweaks don't leak into the class attribute
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if not isinstance(f, fields.function):
            # store=True means "recompute when this model's own records change"
            if self._columns[store_field].store is True:
                sm = {self._name:(lambda self,cr, uid, ids, c={}: ids, None, 10, None)}
                sm = self._columns[store_field].store
            for object, aa in sm.items():
                # store spec tuples may be (fnct, fields, priority[, length])
                (fnct,fields2,order,length)=aa
                (fnct,fields2,order)=aa
                raise except_orm('Error',
                    ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                self.pool._store_function.setdefault(object, [])
                # skip duplicate registrations of the same trigger
                for x,y,z,e,f,l in self.pool._store_function[object]:
                    if (x==self._name) and (y==store_field) and (e==fields2):
                self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order, length))
                self.pool._store_function[object].sort(lambda x,y: cmp(x[4],y[4]))

        # register constraint violation messages for user-friendly errors
        for (key, _, msg) in self._sql_constraints:
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields
        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
        cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
        for field in cr.dictfetchall():
            if field['name'] in self._columns:
            attrs = {
                'string': field['field_description'],
                'required': bool(field['required']),
                'readonly': bool(field['readonly']),
                'domain': field['domain'] or None,
                'size': field['size'],
                'ondelete': field['on_delete'],
                'translate': (field['translate']),
                #'select': int(field['select_level'])
            }
            # NOTE(review): eval of DB-stored selection definitions — assumes
            # only trusted admins can create manual fields; confirm
            if field['ttype'] == 'selection':
                self._columns[field['name']] = getattr(fields, field['ttype'])(eval(field['selection']), **attrs)
            elif field['ttype'] == 'reference':
                self._columns[field['name']] = getattr(fields, field['ttype'])(selection=eval(field['selection']), **attrs)
            elif field['ttype'] == 'many2one':
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], **attrs)
            elif field['ttype'] == 'one2many':
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], field['relation_field'], **attrs)
            elif field['ttype'] == 'many2many':
                # derive a deterministic relation-table name for the m2m
                _rel1 = field['relation'].replace('.', '_')
                _rel2 = field['model'].replace('.', '_')
                _rel_name = 'x_%s_%s_%s_rel' %(_rel1, _rel2, field['name'])
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], _rel_name, 'id1', 'id2', **attrs)
            else:
                self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)

        self._inherits_reload()
        if not self._sequence:
            self._sequence = self._table+'_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        for f in self._columns:
            self._columns[f].restart()
# default_get: compute default values for the fields in `fields_list`.
# Precedence, lowest to highest: defaults of _inherits parents, this model's
# _defaults functions / ir.property definitions, user-saved defaults from
# ir.values, and finally 'default_<field>' keys found in the context.
# NOTE(review): this dump elides several original lines (e.g. the
# initialisation of `value` and `field_value2`); confirm against full source.
2073 def default_get(self, cr, uid, fields_list, context=None):
2077 # get the default values for the inherited fields
2078 for t in self._inherits.keys():
2079 value.update(self.pool.get(t).default_get(cr, uid, fields_list,
2082 # get the default values defined in the object
2083 for f in fields_list:
2084 if f in self._defaults:
2085 value[f] = self._defaults[f](self, cr, uid, context)
2086 fld_def = ((f in self._columns) and self._columns[f]) \
2087 or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
# property fields fall back to the company-wide ir.property default
# (a property record with res_id unset).
2089 if isinstance(fld_def, fields.property):
2090 property_obj = self.pool.get('ir.property')
2091 definition_id = fld_def._field_get(cr, uid, self._name, f)
2092 nid = property_obj.search(cr, uid, [('fields_id', '=',
2093 definition_id), ('res_id', '=', False)])
2095 prop_value = property_obj.browse(cr, uid, nid[0],
2096 context=context).value
2097 value[f] = (prop_value and int(prop_value.split(',')[1])) \
2100 # get the default values set by the user and override the default
2101 # values defined in the object
2102 ir_values_obj = self.pool.get('ir.values')
2103 res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
2104 for id, field, field_value in res:
2105 if field in fields_list:
2106 fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
# relational defaults are filtered: drop targets that no longer exist
2107 if fld_def._type in ('many2one', 'one2one'):
2108 obj = self.pool.get(fld_def._obj)
2109 if not obj.search(cr, uid, [('id', '=', field_value)]):
2111 if fld_def._type in ('many2many'):
2112 obj = self.pool.get(fld_def._obj)
2114 for i in range(len(field_value)):
2115 if not obj.search(cr, uid, [('id', '=',
2118 field_value2.append(field_value[i])
2119 field_value = field_value2
2120 if fld_def._type in ('one2many'):
2121 obj = self.pool.get(fld_def._obj)
2123 for i in range(len(field_value)):
2124 field_value2.append({})
2125 for field2 in field_value[i]:
2126 if obj._columns[field2]._type in ('many2one', 'one2one'):
2127 obj2 = self.pool.get(obj._columns[field2]._obj)
2128 if not obj2.search(cr, uid,
2129 [('id', '=', field_value[i][field2])]):
2131 # TODO add test for many2many and one2many
2132 field_value2[i][field2] = field_value[i][field2]
2133 field_value = field_value2
2134 value[field] = field_value
# a context key 'default_foo' overrides every other default for field 'foo'
# (key[8:] strips the 'default_' prefix)
2135 for key in context or {}:
2136 if key.startswith('default_'):
2137 value[key[8:]] = context[key]
2141 # Update objects that use this one, so they refresh their _inherits fields
2143 def _inherits_reload_src(self):
2144 for obj in self.pool.obj_pool.values():
2145 if self._name in obj._inherits:
2146 obj._inherits_reload()
# _inherits_reload: rebuild self._inherit_fields, mapping each inherited
# field name to a tuple (parent_table, link_column, column_object), pulling
# both direct columns and transitively inherited fields from every
# _inherits parent; then propagate to models inheriting from this one.
# NOTE(review): `res` is initialised on an elided original line (2149),
# presumably `res = {}` -- confirm against full source.
2148 def _inherits_reload(self):
2150 for table in self._inherits:
2151 res.update(self.pool.get(table)._inherit_fields)
2152 for col in self.pool.get(table)._columns.keys():
2153 res[col] = (table, self._inherits[table], self.pool.get(table)._columns[col])
2154 for col in self.pool.get(table)._inherit_fields.keys():
2155 res[col] = (table, self._inherits[table], self.pool.get(table)._inherit_fields[col][2])
2156 self._inherit_fields = res
2157 self._inherits_reload_src()
def fields_get(self, cr, user, fields=None, context=None):
    """Return the definitions of the requested fields.

    Delegates to the parent implementation, additionally passing a
    flag telling whether the user has 'write' or 'create' access on
    this model (checked via ir.model.access without raising).
    """
    access_obj = self.pool.get('ir.model.access')
    # Short-circuit preserved: the 'create' check only runs when 'write' is denied.
    writable = access_obj.check(cr, user, self._name, 'write',
                                raise_exception=False, context=context)
    read_access = writable or access_obj.check(
        cr, user, self._name, 'create', raise_exception=False, context=context)
    return super(orm, self).fields_get(cr, user, fields, context, read_access)
# read: ACL-checked record read. Defaults `fields` to all columns plus
# inherited fields, delegates the heavy lifting to _read_flat, and returns
# a single dict (or False) when a scalar id was passed instead of a list.
# NOTE(review): lines elided in this dump include the normalisation of a
# scalar id into `select` and the per-record value cleanup loop.
2165 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
2168 self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
2170 fields = self._columns.keys() + self._inherit_fields.keys()
2172 if isinstance(ids, (int, long)):
2174 result = self._read_flat(cr, user, select, fields, context, load)
2176 for key, v in r.items():
2179 if isinstance(ids, (int, long)):
2180 return result and result[0] or False
# _read_flat: the core SQL read. Fetches "classic" (stored) columns in
# IN_MAX-sized chunks with record-rule (ir.rule) filtering, resolves
# translations, merges values read from _inherits parents, applies
# _symbol_get post-processing, then computes non-stored (function) fields.
# The tail filters out values of fields whose `read` groups the user does
# not belong to, replacing them with '=No Permission=' markers.
# NOTE(review): many original lines are elided in this dump (e.g. the
# init of `res`, the else-branches pairing the rule/no-rule queries);
# confirm control flow against the full source.
2183 def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
2189 if fields_to_read == None:
2190 fields_to_read = self._columns.keys()
2192 # construct a clause for the rules :
2193 d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)
2195 # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
2196 fields_pre = [f for f in fields_to_read if
2197 f == self.CONCURRENCY_CHECK_FIELD
2198 or (f in self._columns and getattr(self._columns[f], '_classic_write'))
2199 ] + self._inherits.values()
# convert_field: render one field as a SQL select expression --
# dates truncated to the second, the concurrency field as a timestamp,
# binaries as length() when only sizes are wanted (bin_size in context).
2203 def convert_field(f):
2204 if f in ('create_date', 'write_date'):
2205 return "date_trunc('second', %s) as %s" % (f, f)
2206 if f == self.CONCURRENCY_CHECK_FIELD:
2207 if self._log_access:
2208 return "COALESCE(write_date, create_date, now())::timestamp AS %s" % (f,)
2209 return "now()::timestamp AS %s" % (f,)
2210 if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
2211 return 'length("%s") as "%s"' % (f, f)
2212 return '"%s"' % (f,)
2213 fields_pre2 = map(convert_field, fields_pre)
2214 for i in range(0, len(ids), cr.IN_MAX):
2215 sub_ids = ids[i:i+cr.IN_MAX]
2217 cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) AND %s ORDER BY %s' % \
2218 (','.join(fields_pre2 + ['id']), self._table,
2219 ','.join(['%s' for x in sub_ids]), d1,
2220 self._order),sub_ids + d2)
# fewer rows than distinct requested ids means a record rule hid some
2221 if not cr.rowcount == len({}.fromkeys(sub_ids)):
2222 raise except_orm(_('AccessError'),
2223 _('You try to bypass an access rule (Document type: %s).') % self._description)
2225 cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) ORDER BY %s' % \
2226 (','.join(fields_pre2 + ['id']), self._table,
2227 ','.join(['%s' for x in sub_ids]),
2228 self._order), sub_ids)
2229 res.extend(cr.dictfetchall())
2231 res = map(lambda x: {'id': x}, ids)
2233 for f in fields_pre:
2234 if f == self.CONCURRENCY_CHECK_FIELD:
# overlay stored values with translations for the context language
2236 if self._columns[f].translate:
2237 ids = map(lambda x: x['id'], res)
2238 res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
2240 r[f] = res_trans.get(r['id'], False) or r[f]
# pull requested inherited fields from each _inherits parent via its
# link column, then merge them into our result rows
2242 for table in self._inherits:
2243 col = self._inherits[table]
2244 cols = intersect(self._inherit_fields.keys(), fields_to_read)
2247 res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
2255 record.update(res3[record[col]])
2256 if col not in fields_to_read:
2259 # all fields which need to be post-processed by a simple function (symbol_get)
2260 fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
2262 # maybe it would be faster to iterate on the fields than on res, so that we wouldn't need
2263 # to get the _symbol_get in each occurrence
2265 for f in fields_post:
2266 r[f] = self._columns[f]._symbol_get(r[f])
2267 ids = map(lambda x: x['id'], res)
2269 # all non inherited fields for which the attribute whose name is in load is False
2270 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
2272 # Compute POST fields
# group multi-computed function fields so each `get` runs once per group
2274 for f in fields_post:
2275 todo.setdefault(self._columns[f]._multi, [])
2276 todo[self._columns[f]._multi].append(f)
2277 for key,val in todo.items():
2279 res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
2282 record[pos] = res2[record['id']][pos]
2285 res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
2288 record[f] = res2[record['id']]
2292 #for f in fields_post:
2293 # # get the value of that field for all records/ids
2294 # res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
2295 # for record in res:
2296 # record[f] = res2[record['id']]
# group-based read-permission filtering: for each field restricted by
# `read` groups, check membership in res_groups_users_rel and blank out
# the value ('=No Permission=') when the user is not in any group.
# NOTE(review): this SQL is string-built from group/module names declared
# in code, not user input -- but parameterising it would still be safer.
2300 for field in vals.copy():
2302 if field in self._columns:
2303 fobj = self._columns[field]
2310 for group in groups:
2311 module = group.split(".")[0]
2312 grp = group.split(".")[1]
2313 cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
2314 (grp, module, 'res.groups', user))
2315 readonly = cr.fetchall()
2316 if readonly[0][0] >= 1:
2319 elif readonly[0][0] == 0:
2325 if type(vals[field]) == type([]):
2327 elif type(vals[field]) == type(0.0):
2329 elif type(vals[field]) == type(''):
2330 vals[field] = '=No Permission='
# perm_read: return per-record audit metadata (create_uid/date,
# write_uid/date when _log_access is on), with the uid columns resolved
# to (id, name) pairs via res.users.name_get when `details` is True.
# Accepts a scalar id or a list; the scalar normalisation lines are
# elided in this dump.
2335 def perm_read(self, cr, user, ids, context=None, details=True):
2341 if self._log_access:
2342 fields = ', u.create_uid, u.create_date, u.write_uid, u.write_date'
2343 if isinstance(ids, (int, long)):
# ids are formatted inline -- safe only because they are integers
2346 ids_str = string.join(map(lambda x: str(x), ids), ',')
2347 cr.execute('select u.id'+fields+' from "'+self._table+'" u where u.id in ('+ids_str+')')
2348 res = cr.dictfetchall()
2351 r[key] = r[key] or False
2352 if key in ('write_uid', 'create_uid', 'uid') and details:
2354 r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
2355 if isinstance(ids, (int, long)):
# _check_concurrency: optimistic-locking check. When the client supplied
# per-record timestamps in context[CONCURRENCY_CHECK_FIELD], count rows
# whose COALESCE(write_date, create_date, now()) is newer than the
# client's snapshot and raise if any were modified in the meantime.
2359 def _check_concurrency(self, cr, ids, context):
2362 if context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access:
# line below is the body of a local `def key(oid):` whose header is on
# an elided original line (2363)
2364 return "%s,%s" % (self._name, oid)
2365 santa = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
2366 for i in range(0, len(ids), cr.IN_MAX):
# flatten (id, timestamp) pairs so sub_ids feeds the %s placeholders
# in pairs; len(sub_ids)/2 clauses are OR-ed together
2367 sub_ids = tools.flatten(((oid, context[self.CONCURRENCY_CHECK_FIELD][key(oid)])
2368 for oid in ids[i:i+cr.IN_MAX]
2369 if key(oid) in context[self.CONCURRENCY_CHECK_FIELD]))
2371 cr.execute("SELECT count(1) FROM %s WHERE %s" % (self._table, " OR ".join([santa]*(len(sub_ids)/2))), sub_ids)
2374 raise except_orm('ConcurrencyException', _('Records were modified in the meanwhile'))
# unlink: delete records. Order of operations: snapshot store-function
# dependencies, concurrency check, ACL check, refuse deletion of records
# referenced by default ir.property values, delete workflow instances,
# then DELETE in IN_MAX chunks with record-rule (ir.rule) enforcement,
# and finally recompute store functions on other affected models.
2376 def unlink(self, cr, uid, ids, context=None):
2379 if isinstance(ids, (int, long)):
# capture which stored function fields (possibly on other models)
# depend on these rows BEFORE they disappear
2382 result_store = self._store_get_values(cr, uid, ids, None, context)
2384 self._check_concurrency(cr, ids, context)
2386 self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
2388 properties = self.pool.get('ir.property')
2389 domain = [('res_id', '=', False),
2390 ('value', 'in', ['%s,%s' % (self._name, i) for i in ids]),
2392 if properties.search(cr, uid, domain, context=context):
2393 raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
2395 wf_service = netsvc.LocalService("workflow")
2397 wf_service.trg_delete(uid, self._name, oid, cr)
2399 #cr.execute('select * from '+self._table+' where id in ('+str_d+')', ids)
2400 #res = cr.dictfetchall()
2401 #for key in self._inherits:
2402 # ids2 = [x[self._inherits[key]] for x in res]
2403 # self.pool.get(key).unlink(cr, uid, ids2)
2405 d1, d2 = self.pool.get('ir.rule').domain_get(cr, uid, self._name)
2409 for i in range(0, len(ids), cr.IN_MAX):
2410 sub_ids = ids[i:i+cr.IN_MAX]
2411 str_d = string.join(('%s',)*len(sub_ids), ',')
# with record rules active: verify every requested id is visible,
# otherwise the caller is trying to delete around an access rule
2413 cr.execute('SELECT id FROM "'+self._table+'" ' \
2414 'WHERE id IN ('+str_d+')'+d1, sub_ids+d2)
2415 if not cr.rowcount == len(sub_ids):
2416 raise except_orm(_('AccessError'),
2417 _('You try to bypass an access rule (Document type: %s).') % \
2421 cr.execute('delete from "'+self._table+'" ' \
2422 'where id in ('+str_d+')'+d1, sub_ids+d2)
2424 cr.execute('delete from "'+self._table+'" ' \
2425 'where id in ('+str_d+')', sub_ids)
# recompute store functions on OTHER models; `<>` is Python 2's !=
2427 for order, object, store_ids, fields in result_store:
2428 if object<>self._name:
2429 obj = self.pool.get(object)
2430 cr.execute('select id from '+obj._table+' where id in ('+','.join(map(str, store_ids))+')')
2431 rids = map(lambda x: x[0], cr.fetchall())
2433 obj._store_set_values(cr, uid, rids, fields, context)
# write: update records. Phases: (1) blank out values of fields the user's
# groups may not write; (2) concurrency + ACL checks; (3) split vals into
# directly-writable columns (upd0/upd1), function fields needing set()
# (upd_todo), and inherited fields (updend); validate selection values;
# (4) chunked UPDATE with record-rule enforcement; (5) translations,
# function-field set(), writes delegated to _inherits parents; (6)
# _parent_store left/right tree maintenance; (7) store-function
# recomputation and workflow triggers.
# NOTE(review): many original lines are elided in this dump (loop headers,
# else-branches, init of upd0/upd1/direct/updend/nids/v); confirm control
# flow against the full source before relying on the structure shown.
2439 def write(self, cr, user, ids, vals, context=None):
2441 for field in vals.copy():
2443 if field in self._columns:
2444 fobj = self._columns[field]
2446 fobj = self._inherit_fields[field][2]
# group-based write-permission check, same pattern as in _read_flat
2453 for group in groups:
2454 module = group.split(".")[0]
2455 grp = group.split(".")[1]
2456 cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
2457 (grp, module, 'res.groups', user))
2458 readonly = cr.fetchall()
2459 if readonly[0][0] >= 1:
2462 elif readonly[0][0] == 0:
2474 if isinstance(ids, (int, long)):
2477 self._check_concurrency(cr, ids, context)
2479 self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
# translatable fields are written through ir.translation when a
# non-English lang is active, not through the direct SQL UPDATE
2487 totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
2489 if field in self._columns:
2490 if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
2491 if (not totranslate) or not self._columns[field].translate:
2492 upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
2493 upd1.append(self._columns[field]._symbol_set[1](vals[field]))
2494 direct.append(field)
2496 upd_todo.append(field)
2498 updend.append(field)
# validate selection/reference values against the field's selection
# (static list, or callable evaluated with the current context)
2499 if field in self._columns \
2500 and hasattr(self._columns[field], 'selection') \
2502 if self._columns[field]._type == 'reference':
2503 val = vals[field].split(',')[0]
2506 if isinstance(self._columns[field].selection, (tuple, list)):
2507 if val not in dict(self._columns[field].selection):
2508 raise except_orm(_('ValidateError'),
2509 _('The value "%s" for the field "%s" is not in the selection') \
2510 % (vals[field], field))
2512 if val not in dict(self._columns[field].selection(
2513 self, cr, user, context=context)):
2514 raise except_orm(_('ValidateError'),
2515 _('The value "%s" for the field "%s" is not in the selection') \
2516 % (vals[field], field))
2518 if self._log_access:
2519 upd0.append('write_uid=%s')
2520 upd0.append('write_date=now()')
2525 d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)
2529 for i in range(0, len(ids), cr.IN_MAX):
2530 sub_ids = ids[i:i+cr.IN_MAX]
2531 ids_str = string.join(map(str, sub_ids), ',')
2533 cr.execute('SELECT id FROM "'+self._table+'" ' \
2534 'WHERE id IN ('+ids_str+')'+d1, d2)
2535 if not cr.rowcount == len({}.fromkeys(sub_ids)):
2536 raise except_orm(_('AccessError'),
2537 _('You try to bypass an access rule (Document type: %s).') % \
2540 cr.execute('SELECT id FROM "'+self._table+'" WHERE id IN ('+ids_str+')')
2541 if not cr.rowcount == len({}.fromkeys(sub_ids)):
2542 raise except_orm(_('AccessError'),
2543 _('You try to write on an record that doesn\'t exist ' \
2544 '(Document type: %s).') % self._description)
2546 cr.execute('update "'+self._table+'" set '+string.join(upd0, ',')+' ' \
2547 'where id in ('+ids_str+')'+d1, upd1+ d2)
2549 cr.execute('update "'+self._table+'" set '+string.join(upd0, ',')+' ' \
2550 'where id in ('+ids_str+')', upd1)
2554 if self._columns[f].translate:
2555 src_trans = self.pool.get(self._name).read(cr,user,ids,[f])
2556 self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans[0][f])
2559 # call the 'set' method of fields which are not classic_write
2560 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
2562 # default element in context must be removed when calling a one2many or many2many
2563 rel_context = context.copy()
2564 for c in context.items():
2565 if c[0].startswith('default_'):
2566 del rel_context[c[0]]
2568 for field in upd_todo:
2570 self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context)
# forward values of inherited fields to each _inherits parent, using the
# distinct link-column values of the written rows
2572 for table in self._inherits:
2573 col = self._inherits[table]
2575 for i in range(0, len(ids), cr.IN_MAX):
2576 sub_ids = ids[i:i+cr.IN_MAX]
2577 ids_str = string.join(map(str, sub_ids), ',')
2578 cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
2579 'where id in ('+ids_str+')', upd1)
2580 nids.extend([x[0] for x in cr.fetchall()])
2584 if self._inherit_fields[val][0] == table:
2586 self.pool.get(table).write(cr, user, nids, v, context)
2588 self._validate(cr, user, ids, context)
2589 # TODO: use _order to set dest at the right position and not first node of parent
# nested-set (_parent_store) maintenance when the parent field changed:
# recompute parent_left/parent_right by shifting the moved subtree
2590 if self._parent_store and (self._parent_name in vals):
2592 self.pool._init_parent[self._name]=True
2595 # Find Position of the element
2596 if vals[self._parent_name]:
2597 cr.execute('select parent_left,parent_right,id from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (vals[self._parent_name],))
2599 cr.execute('select parent_left,parent_right,id from '+self._table+' where '+self._parent_name+' is null order by '+(self._parent_order or self._order))
2600 result_p = cr.fetchall()
2602 for (pleft,pright,pid) in result_p:
2607 # It's the first node of the parent: position = parent_left+1
2609 if not vals[self._parent_name]:
2612 cr.execute('select parent_left from '+self._table+' where id=%s', (vals[self._parent_name],))
2613 position = cr.fetchone()[0]+1
2615 # We have the new position !
2616 cr.execute('select parent_left,parent_right from '+self._table+' where id=%s', (id,))
2617 pleft,pright = cr.fetchone()
2618 distance = pright - pleft + 1
# moving a node under one of its own descendants is forbidden
2620 if position>pleft and position<=pright:
2621 raise except_orm(_('UserError'), _('Recursivity Detected.'))
2624 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
2625 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
2626 cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft,position-pleft, pleft, pright))
2628 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
2629 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
2630 cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance,pleft-position+distance, pleft+distance, pright+distance))
2632 result = self._store_get_values(cr, user, ids, vals.keys(), context)
2633 for order, object, ids, fields in result:
2634 self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
2636 wf_service = netsvc.LocalService("workflow")
2638 wf_service.trg_write(user, self._name, id, cr)
2642 # TODO: Should set perm to user.xxx
# create: insert a new record. Phases: ACL check; fill defaults (including
# inherited ones, skipping parents whose link id was passed directly);
# route inherited values to their _inherits parent via recursive create;
# allocate the id from the table sequence; force untouched booleans to
# False; drop group-readonly fields; build the INSERT column/value lists;
# validate selections; INSERT; nested-set bookkeeping; function-field
# set(); validation; store-function recomputation; workflow trg_create.
# NOTE(review): several original lines are elided in this dump (init of
# avoid_table/default/tocreate/upd_todo, try/except bodies, else-branches);
# confirm against the full source.
2644 def create(self, cr, user, vals, context=None):
2645 """ create(cr, user, vals, context) -> int
2646 cr = database cursor
2648 vals = dictionary of the form {'field_name':field_value, ...}
2652 self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
2657 for (t, c) in self._inherits.items():
2659 avoid_table.append(t)
2660 for f in self._columns.keys(): # + self._inherit_fields.keys():
2664 for f in self._inherit_fields.keys():
2665 if (not f in vals) and (self._inherit_fields[f][0] not in avoid_table):
2669 default_values = self.default_get(cr, user, default, context)
# many2many defaults given as a flat id list are wrapped into the
# (6, 0, ids) replace-all command expected by the m2m field
2670 for dv in default_values:
2671 if dv in self._columns and self._columns[dv]._type == 'many2many':
2672 if default_values[dv] and isinstance(default_values[dv][0], (int, long)):
2673 default_values[dv] = [(6, 0, default_values[dv])]
2675 vals.update(default_values)
2678 for v in self._inherits:
2679 if self._inherits[v] not in vals:
2682 tocreate[v] = {self._inherits[v]:vals[self._inherits[v]]}
2683 (upd0, upd1, upd2) = ('', '', [])
2685 for v in vals.keys():
2686 if v in self._inherit_fields:
2687 (table, col, col_detail) = self._inherit_fields[v]
2688 tocreate[table][v] = vals[v]
2691 if (v not in self._inherit_fields) and (v not in self._columns):
2694 # Try-except added to filter the creation of those records whose fields are readonly.
2695 # Example : any dashboard which has all the fields readonly.(due to Views(database views))
2697 cr.execute("SELECT nextval('"+self._sequence+"')")
2699 raise except_orm(_('UserError'),
2700 _('You cannot perform this operation.'))
2702 id_new = cr.fetchone()[0]
# create each _inherits parent first and link it via its foreign key
2703 for table in tocreate:
2704 if self._inherits[table] in vals:
2705 del vals[self._inherits[table]]
2706 id = self.pool.get(table).create(cr, user, tocreate[table])
2707 upd0 += ','+self._inherits[table]
2711 #Start : Set bool fields to be False if they are not touched(to make search more powerful)
2712 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
2714 for bool_field in bool_fields:
2715 if bool_field not in vals:
2716 vals[bool_field] = False
# group-based write-permission check, same pattern as in write()
2718 for field in vals.copy():
2720 if field in self._columns:
2721 fobj = self._columns[field]
2723 fobj = self._inherit_fields[field][2]
2729 for group in groups:
2730 module = group.split(".")[0]
2731 grp = group.split(".")[1]
2732 cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
2733 (grp, module, 'res.groups', user))
2734 readonly = cr.fetchall()
2735 if readonly[0][0] >= 1:
2738 elif readonly[0][0] == 0:
2746 if self._columns[field]._classic_write:
2747 upd0 = upd0 + ',"' + field + '"'
2748 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
2749 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
2751 upd_todo.append(field)
2752 if field in self._columns \
2753 and hasattr(self._columns[field], 'selection') \
2755 if self._columns[field]._type == 'reference':
2756 val = vals[field].split(',')[0]
2759 if isinstance(self._columns[field].selection, (tuple, list)):
2760 if val not in dict(self._columns[field].selection):
2761 raise except_orm(_('ValidateError'),
2762 _('The value "%s" for the field "%s" is not in the selection') \
2763 % (vals[field], field))
2765 if val not in dict(self._columns[field].selection(
2766 self, cr, user, context=context)):
2767 raise except_orm(_('ValidateError'),
2768 _('The value "%s" for the field "%s" is not in the selection') \
2769 % (vals[field], field))
2770 if self._log_access:
2771 upd0 += ',create_uid,create_date'
2774 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
2775 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
# nested-set bookkeeping: open a 2-wide gap after the parent's subtree
# (or after the rightmost root) and slot the new node into it
2777 if self._parent_store:
2779 self.pool._init_parent[self._name]=True
2781 parent = vals.get(self._parent_name, False)
2783 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
2785 result_p = cr.fetchall()
2786 for (pleft,) in result_p:
2791 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
2792 pleft_old = cr.fetchone()[0]
2795 cr.execute('select max(parent_right) from '+self._table)
2796 pleft = cr.fetchone()[0] or 0
2797 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
2798 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
2799 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1,pleft+2,id_new))
2801 # default element in context must be removed when calling a one2many or many2many
2802 rel_context = context.copy()
2803 for c in context.items():
2804 if c[0].startswith('default_'):
2805 del rel_context[c[0]]
2807 for field in upd_todo:
2808 self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context)
2809 self._validate(cr, user, [id_new], context)
2812 result = self._store_get_values(cr, user, [id_new], vals.keys(), context)
2813 for order, object, ids, fields in result:
2814 self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
2816 wf_service = netsvc.LocalService("workflow")
2817 wf_service.trg_create(user, self._name, id_new, cr)
# _store_get_values: given written/deleted ids and changed fields, decide
# which store-function triggers registered in pool._store_function apply,
# map each trigger to the target ids it must recompute (via the trigger's
# ids-callback), then group target ids sharing the same trigger set and
# order the work by the trigger's priority (element [4]).
# NOTE(review): locals named `dict`, `object` and `id` shadow builtins;
# `result`, `k2` and `dict` are initialised on elided original lines.
2820 def _store_get_values(self, cr, uid, ids, fields, context):
2822 fncts = self.pool._store_function.get(self._name, [])
2823 for fnct in range(len(fncts)):
# a trigger matches when one of the written fields is in its watch list
2826 for f in (fields or []):
2827 if f in fncts[fnct][3]:
2833 result.setdefault(fncts[fnct][0], {})
# fncts[fnct][2] is the trigger's callback returning target record ids
2834 ids2 = fncts[fnct][2](self,cr, uid, ids, context)
2835 for id in filter(None, ids2):
2836 result[fncts[fnct][0]].setdefault(id, [])
2837 result[fncts[fnct][0]][id].append(fnct)
# invert: group ids that share the exact same set of triggers
2839 for object in result:
2841 for id,fnct in result[object].items():
2842 k2.setdefault(tuple(fnct), [])
2843 k2[tuple(fnct)].append(id)
2844 for fnct,id in k2.items():
2845 dict.setdefault(fncts[fnct[0]][4],[])
2846 dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4],object,id,map(lambda x: fncts[x][1], fnct)))
# _store_set_values: recompute the given stored function fields for `ids`
# and persist them with direct UPDATEs. When _log_access is on, a
# field_dict of recently-written fields (within the trigger's hour window,
# element [5]) is built so fresh values are not clobbered. Multi fields
# (grouped by _multi) are computed together; plain fields one at a time.
# NOTE(review): elided lines include the init of field_dict/keys/todo/
# upd0/upd1 and the loop headers iterating result rows.
2854 def _store_set_values(self, cr, uid, ids, fields, context):
2857 if self._log_access:
2858 cr.execute('select id,write_date from '+self._table+' where id in ('+','.join(map(str, ids))+')')
2862 field_dict.setdefault(r[0], [])
2863 res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
2864 write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
2865 for i in self.pool._store_function.get(self._name, []):
# i[5] is the trigger's freshness window in hours: skip fields whose
# row was written more recently than that window
2867 up_write_date = write_date + datetime.timedelta(hours=i[5])
2868 if datetime.datetime.now() < up_write_date:
2870 field_dict[r[0]].append(i[1])
2876 if self._columns[f]._multi not in keys:
2877 keys.append(self._columns[f]._multi)
2878 todo.setdefault(self._columns[f]._multi, [])
2879 todo[self._columns[f]._multi].append(f)
# multi-field branch: one get() call computes the whole field group
2883 result = self._columns[val[0]].get(cr, self, ids, val, uid, context=context)
2884 for id,value in result.items():
2886 for f in value.keys():
2887 if f in field_dict[id]:
# many2one values may come back as (id, name) pairs; store just the id
2894 if self._columns[v]._type in ('many2one', 'one2one'):
2896 value[v] = value[v][0]
2899 upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
2900 upd1.append(self._columns[v]._symbol_set[1](value[v]))
2902 cr.execute('update "' + self._table + '" set ' + \
2903 string.join(upd0, ',') + ' where id = %s', upd1)
# single-field branch
2907 result = self._columns[f].get(cr, self, ids, f, uid, context=context)
2908 for r in result.keys():
2910 if r in field_dict.keys():
2911 if f in field_dict[r]:
2913 for id,value in result.items():
2914 if self._columns[f]._type in ('many2one', 'one2one'):
2919 cr.execute('update "' + self._table + '" set ' + \
2920 '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value),id))
def perm_write(self, cr, user, ids, fields, context=None):
    """Removed API kept only so stale callers fail loudly.

    Always raises; never returns.

    Bug fix: the original did ``raise _('This method does not exist
    anymore')`` -- raising a plain string, which itself triggers
    ``TypeError: exceptions must be old-style classes or derived from
    BaseException`` on Python 2.6+. Raise a real exception carrying
    the same translated message instead.
    """
    raise NotImplementedError(_('This method does not exist anymore'))
2929 # TODO: improve handling of NULL values
# _where_calc: translate a domain (`args`) into SQL pieces, returning
# (where_clauses, params, tables). When the model has an 'active' column
# and active_test is requested, an ('active','=',1) filter is prepended
# unless the domain already mentions 'active'. Empty domains select the
# bare table with no clauses.
2930 def _where_calc(self, cr, user, args, active_test=True, context=None):
2934 # if the object has a field named 'active', filter out all inactive
2935 # records unless they were explicitly asked for
2936 if 'active' in self._columns and (active_test and context.get('active_test', True)):
2938 active_in_args = False
2940 if a[0] == 'active':
2941 active_in_args = True
2942 if not active_in_args:
2943 args.insert(0, ('active', '=', 1))
2945 args = [('active', '=', 1)]
# delegate domain parsing and SQL generation to the expression module
2949 e = expression.expression(args)
2950 e.parse(cr, user, self, context)
2951 tables = e.get_tables()
2952 qu1, qu2 = e.to_sql()
2953 qu1 = qu1 and [qu1] or []
2955 qu1, qu2, tables = [], [], ['"%s"' % self._table]
2957 return (qu1, qu2, tables)
# _check_qorder: reject an ORDER BY string that does not match
# regex_order (comma-separated identifiers with optional asc/desc),
# guarding against SQL injection through the `order` parameter.
# NOTE(review): the success-path return is on an elided original line.
2959 def _check_qorder(self, word):
2960 if not regex_order.match(word):
2961 raise except_orm(_('AccessError'), _('Bad query.'))
# search: return matching record ids (or their count when count=True).
# Builds WHERE/params/tables via _where_calc, validates `order` through
# _check_qorder, appends the user's record-rule clause from ir.rule,
# then runs either a COUNT query or the id-selecting query.
2964 def search(self, cr, user, args, offset=0, limit=None, order=None,
2965 context=None, count=False):
2968 # compute the where, order by, limit and offset clauses
2969 (qu1, qu2, tables) = self._where_calc(cr, user, args, context=context)
2972 qu1 = ' where '+string.join(qu1, ' and ')
# `order` is caller-supplied text interpolated into SQL; _check_qorder
# is the only thing standing between it and injection
2977 self._check_qorder(order)
2978 order_by = order or self._order
2980 limit_str = limit and ' limit %d' % limit or ''
2981 offset_str = offset and ' offset %d' % offset or ''
2984 # construct a clause for the rules :
2985 d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)
2987 qu1 = qu1 and qu1+' and '+d1 or ' where '+d1
2991 cr.execute('select count(%s.id) from ' % self._table +
2992 ','.join(tables) +qu1 + limit_str + offset_str, qu2)
2995 # execute the "main" query to fetch the ids we were searching for
2996 cr.execute('select %s.id from ' % self._table + ','.join(tables) +qu1+' order by '+order_by+limit_str+offset_str, qu2)
2998 return [x[0] for x in res]
3000 # returns the different values ever entered for one field
3001 # this is used, for example, in the client when the user hits enter on
# distinct_field_get: return the distinct values previously entered for a
# field, delegating to the defining model when the field is inherited,
# otherwise to the column object's own search().
3003 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
3006 if field in self._inherit_fields:
3007 return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
3009 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
# name_get: return [(id, display_name)] pairs built from _rec_name,
# coerced to unicode with tools.ustr. Scalar-id normalisation and the
# empty-ids early return sit on elided original lines.
3011 def name_get(self, cr, user, ids, context=None):
3016 if isinstance(ids, (int, long)):
3018 return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
3019 [self._rec_name], context, load='_classic_write')]
# name_search: search records whose _rec_name matches `name` with the
# given operator (default 'ilike'), combined with the extra domain in
# `args`, and return name_get() pairs. The `return res` and the
# args-default handling are on elided original lines.
3021 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=None):
3028 args += [(self._rec_name, operator, name)]
3029 ids = self.search(cr, user, args, limit=limit, context=context)
3030 res = self.name_get(cr, user, ids, context)
# copy_data: build the vals dict needed to duplicate record `id`, plus
# the translation records to clone. Per field type: audit columns and
# function fields dropped, many2one reduced to its id, one2many lines
# recursively copied as (0, 0, vals) commands, many2many rewrapped as a
# (6, 0, ids) command. `default` overrides win; _inherits link columns
# are stripped at the end so parents are re-created, not shared.
3033 def copy_data(self, cr, uid, id, default=None, context=None):
3038 if 'state' not in default:
3039 if 'state' in self._defaults:
3040 default['state'] = self._defaults['state'](self, cr, uid, context)
3041 data = self.read(cr, uid, [id], context=context)[0]
3042 fields = self.fields_get(cr, uid, context=context)
3045 ftype = fields[f]['type']
3047 if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
3051 data[f] = default[f]
3052 elif ftype == 'function':
3054 elif ftype == 'many2one':
3056 data[f] = data[f] and data[f][0]
3059 elif ftype in ('one2many', 'one2one'):
3061 rel = self.pool.get(fields[f]['relation'])
3062 for rel_id in data[f]:
3063 # the lines are first duplicated using the wrong (old)
3064 # parent but then are reassigned to the correct one thanks
3066 d,t = rel.copy_data(cr, uid, rel_id, context=context)
3067 res.append((0, 0, d))
3070 elif ftype == 'many2many':
3071 data[f] = [(6, 0, data[f])]
# collect ir.translation rows attached to translatable fields (own or
# inherited) so the caller can re-create them for the copy
3073 trans_obj = self.pool.get('ir.translation')
3077 if f in self._columns and self._columns[f].translate:
3078 trans_name=self._name+","+f
3079 elif f in self._inherit_fields and self._inherit_fields[f][2].translate:
3080 trans_name=self._inherit_fields[f][0]+","+f
3085 trans_ids = trans_obj.search(cr, uid, [
3086 ('name', '=', trans_name),
3087 ('res_id','=',data['id'])
3090 trans_data.extend(trans_obj.read(cr,uid,trans_ids,context=context))
3094 for v in self._inherits:
3095 del data[self._inherits[v]]
3096 return data, trans_data
# copy: duplicate record `id` -- gather values and translations via
# copy_data, create the new record, then re-create each translation row
# pointed at the new id. The `return new_id` is on an elided line.
3098 def copy(self, cr, uid, id, default=None, context=None):
3099 trans_obj = self.pool.get('ir.translation')
3100 data, trans_data = self.copy_data(cr, uid, id, default, context)
3101 new_id = self.create(cr, uid, data, context)
3102 for record in trans_data:
3104 record['res_id'] = new_id
3105 trans_obj.create(cr, uid, record, context)
def exists(self, cr, uid, id, context=None):
    """Tell whether a row with the given id exists in this model's table."""
    query = 'SELECT count(1) FROM "%s" where id=%%s' % (self._table,)
    cr.execute(query, (id,))
    row = cr.fetchone()
    return bool(row[0])
# check_recursion: verify that linking `ids` through the `parent` column
# (default _parent_name) creates no cycle. Walks ancestor levels in
# IN_MAX chunks; presumably returns False if any starting id reappears
# among the collected ancestors (the final comparison and returns are on
# elided original lines).
# NOTE(review): the chunk loop ranges over len(ids) but slices
# ids_parent -- looks like it should be len(ids_parent); confirm against
# the full source before changing.
3112 def check_recursion(self, cr, uid, ids, parent=None):
3114 parent = self._parent_name
3116 while len(ids_parent):
3118 for i in range(0, len(ids), cr.IN_MAX):
3119 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
3120 cr.execute('SELECT distinct "'+parent+'"'+
3121 ' FROM "'+self._table+'" ' \
3122 'WHERE id in ('+','.join(map(str, sub_ids_parent))+')')
3123 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
3124 ids_parent = ids_parent2
3125 for i in ids_parent:
3131 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: