1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
# Object relational mapping to postgresql module
#    . Hierarchical structure
#    . Constraints consistency, validations
#    . Object metadata depends on its status
#    . Optimised processing by complex queries (multiple actions at once)
#    . Default field values
#    . Permissions optimisation
#    . Persistent object: DB postgresql
#    . Multi-level caching system
#    . Two different inheritance mechanisms
#        - classical fields (varchar, integer, boolean, ...)
#        - relational fields (one2many, many2one, many2many)
57 from tools.translate import _
63 from lxml import etree
65 sys.stderr.write("ERROR: Import lxml module\n")
66 sys.stderr.write("ERROR: Try to install the python-lxml package\n")
68 from tools.config import config
70 regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
def last_day_of_current_month():
    """Return the current month's last date as a 'YYYY-MM-DD' string."""
    now = datetime.date.today()
    final_day = calendar.monthrange(now.year, now.month)[1]
    # strftime fills in the zero-padded year/month; the day is appended as-is
    # (always two digits, since a month's last day is between 28 and 31).
    return time.strftime('%Y-%m-') + str(final_day)
def intersect(la, lb):
    """Return the items of ``la`` that also occur in ``lb``.

    Order and duplicates of ``la`` are preserved.
    """
    return [item for item in la if item in lb]
class except_orm(Exception):
    """Base ORM exception carrying a (name, value) pair,
    e.g. ('ValidateError', message)."""
    def __init__(self, name, value):
        # NOTE(review): the lines between the signature and the assignment
        # below are missing from this excerpt (presumably the usual
        # self.name/self.value assignments) — confirm against the full source.
        self.args = (name, value)
87 class BrowseRecordError(Exception):
# Readonly python database object browser
class browse_null(object):
    """Null placeholder used when browsing an unset relational field, so
    attribute chains fail softly instead of raising.

    NOTE(review): several method bodies are missing from this excerpt.
    """
    def __getitem__(self, name):
        """NOTE(review): body missing from this excerpt."""
    def __getattr__(self, name):
        return None  # XXX: return self ?
    def __nonzero__(self):
        """NOTE(review): body missing from this excerpt (presumably False)."""
    def __unicode__(self):
        """NOTE(review): body missing from this excerpt."""
# TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """A plain list of browse_record objects that also carries the browsing
    context dictionary."""
    def __init__(self, lst, context=None):
        # NOTE(review): the context-normalisation lines are missing from
        # this excerpt — confirm against the full source.
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """Lazy proxy over a single database record.

    Field access triggers a batched ``read`` on the underlying table and
    wraps relational values into further browse_record /
    browse_record_list instances, caching results in a per-table dict
    shared via ``cache``.

    NOTE(review): this excerpt is missing many original source lines
    (assignments, loop headers, try/except, else branches); the gaps are
    not individually marked below and the text as shown does not parse.
    """
    logger = netsvc.Logger()

    def __init__(self, cr, uid, id, table, cache, context=None, list_class = None, fields_process={}):
        """
        table : the object (inherited from orm)
        context : a dictionary with an optional context
        """
        self._list_class = list_class or browse_record_list
        self._table_name = self._table._name
        self.__logger = logging.getLogger(
            'osv.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process
        # one cache dict per table name, shared by all records of that table
        cache.setdefault(table._name, {})
        self._data = cache[table._name]
        if not (id and isinstance(id, (int, long,))):
            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))
        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # Fetch (and cache) field `name`; reads are batched over every cached
        # id of this table that still lacks the field.
        if name not in self._data[self._id]:
            # build the list of fields we will fetch
            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                # model methods are exposed curried with (cr, uid, [id])
                if isinstance(getattr(self._table, name), (types.MethodType, types.LambdaType, types.FunctionType)):
                    return lambda *args, **argv: getattr(self._table, name)(self._cr, self._uid, [self._id], *args, **argv)
                return getattr(self._table, name)
            self.logger.notifyChannel("browse_record", netsvc.LOG_WARNING,
                "Field '%s' does not exist in object '%s': \n%s" % (
                    name, self, ''.join(traceback.format_exc())))
            raise KeyError("Field '%s' does not exist in object '%s'" % (
            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            # gen the list of "local" (ie not inherited) fields which are classic or many2one
            ffields = filter(lambda x: x[1]._classic_write, self._table._columns.items())
            # gen the list of inherited fields
            inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
            # complete the field list with the inherited fields which are classic or many2one
            ffields += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
            ffields = [(name, col)]
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            fffields = map(lambda x: x[0], ffields)
            datas = self._table.read(self._cr, self._uid, ids, fffields, context=self._context, load="_classic_write")
            if self._fields_process:
                # post-process raw values (e.g. binary/date wrappers) per type
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid,[('code','=',lang)])
                raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid,lang_obj_ids[0])
                if f._type in self._fields_process:
                    d[n] = self._fields_process[f._type](d[n])
                    d[n].set_value(self._cr, self._uid, d[n], self, f, lang_obj)
            # Where did those ids come from? Perhaps old entries in ir_model_dat?
            self.__logger.warn("No datas found for ids %s in %s",
            raise KeyError('Field %s not found in %s'%(name,self))
            # create browse records for 'remote' objects
            if len(str(data['id']).split('-')) > 1:
                data['id'] = int(str(data['id']).split('-')[0])
            if f._type in ('many2one', 'one2one'):
                obj = self._table.pool.get(f._obj)
                if type(data[n]) in (type([]),type( (1,) )):
                    # FIXME: this happen when a _inherits object
                    # overwrite a field of it parent. Need
                    # testing to be sure we got the right
                    # object and not the parent one.
                    if not isinstance(ids2, browse_record):
                        new_data[n] = browse_record(self._cr,
                            self._uid, ids2, obj, self._cache,
                            context=self._context,
                            list_class=self._list_class,
                            fields_process=self._fields_process)
                    new_data[n] = browse_null()
                new_data[n] = browse_null()
            elif f._type in ('one2many', 'many2many') and len(data[n]):
                new_data[n] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(f._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in data[n]], self._context)
                new_data[n] = data[n]
            self._data[data['id']].update(new_data)
        if not name in self._data[self._id]:
            #how did this happen?
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                "Ffields: %s, datas: %s"%(fffields, datas))
            self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
                "Data: %s, Table: %s"%(self._data[self._id], self._table))
            raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # Delegates to __getitem__; a missing field surfaces as AttributeError.
        # NOTE(review): the try/except wrapper binding ``e`` is missing here.
        raise AttributeError(e)

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

    def __hasattr__(self, name):
        # NOTE(review): the body of __hasattr__ and the ``def`` line of the
        # method returning the repr string are missing from this excerpt.
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # records compare equal when they denote the same row of the same table
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

    # NOTE(review): the enclosing ``def __hash__(self)`` line is missing here.
        return hash((self._table_name, self._id))
# NOTE(review): this span is the interior of a module-level helper (its
# ``def`` line — presumably ``get_pg_type(f)`` — and several lines, including
# the dict header and a number of branch bodies, are missing from this
# excerpt).  It maps an orm field object to a pair:
#     (type returned by postgres when the column was created, type expression to create the column)
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.integer_big: 'int8',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    if type(f) in type_dict:
        f_type = (type_dict[type(f)], type_dict[type(f)])
    elif isinstance(f, fields.float):
        # NUMERIC vs float8 choice — the guarding condition (presumably on
        # f.digits) is missing from this excerpt
        f_type = ('numeric', 'NUMERIC')
        f_type = ('float8', 'DOUBLE PRECISION')
    elif isinstance(f, (fields.char, fields.reference)):
        f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
    elif isinstance(f, fields.selection):
        # size the varchar to the longest selection key (minimum 16)
        if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
            f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
        elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
            f_size = getattr(f, 'size', None) or 16
        f_type = ('int4', 'INTEGER')
        f_type = ('varchar', 'VARCHAR(%d)' % f_size)
    elif isinstance(f, fields.function) and eval('fields.'+(f._type)) in type_dict:
        # function fields delegate to the column type they emulate
        t = eval('fields.'+(f._type))
        f_type = (type_dict[t], type_dict[t])
    elif isinstance(f, fields.function) and f._type == 'float':
        f_type = ('numeric', 'NUMERIC')
        f_type = ('float8', 'DOUBLE PRECISION')
    elif isinstance(f, fields.function) and f._type == 'selection':
        f_type = ('text', 'text')
    elif isinstance(f, fields.function) and f._type == 'char':
        f_type = ('varchar', 'VARCHAR(%d)' % (f.size))
        logger = netsvc.Logger()
        logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
class orm_template(object):
    """Abstract base of all ORM models: field registration, browse, views,
    CSV import/export and validation hooks.

    NOTE(review): large parts of this class are missing from this excerpt.
    """
    # name of the many2one column used by hierarchical models
    _parent_name = 'parent_id'
    # presumably enables precomputed parent-hierarchy storage — TODO confirm
    _parent_store = False
    _parent_order = False
    # pseudo-field exposing the last modification timestamp, used for
    # optimistic concurrency checks
    CONCURRENCY_CHECK_FIELD = '__last_update'
373 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None):
374 raise _('The read_group method is not implemented on this object !')
    def _field_create(self, cr, context={}):
        """Record this model and each of its columns in ir_model /
        ir_model_fields (and ir_model_data when loaded from a module).

        NOTE(review): many original lines are missing from this excerpt
        (rowcount checks, ``vals = {`` header, closing parentheses, loop
        plumbing); the gaps are not individually marked and the text as
        shown does not parse.  Note also the mutable default ``context={}``.
        """
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
        # model not registered yet: allocate an id and insert it
        cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
        model_id = cr.fetchone()[0]
        cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
        model_id = cr.fetchone()[0]
        if 'module' in context:
            # external identifier so the model can be referenced by XML id
            name_id = 'model_'+self._name.replace('.','_')
            cr.execute('select * from ir_model_data where name=%s and res_id=%s', (name_id,model_id))
            cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                (name_id, context['module'], 'ir.model', model_id)
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec
        for (k, f) in self._columns.items():
            # NOTE(review): the ``vals = {`` line is missing from this excerpt
                'model_id': model_id,
                'field_description': f.string.replace("'", " "),
                'relation': f._obj or 'NULL',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly':(f.readonly and 1) or 0,
                'required':(f.required and 1) or 0,
                'selectable' : (f.selectable and 1) or 0,
                'relation_field': (f._type=='one2many' and isinstance(f,fields.one2many)) and f._fields_id or '',
            # When its a custom field,it does not contain f.select
            if context.get('field_state','base') == 'manual':
                if context.get('field_name','') == k:
                    vals['select_level'] = context.get('select','0')
                #setting value to let the problem NOT occur next time
                vals['select_level'] = cols[k]['select_level']
            cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
            id = cr.fetchone()[0]
            cr.execute("""INSERT INTO ir_model_fields (
                id, model_id, model, name, field_description, ttype,
                relation,view_load,state,select_level,relation_field
                %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                vals['relation'], bool(vals['view_load']), 'base',
                vals['select_level'],vals['relation_field']
            if 'module' in context:
                name1 = 'field_' + self._table + '_' + k
                cr.execute("select name from ir_model_data where name=%s", (name1,))
                # disambiguate duplicate external ids by suffixing the row id
                name1 = name1 + "_" + str(id)
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                    (name1, context['module'], 'ir.model.fields', id)
            for key, val in vals.items():
                if cols[k][key] != vals[key]:
                    cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                    cr.execute("""UPDATE ir_model_fields SET
                        model_id=%s, field_description=%s, ttype=%s, relation=%s,
                        view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s
                        model=%s AND name=%s""", (
                        vals['model_id'], vals['field_description'], vals['ttype'],
                        vals['relation'], bool(vals['view_load']),
                        vals['select_level'], bool(vals['readonly']),bool(vals['required']),bool(vals['selectable']),vals['relation_field'],vals['model'], vals['name']
461 def _auto_init(self, cr, context={}):
462 self._field_create(cr, context)
    def __init__(self, cr):
        """Register the model: require a _name (unless inheriting) and
        default _description and _table from it.

        NOTE(review): a few original lines are missing from this excerpt
        (e.g. the ``if not self._table:`` guard before the last assignment).
        """
        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name
            logger = netsvc.Logger()
            logger.notifyChannel('orm', netsvc.LOG_ERROR, msg )
            raise except_orm('ValueError', msg )
        if not self._description:
            self._description = self._name
        # table name derives from the model name: 'res.partner' -> 'res_partner'
        self._table = self._name.replace('.', '_')
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process={}):
        """Return a browse_record for an int/long id, or a
        browse_record_list for a list of ids.

        NOTE(review): the cache initialisation line and the fallback return
        for other ``select`` types are missing from this excerpt.  Note the
        mutable default ``fields_process={}``.
        """
        self._list_class = list_class or browse_record_list
        # need to accepts ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context)
    def __export_row(self, cr, uid, row, fields, context=None):
        """Build the export cells for one browse_record, recursing into
        one2many sub-records; returns ``[data] + lines``.

        NOTE(review): numerous original lines are missing from this excerpt
        (loop headers, conditionals, initialisations); gaps are not
        individually marked and the text as shown does not parse.
        """
        def check_type(field_type):
            # placeholder value matching the column type for empty cells
            # NOTE(review): branch bodies missing from this excerpt
            if field_type == 'float':
            elif field_type == 'integer':
            elif field_type == 'boolean':
        def selection_field(in_field):
            # resolve a (possibly inherited) selection column object
            col_obj = self.pool.get(in_field.keys()[0])
            if f[i] in col_obj._columns.keys():
                return col_obj._columns[f[i]]
            elif f[i] in col_obj._inherits.keys():
                selection_field(col_obj._inherits)
        data = map(lambda x: '', range(len(fields)))
        for fpos in range(len(fields)):
            # 'id' export: resolve the record's external (module.name) id
            model_data = self.pool.get('ir.model.data')
            data_ids = model_data.search(cr, uid, [('model','=',r._table_name),('res_id','=',r['id'])])
            d = model_data.read(cr, uid, data_ids, ['name','module'])[0]
            r = '%s.%s'%(d['module'],d['name'])
            # To display external name of selection field when its exported
            if not context.get('import_comp',False):# Allow external name only if its not import compatible
                if f[i] in self._columns.keys():
                    cols = self._columns[f[i]]
                elif f[i] in self._inherit_fields.keys():
                    cols = selection_field(self._inherits)
                if cols and cols._type == 'selection':
                    sel_list = cols.selection
                    if type(sel_list) == type([]):
                        r = [x[1] for x in sel_list if r==x[0]][0]
                if f[i] in self._columns:
                    r = check_type(self._columns[f[i]]._type)
                elif f[i] in self._inherit_fields:
                    r = check_type(self._inherit_fields[f[i]][2]._type)
            if isinstance(r, (browse_record_list, list)):
                # one2many: recurse with the sub-field paths, merging the
                # first sub-line into this row
                fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                lines2 = self.__export_row(cr, uid, row2, fields2,
                for fpos2 in range(len(fields)):
                    if lines2 and lines2[0][fpos2]:
                        data[fpos2] = lines2[0][fpos2]
                if isinstance(rr.name, browse_record):
                    rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id])
                    rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                dt += tools.ustr(rr_name or '') + ','
            if isinstance(r, browse_record):
                # many2one: export the display name, not the id
                r = self.pool.get(r._table_name).name_get(cr, uid, [r.id])
                r = r and r[0] and r[0][1] or ''
            data[fpos] = tools.ustr(r or '')
        return [data] + lines
    def export_data(self, cr, uid, ids, fields_to_export, context=None):
        """Export ``fields_to_export`` (slash-separated paths) for the given
        ids; returns {'datas': rows}, or {'warning': msg} when the
        import-compatible mode excludes some columns.

        NOTE(review): the initialisations of ``datas`` and
        ``warning_fields`` (and a few other lines) are missing from this
        excerpt.
        """
        imp_comp = context.get('import_comp',False)
        cols = self._columns.copy()
        for f in self._inherit_fields:
            cols.update({f: self._inherit_fields[f][2]})
        fields_to_export = map(lambda x: x.split('/'), fields_to_export)
        fields_export = fields_to_export+[]
        for field in fields_export:
            # import-compatible export rejects sub-paths and non-stored /
            # related / one2many columns
            if imp_comp and len(field)>1:
                warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
            elif len (field) <=1:
                if imp_comp and cols.get(field and field[0],False):
                    if ((isinstance(cols[field[0]], fields.function) and not cols[field[0]].store) \
                            or isinstance(cols[field[0]], fields.related)\
                            or isinstance(cols[field[0]], fields.one2many)):
                        warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
        if imp_comp and len(warning_fields):
            warning = 'Following columns cannot be exported since you select to be import compatible.\n%s' %('\n'.join(warning_fields))
            return {'warning' : warning}
        for row in self.browse(cr, uid, ids, context):
            datas += self.__export_row(cr, uid, row, fields_to_export, context)
        return {'datas':datas}
    def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
        """Import CSV-style rows into this model (inverse of export_data).

        Returns ``(imported_count, 0, 0, 0)`` on success or a
        ``(-1, res, message, '')`` tuple on error.

        NOTE(review): a very large number of original lines — try/except
        headers, initialisations, loop plumbing, else branches — are missing
        from this excerpt; the gaps are not individually marked and the text
        as shown does not parse.
        """
        fields = map(lambda x: x.split('/'), fields)
        logger = netsvc.Logger()
        ir_model_data_obj = self.pool.get('ir.model.data')

        def _check_db_id(self, model_name, db_id):
            # ensure a raw database id actually exists for model_name
            obj_model = self.pool.get(model_name)
            ids = obj_model.search(cr, uid, [('id','=',int(db_id))])
            raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, db_id))

        def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0):
            # parse one logical record starting at ``position`` into a values
            # dict, resolving :id / :db_id / :lang= column suffixes
            line = datas[position]
            ir_model_data_obj = self.pool.get('ir.model.data')
            # Import normal fields
            for i in range(len(fields)):
                raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
                if prefix and not prefix[0] in field:
                # ':db_id' columns hold raw database ids
                if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':db_id'):
                    field_name = field[0].split(':')[0]
                    model_rel = fields_def[field_name]['relation']
                    if fields_def[field[len(prefix)][:-6]]['type']=='many2many':
                        for db_id in line[i].split(config.get('csv_internal_sep')):
                            _check_db_id(self, model_rel, db_id)
                            warning += [tools.exception_to_unicode(e)]
                            logger.notifyChannel("import", netsvc.LOG_ERROR,
                                    tools.exception_to_unicode(e))
                        res = [(6, 0, res_id)]
                        _check_db_id(self, model_rel, line[i])
                        warning += [tools.exception_to_unicode(e)]
                        logger.notifyChannel("import", netsvc.LOG_ERROR,
                                tools.exception_to_unicode(e))
                    row[field_name] = res or False
                # ':id' columns hold external (module.name) xml ids
                if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':id'):
                    if fields_def[field[len(prefix)][:-3]]['type']=='many2many':
                        for word in line[i].split(config.get('csv_internal_sep')):
                            module, xml_id = word.rsplit('.', 1)
                            module, xml_id = current_module, word
                            id = ir_model_data_obj._get_id(cr, uid, module,
                            res_id2 = ir_model_data_obj.read(cr, uid, [id],
                                    ['res_id'])[0]['res_id']
                            res_id.append(res_id2)
                        res_id = [(6, 0, res_id)]
                        module, xml_id = line[i].rsplit('.', 1)
                        module, xml_id = current_module, line[i]
                        id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
                        res_res_id = ir_model_data_obj.read(cr, uid, [id],
                        res_id = res_res_id[0]['res_id']
                    row[field[-1][:-3]] = res_id or False
                # ':lang=xx' columns collect per-language translations
                if (len(field) == len(prefix)+1) and \
                        len(field[len(prefix)].split(':lang=')) == 2:
                    f, lang = field[len(prefix)].split(':lang=')
                    translate.setdefault(lang, {})[f]=line[i] or False
                if (len(field) == len(prefix)+1) and \
                        (prefix == field[0:len(prefix)]):
                    if field[len(prefix)] == "id":
                        is_xml_id = data_id = line[i]
                        d = data_id.split('.')
                        module = len(d)>1 and d[0] or ''
                        name = len(d)>1 and d[1] or d[0]
                        data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('name','=',name)])
                        d = ir_model_data_obj.read(cr, uid, data_ids, ['res_id'])[0]
                        if is_db_id and not db_id:
                            data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('res_id','=',is_db_id)])
                            if not len(data_ids):
                                ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id})
                        if is_db_id and int(db_id) != int(is_db_id):
                            warning += [_("Id is not the same than existing one: %s")%(is_db_id)]
                            logger.notifyChannel("import", netsvc.LOG_ERROR,
                                    _("Id is not the same than existing one: %s")%(is_db_id))
                    if field[len(prefix)] == "db_id":
                        _check_db_id(self, model_name, line[i])
                        data_res_id = is_db_id = int(line[i])
                        warning += [tools.exception_to_unicode(e)]
                        logger.notifyChannel("import", netsvc.LOG_ERROR,
                                tools.exception_to_unicode(e))
                        data_ids = ir_model_data_obj.search(cr, uid, [('model','=',model_name),('res_id','=',line[i])])
                        d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0]
                        data_id = '%s.%s'%(d['module'],d['name'])
                        if is_xml_id and not data_id:
                        if is_xml_id and is_xml_id!=data_id:
                            warning += [_("Id is not the same than existing one: %s")%(line[i])]
                            logger.notifyChannel("import", netsvc.LOG_ERROR,
                                    _("Id is not the same than existing one: %s")%(line[i]))
                    # plain columns, converted according to the field type
                    if fields_def[field[len(prefix)]]['type'] == 'integer':
                        res = line[i] and int(line[i])
                    elif fields_def[field[len(prefix)]]['type'] == 'boolean':
                        res = line[i].lower() not in ('0', 'false', 'off')
                    elif fields_def[field[len(prefix)]]['type'] == 'float':
                        res = line[i] and float(line[i])
                    elif fields_def[field[len(prefix)]]['type'] == 'selection':
                        if isinstance(fields_def[field[len(prefix)]]['selection'],
                            sel = fields_def[field[len(prefix)]]['selection']
                            sel = fields_def[field[len(prefix)]]['selection'](self,
                            if line[i] in [tools.ustr(key),tools.ustr(val)]: #Acepting key or value for selection field
                        if line[i] and not res:
                            logger.notifyChannel("import", netsvc.LOG_WARNING,
                                    _("key '%s' not found in selection field '%s'") % \
                                            (line[i], field[len(prefix)]))
                            warning += [_("Key/value '%s' not found in selection field '%s'")%(line[i],field[len(prefix)])]
                    elif fields_def[field[len(prefix)]]['type']=='many2one':
                        # resolve by display name with an exact-match search
                        relation = fields_def[field[len(prefix)]]['relation']
                        res2 = self.pool.get(relation).name_search(cr, uid,
                                line[i], [], operator='=', context=context)
                        res = (res2 and res2[0][0]) or False
                        warning += [_("Relation not found: %s on '%s'")%(line[i],relation)]
                        logger.notifyChannel("import", netsvc.LOG_WARNING,
                                _("Relation not found: %s on '%s'")%(line[i],relation))
                    elif fields_def[field[len(prefix)]]['type']=='many2many':
                        relation = fields_def[field[len(prefix)]]['relation']
                        for word in line[i].split(config.get('csv_internal_sep')):
                            res2 = self.pool.get(relation).name_search(cr,
                                    uid, word, [], operator='=', context=context)
                            res3 = (res2 and res2[0][0]) or False
                            warning += [_("Relation not found: %s on '%s'")%(line[i],relation)]
                            logger.notifyChannel("import",
                                    _("Relation not found: %s on '%s'")%(line[i],relation))
                        res = line[i] or False
                    row[field[len(prefix)]] = res
                elif (prefix==field[0:len(prefix)]):
                    # deeper path: remember the sub-field for the 2nd pass
                    if field[0] not in todo:
                        todo.append(field[len(prefix)])
            # Import one2many, many2many fields
            relation_obj = self.pool.get(fields_def[field]['relation'])
            newfd = relation_obj.fields_get(
                    cr, uid, context=context)
            res = process_liness(self, datas, prefix + [field], current_module, relation_obj._name, newfd, position)
            (newrow, max2, w2, translate2, data_id2, data_res_id2) = res
            nbrmax = max(nbrmax, max2)
            warning = warning + w2
            reduce(lambda x, y: x and y, newrow)
            row[field] = (reduce(lambda x, y: x or y, newrow.values()) and \
                    [(0, 0, newrow)]) or []
            # consume continuation lines belonging to the same parent record
            while (position+i)<len(datas):
                for j in range(len(fields)):
                    if (len(field2) <= (len(prefix)+1)) and datas[position+i][j]:
                (newrow, max2, w2, translate2, data_id2, data_res_id2) = process_liness(
                        self, datas, prefix+[field], current_module, relation_obj._name, newfd, position+i)
                if reduce(lambda x, y: x or y, newrow.values()):
                    row[field].append((0, 0, newrow))
                nbrmax = max(nbrmax, i)
            for i in range(max(nbrmax, 1)):
            result = (row, nbrmax, warning, translate, data_id, data_res_id)

        fields_def = self.fields_get(cr, uid, context=context)
        initial_size = len(datas)
        if config.get('import_partial', False) and filename:
            # resume support: remember how far a previous run got per file
            data = pickle.load(file(config.get('import_partial')))
            original_value = data.get(filename, 0)
        (res, other, warning, translate, data_id, res_id) = \
                process_liness(self, datas, [], current_module, self._name, fields_def)
        return (-1, res, 'Line ' + str(counter) +' : ' + '!\n'.join(warning), '')
        id = ir_model_data_obj._update(cr, uid, self._name,
                current_module, res, xml_id=data_id, mode=mode,
                noupdate=noupdate, res_id=res_id, context=context)
        if isinstance(e,psycopg2.IntegrityError):
            # map known constraint violations to friendlier messages
            msg= _('Insertion Failed! ')
            for key in self.pool._sql_error.keys():
                msg = self.pool._sql_error[key]
            return (-1, res, 'Line ' + str(counter) +' : ' + msg, '' )
        if isinstance(e, osv.orm.except_orm ):
            msg = _('Insertion Failed! ' + e[1])
            return (-1, res, 'Line ' + str(counter) +' : ' + msg, '' )
        #Raising Uncaught exception
        for lang in translate:
            # write the collected per-language values under that language
            context2 = context.copy()
            context2['lang'] = lang
            self.write(cr, uid, [id], translate[lang], context2)
        if config.get('import_partial', False) and filename and (not (counter%100)) :
            # checkpoint progress every 100 records
            data = pickle.load(file(config.get('import_partial')))
            data[filename] = initial_size - len(datas) + original_value
            pickle.dump(data, file(config.get('import_partial'),'wb'))
        #except Exception, e:
        #    logger.notifyChannel("import", netsvc.LOG_ERROR, e)
        #    return (-1, res, e[0], warning)
        #    return (-1, res, e[0], '')
        # TODO: Send a request with the result and multi-thread !
        return (done, 0, 0, 0)
935 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
936 raise _('The read method is not implemented on this object !')
938 def get_invalid_fields(self,cr,uid):
939 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run every entry of ``self._constraints`` over ``ids`` and raise a
        ``ValidateError`` except_orm if any fails; clears ``_invalids``
        otherwise.

        NOTE(review): the initialisation of the error-message list and the
        if/else around the final raise/clear are missing from this excerpt.
        """
        context = context or {}
        lng = context.get('lang', False) or 'en_US'
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            # each constraint is (checker_function, message, field_names)
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # prefer a translated message when one is registered
                translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
                _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                self._invalids.update(fields)
        raise except_orm('ValidateError', '\n'.join(error_msgs))
        self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """ Set default values for the object's fields.

        Returns a dict of {field_name:default_value}

        `fields_list`: the fields for which the object doesn't have
                       any value yet, and default values need to be
                       provided. If fields outside this list are
                       returned, the user-provided values will be
        """
        # NOTE(review): the end of the docstring and the body/return are
        # missing from this excerpt (presumably returns {} at this level).
974 def perm_read(self, cr, user, ids, context=None, details=True):
975 raise _('The perm_read method is not implemented on this object !')
977 def unlink(self, cr, uid, ids, context=None):
978 raise _('The unlink method is not implemented on this object !')
980 def write(self, cr, user, ids, vals, context=None):
981 raise _('The write method is not implemented on this object !')
983 def create(self, cr, user, vals, context=None):
984 raise _('The create method is not implemented on this object !')
    # returns the definition of each field in the object
    # the optional fields parameter can limit the result to some fields
    def fields_get_keys(self, cr, user, context=None, read_access=True):
        """Return the names of all fields, including those of _inherits
        parents.

        NOTE(review): the ``return res`` line is missing from this excerpt.
        BUG(review): ``fields`` is not defined in this scope — the recursive
        call below would raise NameError; confirm against the full source.
        """
        res = self._columns.keys()
        for parent in self._inherits:
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, fields, context))
    def fields_get(self, cr, user, fields=None, context=None, read_access=True):
        """Return the definition of each field (type, label, relation,
        selection, ...), with labels/help translated into the context
        language.

        NOTE(review): several original lines are missing from this excerpt
        (``res`` initialisation, continue/guard statements, loop headers,
        read-access checks); gaps are not individually marked and the text
        as shown does not parse.
        """
        translation_obj = self.pool.get('ir.translation')
        model_access_obj = self.pool.get('ir.model.access')
        for parent in self._inherits:
            # inherited parents contribute their own field definitions first
            res.update(self.pool.get(parent).fields_get(cr, user, fields, context))
        if self._columns.keys():
            for f in self._columns.keys():
                if fields and f not in fields:
                res[f] = {'type': self._columns[f]._type}
                # copy simple column attributes when they are set
                for arg in ('string', 'readonly', 'states', 'size', 'required', 'group_operator',
                        'change_default', 'translate', 'help', 'select', 'selectable'):
                    if getattr(self._columns[f], arg):
                        res[f][arg] = getattr(self._columns[f], arg)
                res[f]['readonly'] = True
                res[f]['states'] = {}
                for arg in ('digits', 'invisible','filters'):
                    if getattr(self._columns[f], arg, None):
                        res[f][arg] = getattr(self._columns[f], arg)
                # translated label and help text
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
                res[f]['string'] = res_trans
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
                res[f]['help'] = help_trans
                if hasattr(self._columns[f], 'selection'):
                    if isinstance(self._columns[f].selection, (tuple, list)):
                        sel = self._columns[f].selection
                        # translate each selection option
                        for (key, val) in sel:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                            sel2.append((key, val2 or val))
                        res[f]['selection'] = sel
                        # call the 'dynamic selection' function
                        res[f]['selection'] = self._columns[f].selection(self, cr,
                if res[f]['type'] in ('one2many', 'many2many', 'many2one', 'one2one'):
                    res[f]['relation'] = self._columns[f]._obj
                    res[f]['domain'] = self._columns[f]._domain
                    res[f]['context'] = self._columns[f]._context
        #TODO : read the fields from the database
        # filter out fields which aren't in the fields list
        for r in res.keys():
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Hook returning an override for the view's window title.

        NOTE(review): body missing from this excerpt (presumably returns
        False by default — confirm against the full source)."""
    def __view_look_dom(self, cr, user, node, view_id, context=None):
        """Recursively walk one etree node of a view architecture: collect
        per-field attrs (sub-views, selection lists), apply group-based
        visibility, and translate 'string'/'sum' labels.

        NOTE(review): many original lines are missing from this excerpt
        (``result``/``fields`` initialisation, child loops, try/except,
        else branches); gaps are not individually marked and the text as
        shown does not parse.
        """
        if node.tag == 'field':
            if node.get('name'):
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                    column = self._inherit_fields[node.get('name')][2]
                relation = column._obj
                # inline sub-views (form/tree/graph) are parsed recursively
                # on the related model
                if f.tag in ('form', 'tree', 'graph'):
                    ctx = context.copy()
                    ctx['base_model_name'] = self._name
                    xarch, xfields = self.pool.get(relation).__view_look_dom_arch(cr, user, f, view_id, ctx)
                    views[str(f.tag)] = {
                attrs = {'views': views}
                if node.get('widget') and node.get('widget') == 'selection':
                    # We can not use the 'string' domain has it is defined according to the record !
                    if column._domain and not isinstance(column._domain, (str, unicode)):
                        dom = column._domain
                    dom += eval(node.get('domain','[]'), {'uid':user, 'time':time})
                    context.update(eval(node.get('context','{}')))
                    attrs['selection'] = self.pool.get(relation).name_search(cr, user, '', dom, context=context)
                    # non-required selections get an empty choice
                    if (node.get('required') and not int(node.get('required'))) or not column.required:
                        attrs['selection'].append((False,''))
                fields[node.get('name')] = attrs

        elif node.tag in ('form', 'tree'):
            result = self.view_header_get(cr, user, False, node.tag, context)
            node.set('string', result)

        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if 'groups' in node.attrib:
            # hide the node unless the user belongs to one of the groups
            if node.get('groups'):
                groups = node.get('groups').split(',')
                access_pool = self.pool.get('ir.model.access')
                for group in groups:
                    readonly = readonly or access_pool.check_groups(cr, user, group)
                node.set('invisible', '1')
            del(node.attrib['groups'])

        if ('lang' in context) and not result:
            # translate visible labels into the context language
            if node.get('string'):
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string').encode('utf8'))
                if not trans and ('base_model_name' in context):
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string').encode('utf8'))
                node.set('string', trans)
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum').encode('utf8'))
            node.set('sum', trans)

        fields.update(self.__view_look_dom(cr, user, f, view_id, context))
1151 def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
1152 fields_def = self.__view_look_dom(cr, user, node, view_id, context=context)
1154 rolesobj = self.pool.get('res.roles')
1155 usersobj = self.pool.get('res.users')
1157 buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
1158 for button in buttons:
1160 if user != 1: # admin user has all roles
1161 user_roles = usersobj.read(cr, user, [user], ['roles_id'])[0]['roles_id']
1162 # TODO handle the case of more than one workflow for a model
1163 cr.execute("""SELECT DISTINCT t.role_id
1165 INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
1166 INNER JOIN wkf_transition t ON (t.act_to = a.id)
1169 """, (self._name, button.get('name'),))
1170 roles = cr.fetchall()
1172 # draft -> valid = signal_next (role X)
1173 # draft -> cancel = signal_cancel (no role)
1175 # valid -> running = signal_next (role Y)
1176 # valid -> cancel = signal_cancel (role Z)
1178 # running -> done = signal_next (role Z)
1179 # running -> cancel = signal_cancel (role Z)
1181 # As we don't know the object state, in this scenario,
1182 # the button "signal_cancel" will be always shown as there is no restriction to cancel in draft
1183 # the button "signal_next" will be show if the user has any of the roles (X Y or Z)
1184 # The verification will be made later in workflow process...
1186 can_click = any((not role) or rolesobj.check(cr, user, user_roles, role) for (role,) in roles)
1188 button.set('readonly', str(int(not can_click)))
1190 arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
1191 fields = self.fields_get(cr, user, fields_def.keys(), context)
1192 for field in fields_def:
# Sometimes the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
1195 fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
1196 elif field in fields:
1197 fields[field].update(fields_def[field])
1199 cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
1200 res = cr.fetchall()[:]
1202 res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
1203 msg = "\n * ".join([r[0] for r in res])
1204 msg += "\n\nEither you wrongly customised this view, or some modules bringing those views are not compatible with your current data model"
1205 netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg)
1206 raise except_orm('View error', msg)
1210 def __get_default_calendar_view(self):
1211 """Generate a default calendar view (For internal use only).
1214 arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
1215 '<calendar string="%s"') % (self._description)
1217 if (self._date_name not in self._columns):
1219 for dt in ['date','date_start','x_date','x_date_start']:
1220 if dt in self._columns:
1221 self._date_name = dt
1226 raise except_orm(_('Invalid Object Architecture!'),_("Insufficient fields for Calendar View!"))
1229 arch +=' date_start="%s"' % (self._date_name)
1231 for color in ["user_id","partner_id","x_user_id","x_partner_id"]:
1232 if color in self._columns:
1233 arch += ' color="' + color + '"'
1236 dt_stop_flag = False
1238 for dt_stop in ["date_stop","date_end","x_date_stop","x_date_end"]:
1239 if dt_stop in self._columns:
1240 arch += ' date_stop="' + dt_stop + '"'
1244 if not dt_stop_flag:
1245 for dt_delay in ["date_delay","planned_hours","x_date_delay","x_planned_hours"]:
1246 if dt_delay in self._columns:
1247 arch += ' date_delay="' + dt_delay + '"'
1251 ' <field name="%s"/>\n'
1252 '</calendar>') % (self._rec_name)
1256 def __get_default_search_view(self, cr, uid, context={}):
1259 if isinstance(s, unicode):
1260 return s.encode('utf8')
1263 view = self.fields_view_get(cr, uid, False, 'form', context)
1265 root = etree.fromstring(encode(view['arch']))
1266 res = etree.XML("<search string='%s'></search>" % root.get("string", ""))
1267 node = etree.Element("group")
1270 fields = root.xpath("//field[@select=1]")
1271 for field in fields:
1274 return etree.tostring(res, encoding="utf-8").replace('\t', '')
1277 # if view_id, view_type is not required
1279 def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
1284 if isinstance(s, unicode):
1285 return s.encode('utf8')
1288 def _inherit_apply(src, inherit):
1289 def _find(node, node2):
1290 if node2.tag == 'xpath':
1291 res = node.xpath(node2.get('expr'))
1297 for n in node.getiterator(node2.tag):
1299 for attr in node2.attrib:
1300 if attr == 'position':
1303 if n.get(attr) == node2.get(attr):
1310 # End: _find(node, node2)
1312 doc_dest = etree.fromstring(encode(inherit))
1313 toparse = [ doc_dest ]
1316 node2 = toparse.pop(0)
1317 if node2.tag == 'data':
1318 toparse += [ c for c in doc_dest ]
1320 node = _find(src, node2)
1321 if node is not None:
1323 if node2.get('position'):
1324 pos = node2.get('position')
1325 if pos == 'replace':
1326 parent = node.getparent()
1328 src = copy.deepcopy(node2[0])
1331 node.addprevious(child)
1332 node.getparent().remove(node)
1333 elif pos == 'attributes':
1334 for child in node2.getiterator('attribute'):
1335 attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
1337 node.set(attribute[0], attribute[1])
1339 del(node.attrib[attribute[0]])
1341 sib = node.getnext()
1345 elif pos == 'after':
1349 sib.addprevious(child)
1350 elif pos == 'before':
1351 node.addprevious(child)
1353 raise AttributeError(_('Unknown position in inherited view %s !') % pos)
1356 ' %s="%s"' % (attr, node2.get(attr))
1357 for attr in node2.attrib
1358 if attr != 'position'
1360 tag = "<%s%s>" % (node2.tag, attrs)
1361 raise AttributeError(_("Couldn't find tag '%s' in parent view !") % tag)
1363 # End: _inherit_apply(src, inherit)
1365 result = {'type': view_type, 'model': self._name}
1371 view_ref = context.get(view_type + '_view_ref', False)
1374 module, view_ref = view_ref.split('.', 1)
1375 cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
1376 view_ref_res = cr.fetchone()
1378 view_id = view_ref_res[0]
1381 where = (model and (" and model='%s'" % (self._name,))) or ''
1382 cr.execute('SELECT arch,name,field_parent,id,type,inherit_id FROM ir_ui_view WHERE id=%s'+where, (view_id,))
1384 cr.execute('''SELECT
1385 arch,name,field_parent,id,type,inherit_id
1392 ORDER BY priority''', (self._name, view_type))
1393 sql_res = cr.fetchone()
1399 view_id = ok or sql_res[3]
1402 # if a view was found
1404 result['type'] = sql_res[4]
1405 result['view_id'] = sql_res[3]
1406 result['arch'] = sql_res[0]
1408 def _inherit_apply_rec(result, inherit_id):
1409 # get all views which inherit from (ie modify) this view
1410 cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
1411 sql_inherit = cr.fetchall()
1412 for (inherit, id) in sql_inherit:
1413 result = _inherit_apply(result, inherit)
1414 result = _inherit_apply_rec(result, id)
1417 inherit_result = etree.fromstring(encode(result['arch']))
1418 result['arch'] = _inherit_apply_rec(inherit_result, sql_res[3])
1420 result['name'] = sql_res[1]
1421 result['field_parent'] = sql_res[2] or False
1424 # otherwise, build some kind of default view
1425 if view_type == 'form':
1426 res = self.fields_get(cr, user, context=context)
1427 xml = '<?xml version="1.0" encoding="utf-8"?> ' \
1428 '<form string="%s">' % (self._description,)
1430 if res[x]['type'] not in ('one2many', 'many2many'):
1431 xml += '<field name="%s"/>' % (x,)
1432 if res[x]['type'] == 'text':
1436 elif view_type == 'tree':
1437 _rec_name = self._rec_name
1438 if _rec_name not in self._columns:
1439 _rec_name = self._columns.keys()[0]
1440 xml = '<?xml version="1.0" encoding="utf-8"?>' \
1441 '<tree string="%s"><field name="%s"/></tree>' \
1442 % (self._description, self._rec_name)
1444 elif view_type == 'calendar':
1445 xml = self.__get_default_calendar_view()
1447 elif view_type == 'search':
1448 xml = self.__get_default_search_view(cr, user, context)
1451 xml = '<?xml version="1.0"?>' # what happens here, graph case?
1452 raise except_orm(_('Invalid Architecture!'),_("There is no view of type '%s' defined for the structure!") % view_type)
1453 result['arch'] = etree.fromstring(encode(xml))
1454 result['name'] = 'default'
1455 result['field_parent'] = False
1456 result['view_id'] = 0
1458 xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=context)
1459 result['arch'] = xarch
1460 result['fields'] = xfields
1463 if context and context.get('active_id',False):
1464 data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
1466 act_id = int(data_menu.split(',')[1])
1468 data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
1469 result['submenu'] = getattr(data_action,'menus', False)
1473 for key in ('report_sxw_content', 'report_rml_content',
1474 'report_sxw', 'report_rml',
1475 'report_sxw_content_data', 'report_rml_content_data'):
1479 ir_values_obj = self.pool.get('ir.values')
1480 resprint = ir_values_obj.get(cr, user, 'action',
1481 'client_print_multi', [(self._name, False)], False,
1483 resaction = ir_values_obj.get(cr, user, 'action',
1484 'client_action_multi', [(self._name, False)], False,
1487 resrelate = ir_values_obj.get(cr, user, 'action',
1488 'client_action_relate', [(self._name, False)], False,
1490 resprint = map(clean, resprint)
1491 resaction = map(clean, resaction)
1492 resaction = filter(lambda x: not x.get('multi', False), resaction)
1493 resprint = filter(lambda x: not x.get('multi', False), resprint)
1494 resrelate = map(lambda x: x[2], resrelate)
1496 for x in resprint+resaction+resrelate:
1497 x['string'] = x['name']
1499 result['toolbar'] = {
1501 'action': resaction,
1506 _view_look_dom_arch = __view_look_dom_arch
1508 def search_count(self, cr, user, args, context=None):
1511 res = self.search(cr, user, args, context=context, count=True)
1512 if isinstance(res, list):
1516 def search(self, cr, user, args, offset=0, limit=None, order=None,
1517 context=None, count=False):
1518 raise _('The search method is not implemented on this object !')
1520 def name_get(self, cr, user, ids, context=None):
1521 raise _('The name_get method is not implemented on this object !')
1523 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
1524 raise _('The name_search method is not implemented on this object !')
1526 def copy(self, cr, uid, id, default=None, context=None):
1527 raise _('The copy method is not implemented on this object !')
1529 def exists(self, cr, uid, id, context=None):
1530 raise _('The exists method is not implemented on this object !')
1532 def read_string(self, cr, uid, id, langs, fields=None, context=None):
1535 self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
1537 fields = self._columns.keys() + self._inherit_fields.keys()
1538 #FIXME: collect all calls to _get_source into one SQL call.
1540 res[lang] = {'code': lang}
1542 if f in self._columns:
1543 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
1545 res[lang][f] = res_trans
1547 res[lang][f] = self._columns[f].string
1548 for table in self._inherits:
1549 cols = intersect(self._inherit_fields.keys(), fields)
1550 res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
1553 res[lang]['code'] = lang
1554 for f in res2[lang]:
1555 res[lang][f] = res2[lang][f]
1558 def write_string(self, cr, uid, id, langs, vals, context=None):
1559 self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
1560 #FIXME: try to only call the translation in one SQL
1563 if field in self._columns:
1564 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field])
1565 for table in self._inherits:
1566 cols = intersect(self._inherit_fields.keys(), vals)
1568 self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
    def _check_removed_columns(self, cr, log=False):
        # Abstract hook: each concrete storage class decides how to handle
        # database columns whose field has been removed from the model
        # (the SQL-backed orm drops NOT NULL constraints; orm_memory has
        # nothing to do). Must be overridden.
        raise NotImplementedError()
1574 class orm_memory(orm_template):
1575 _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
1576 _inherit_fields = {}
1581 def __init__(self, cr):
1582 super(orm_memory, self).__init__(cr)
1586 cr.execute('delete from wkf_instance where res_type=%s', (self._name,))
1588 def vaccum(self, cr, uid):
1590 if self.check_id % self._check_time:
1593 max = time.time() - self._max_hours * 60 * 60
1594 for id in self.datas:
1595 if self.datas[id]['internal.date_access'] < max:
1597 self.unlink(cr, uid, tounlink)
1598 if len(self.datas)>self._max_count:
1599 sorted = map(lambda x: (x[1]['internal.date_access'], x[0]), self.datas.items())
1601 ids = map(lambda x: x[1], sorted[:len(self.datas)-self._max_count])
1602 self.unlink(cr, uid, ids)
1605 def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
1608 if not fields_to_read:
1609 fields_to_read = self._columns.keys()
1613 if isinstance(ids, (int, long)):
1617 for f in fields_to_read:
1618 if id in self.datas:
1619 r[f] = self.datas[id].get(f, False)
1620 if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
1623 if id in self.datas:
1624 self.datas[id]['internal.date_access'] = time.time()
1625 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
1626 for f in fields_post:
1627 res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
1628 for record in result:
1629 record[f] = res2[record['id']]
1630 if isinstance(ids_orig, (int, long)):
1634 def write(self, cr, user, ids, vals, context=None):
1640 if self._columns[field]._classic_write:
1641 vals2[field] = vals[field]
1643 upd_todo.append(field)
1645 self.datas[id_new].update(vals2)
1646 self.datas[id_new]['internal.date_access'] = time.time()
1647 for field in upd_todo:
1648 self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
1649 self._validate(cr, user, [id_new], context)
1650 wf_service = netsvc.LocalService("workflow")
1651 wf_service.trg_write(user, self._name, id_new, cr)
1654 def create(self, cr, user, vals, context=None):
1655 self.vaccum(cr, user)
1657 id_new = self.next_id
1659 for f in self._columns.keys():
1663 vals.update(self.default_get(cr, user, default, context))
1667 if self._columns[field]._classic_write:
1668 vals2[field] = vals[field]
1670 upd_todo.append(field)
1671 self.datas[id_new] = vals2
1672 self.datas[id_new]['internal.date_access'] = time.time()
1674 for field in upd_todo:
1675 self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
1676 self._validate(cr, user, [id_new], context)
1677 wf_service = netsvc.LocalService("workflow")
1678 wf_service.trg_create(user, self._name, id_new, cr)
1681 def default_get(self, cr, uid, fields_list, context=None):
1685 # get the default values for the inherited fields
1686 for f in fields_list:
1687 if f in self._defaults:
1688 if callable(self._defaults[f]):
1689 value[f] = self._defaults[f](self, cr, uid, context)
1691 value[f] = self._defaults[f]
1693 fld_def = ((f in self._columns) and self._columns[f]) \
1694 or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
1697 # get the default values set by the user and override the default
1698 # values defined in the object
1699 ir_values_obj = self.pool.get('ir.values')
1700 res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
1701 for id, field, field_value in res:
1702 if field in fields_list:
1703 fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
1704 if fld_def._type in ('many2one', 'one2one'):
1705 obj = self.pool.get(fld_def._obj)
1706 if not obj.search(cr, uid, [('id', '=', field_value)]):
1708 if fld_def._type in ('many2many'):
1709 obj = self.pool.get(fld_def._obj)
1711 for i in range(len(field_value)):
1712 if not obj.search(cr, uid, [('id', '=',
1715 field_value2.append(field_value[i])
1716 field_value = field_value2
1717 if fld_def._type in ('one2many'):
1718 obj = self.pool.get(fld_def._obj)
1720 for i in range(len(field_value)):
1721 field_value2.append({})
1722 for field2 in field_value[i]:
1723 if obj._columns[field2]._type in ('many2one', 'one2one'):
1724 obj2 = self.pool.get(obj._columns[field2]._obj)
1725 if not obj2.search(cr, uid,
1726 [('id', '=', field_value[i][field2])]):
1728 # TODO add test for many2many and one2many
1729 field_value2[i][field2] = field_value[i][field2]
1730 field_value = field_value2
1731 value[field] = field_value
1733 # get the default values from the context
1734 for key in context or {}:
1735 if key.startswith('default_') and (key[8:] in fields_list):
1736 value[key[8:]] = context[key]
1739 def _where_calc(self, cr, user, args, active_test=True, context=None):
1744 # if the object has a field named 'active', filter out all inactive
1745 # records unless they were explicitely asked for
1746 if 'active' in self._columns and (active_test and context.get('active_test', True)):
1748 active_in_args = False
1750 if a[0] == 'active':
1751 active_in_args = True
1752 if not active_in_args:
1753 args.insert(0, ('active', '=', 1))
1755 args = [('active', '=', 1)]
1758 e = expression.expression(args)
1759 e.parse(cr, user, self, context)
1760 res=e.__dict__['_expression__exp']
1764 def search(self, cr, user, args, offset=0, limit=None, order=None,
1765 context=None, count=False):
1768 result = self._where_calc(cr, user, args, context=context)
1770 return self.datas.keys()
1774 #Find the value of dict
1777 for id, data in self.datas.items():
1780 if limit and (counter >int(limit)):
1785 val =eval('data[arg[0]]'+'==' +' arg[2]')
1786 elif arg[1] in ['<','>','in','not in','<=','>=','<>']:
1787 val =eval('data[arg[0]]'+arg[1] +' arg[2]')
1788 elif arg[1] in ['ilike']:
1789 if str(data[arg[0]]).find(str(arg[2]))!=-1:
1804 def unlink(self, cr, uid, ids, context=None):
1806 if id in self.datas:
1809 cr.execute('delete from wkf_instance where res_type=%s and res_id = ANY (%s)', (self._name,ids))
1812 def perm_read(self, cr, user, ids, context=None, details=True):
1816 'create_uid': (user, 'Root'),
1817 'create_date': time.strftime('%Y-%m-%d %H:%M:%S'),
1819 'write_date': False,
1824 def _check_removed_columns(self, cr, log=False):
1825 # nothing to check in memory...
1828 def exists(self, cr, uid, id, context=None):
1829 return id in self.datas
1831 class orm(orm_template):
1832 _sql_constraints = []
1834 _protected = ['read','write','create','default_get','perm_read','unlink','fields_get','fields_view_get','search','name_get','distinct_field_get','name_search','copy','import_data','search_count', 'exists']
1836 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None):
1837 context = context or {}
1838 self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
1840 fields = self._columns.keys()
1842 (where_clause, where_params, tables) = self._where_calc(cr, uid, domain, context=context)
1843 dom = self.pool.get('ir.rule').domain_get(cr, uid, self._name, context=context)
1844 where_clause = where_clause + dom[0]
1845 where_params = where_params + dom[1]
1850 # Take care of adding join(s) if groupby is an '_inherits'ed field
1851 tables, where_clause = self._inherits_join_calc(groupby,tables,where_clause)
1853 if len(where_clause):
1854 where_clause = ' where '+string.join(where_clause, ' and ')
1857 limit_str = limit and ' limit %d' % limit or ''
1858 offset_str = offset and ' offset %d' % offset or ''
1860 fget = self.fields_get(cr, uid, fields)
1861 float_int_fields = filter(lambda x: fget[x]['type'] in ('float','integer'), fields)
1865 if fget.get(groupby,False) and fget[groupby]['type'] in ('date','datetime'):
1866 flist = "to_char(%s,'yyyy-mm') as %s "%(groupby,groupby)
1867 groupby = "to_char(%s,'yyyy-mm')"%(groupby)
1871 fields_pre = [f for f in float_int_fields if
1872 f == self.CONCURRENCY_CHECK_FIELD
1873 or (f in self._columns and getattr(self._columns[f], '_classic_write'))]
1874 for f in fields_pre:
1875 if f not in ['id','sequence']:
1876 operator = fget[f].get('group_operator','sum')
1877 flist += ','+operator+'('+f+') as '+f
1879 cr.execute('select min(%s.id) as id,' % self._table + flist + ' from ' + ','.join(tables) + where_clause + ' group by '+ groupby + limit_str + offset_str, where_params)
1882 for r in cr.dictfetchall():
1883 alldata[r['id']] = r
1885 data = self.read(cr, uid, alldata.keys(), [groupby], context=context)
1886 today = datetime.date.today()
1889 d['__domain'] = [(groupby,'=',alldata[d['id']][groupby] or False)] + domain
1890 if fget.has_key(groupby):
1891 if d[groupby] and fget[groupby]['type'] in ('date','datetime'):
1892 dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7],'%Y-%m')
1893 days = calendar.monthrange(dt.year, dt.month)[1]
1895 d[groupby] = datetime.datetime.strptime(d[groupby][:10],'%Y-%m-%d').strftime('%B %Y')
1896 d['__domain'] = [(groupby,'>=',alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01','%Y-%m-%d').strftime('%Y-%m-%d') or False),\
1897 (groupby,'<=',alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days),'%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
1898 elif fget[groupby]['type'] == 'many2one':
1899 d[groupby] = d[groupby] and ((type(d[groupby])==type(1)) and d[groupby] or d[groupby][1]) or ''
1901 del alldata[d['id']][groupby]
1902 d.update(alldata[d['id']])
    def _inherits_join_calc(self, field, tables, where_clause):
        """Add the table(s) and join condition(s) needed to reach *field*
        through the '_inherits' chain.

        Walks up the inheritance chain until the model that actually owns
        the column is found, appending at each hop the parent table and the
        join between the child's foreign-key column and the parent's id.

        @param field: name of the (possibly inherited) field to reach
        @param tables: list of table names enclosed in double quotes, as
                       used to build the FROM clause (modified in place)
        @param where_clause: list of SQL conditions (modified in place)
        @return: the updated (tables, where_clause) pair
        """
        current_table = self
        # Stop as soon as the field is a real column of current_table.
        while field in current_table._inherit_fields and not field in current_table._columns:
            parent_table = self.pool.get(current_table._inherit_fields[field][0])
            parent_table_name = parent_table._table
            # Only add the parent table and its join once.
            if '"%s"'%parent_table_name not in tables:
                tables.append('"%s"'%parent_table_name)
                where_clause.append('(%s.%s = %s.id)' % (current_table._table, current_table._inherits[parent_table._name], parent_table_name))
            current_table = parent_table
        return (tables, where_clause)
1922 def _parent_store_compute(self, cr):
1923 logger = netsvc.Logger()
1924 logger.notifyChannel('orm', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
1925 def browse_rec(root, pos=0):
1927 where = self._parent_name+'='+str(root)
1929 where = self._parent_name+' IS NULL'
1930 if self._parent_order:
1931 where += ' order by '+self._parent_order
1932 cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
1934 childs = cr.fetchall()
1936 pos2 = browse_rec(id[0], pos2)
1937 cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos,pos2,root))
1939 query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
1940 if self._parent_order:
1941 query += ' order by '+self._parent_order
1944 for (root,) in cr.fetchall():
1945 pos = browse_rec(root, pos)
1948 def _update_store(self, cr, f, k):
1949 logger = netsvc.Logger()
1950 logger.notifyChannel('orm', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
1951 ss = self._columns[k]._symbol_set
1952 update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
1953 cr.execute('select id from '+self._table)
1954 ids_lst = map(lambda x: x[0], cr.fetchall())
1957 ids_lst = ids_lst[40:]
1958 res = f.get(cr, self, iids, k, 1, {})
1959 for key,val in res.items():
1962 # if val is a many2one, just write the ID
1963 if type(val)==tuple:
1965 if (val<>False) or (type(val)<>bool):
1966 cr.execute(update_query, (ss[1](val), key))
1968 def _check_removed_columns(self, cr, log=False):
1969 logger = netsvc.Logger()
1970 # iterate on the database columns to drop the NOT NULL constraints
1971 # of fields which were required but have been removed (or will be added by another module)
1972 columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
1973 columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
1974 cr.execute("SELECT a.attname, a.attnotnull"
1975 " FROM pg_class c, pg_attribute a"
1976 " WHERE c.relname=%%s"
1977 " AND c.oid=a.attrelid"
1978 " AND a.attisdropped=%%s"
1979 " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
1980 " AND a.attname NOT IN (%s)" % ",".join(['%s']*len(columns)),
1981 [self._table, False] + columns)
1982 for column in cr.dictfetchall():
1984 logger.notifyChannel("orm", netsvc.LOG_DEBUG, "column %s is in the table %s but not in the corresponding object %s" % (column['attname'], self._table, self._name))
1985 if column['attnotnull']:
1986 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
1988 def _auto_init(self, cr, context={}):
1989 store_compute = False
1990 logger = netsvc.Logger()
1993 self._field_create(cr, context=context)
1994 if getattr(self, '_auto', True):
1995 cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname='%s'" % self._table)
1997 cr.execute("CREATE TABLE \"%s\" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS" % self._table)
1998 cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'","''")))
2001 if self._parent_store:
2002 cr.execute("""SELECT c.relname
2003 FROM pg_class c, pg_attribute a
2004 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
2005 """, (self._table, 'parent_left'))
2007 if 'parent_left' not in self._columns:
2008 logger.notifyChannel('orm', netsvc.LOG_ERROR, 'create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)' % (self._table, ))
2009 if 'parent_right' not in self._columns:
2010 logger.notifyChannel('orm', netsvc.LOG_ERROR, 'create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)' % (self._table, ))
2011 if self._columns[self._parent_name].ondelete<>'cascade':
2012 logger.notifyChannel('orm', netsvc.LOG_ERROR, "the columns %s on object must be set as ondelete='cascasde'" % (self._name, self._parent_name))
2013 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
2014 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
2016 store_compute = True
2018 if self._log_access:
2020 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
2021 'create_date': 'TIMESTAMP',
2022 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
2023 'write_date': 'TIMESTAMP'
2028 FROM pg_class c, pg_attribute a
2029 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
2030 """, (self._table, k))
2032 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
2035 self._check_removed_columns(cr, log=False)
2037 # iterate on the "object columns"
2038 todo_update_store = []
2039 update_custom_fields = context.get('update_custom_fields', False)
2040 for k in self._columns:
2041 if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
2043 #raise _('Can not define a column %s. Reserved keyword !') % (k,)
2044 #Not Updating Custom fields
2045 if k.startswith('x_') and not update_custom_fields:
2047 f = self._columns[k]
2049 if isinstance(f, fields.one2many):
2050 cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,))
2052 if self.pool.get(f._obj):
2053 if f._fields_id not in self.pool.get(f._obj)._columns.keys():
2054 if not self.pool.get(f._obj)._inherits or (f._fields_id not in self.pool.get(f._obj)._inherit_fields.keys()):
2055 raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id,f._obj,))
2058 cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id))
2059 res = cr.fetchone()[0]
2061 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY (%s) REFERENCES "%s" ON DELETE SET NULL' % (self._obj, f._fields_id, f._table))
2062 elif isinstance(f, fields.many2many):
2063 cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (f._rel,))
2064 if not cr.dictfetchall():
2065 if not self.pool.get(f._obj):
2066 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2067 ref = self.pool.get(f._obj)._table
2068 # ref = f._obj.replace('.', '_')
2069 cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE) WITH OIDS' % (f._rel, f._id1, self._table, f._id2, ref))
2070 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
2071 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
2072 cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref))
2075 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2076 "FROM pg_class c,pg_attribute a,pg_type t " \
2077 "WHERE c.relname=%s " \
2078 "AND a.attname=%s " \
2079 "AND c.oid=a.attrelid " \
2080 "AND a.atttypid=t.oid", (self._table, k))
2081 res = cr.dictfetchall()
2082 if not res and hasattr(f,'oldname'):
2083 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
2084 "FROM pg_class c,pg_attribute a,pg_type t " \
2085 "WHERE c.relname=%s " \
2086 "AND a.attname=%s " \
2087 "AND c.oid=a.attrelid " \
2088 "AND a.atttypid=t.oid", (self._table, f.oldname))
2089 res_old = cr.dictfetchall()
2090 logger.notifyChannel('orm', netsvc.LOG_DEBUG, 'trying to rename %s(%s) to %s'% (self._table, f.oldname, k))
2091 if res_old and len(res_old)==1:
2092 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % ( self._table,f.oldname, k))
2094 res[0]['attname'] = k
2097 if not isinstance(f, fields.function) or f.store:
2099 # add the missing field
2100 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
2101 cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'","''")))
2104 if not create and k in self._defaults:
2105 if callable(self._defaults[k]):
2106 default = self._defaults[k](self, cr, 1, context)
2108 default = self._defaults[k]
2110 ss = self._columns[k]._symbol_set
2111 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
2112 cr.execute(query, (ss[1](default),))
2114 logger.notifyChannel('orm', netsvc.LOG_DEBUG, 'setting default value of new column %s of table %s'% (k, self._table))
2116 logger.notifyChannel('orm', netsvc.LOG_DEBUG, 'creating new column %s of table %s'% (k, self._table))
2118 if isinstance(f, fields.function):
2120 if f.store is not True:
2121 order = f.store[f.store.keys()[0]][2]
2122 todo_update_store.append((order, f,k))
2124 # and add constraints if needed
2125 if isinstance(f, fields.many2one):
2126 if not self.pool.get(f._obj):
2127 raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
2128 ref = self.pool.get(f._obj)._table
2129 # ref = f._obj.replace('.', '_')
2130 # ir_actions is inherited so foreign key doesn't work on it
2131 if ref != 'ir_actions':
2132 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete))
2134 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2138 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k))
2139 except Exception, e:
2140 logger.notifyChannel('orm', netsvc.LOG_WARNING, 'WARNING: unable to set column %s of table %s not null !\nTry to re-run: openerp-server.py --update=module\nIf it doesn\'t work, update records and execute manually:\nALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % (k, self._table, self._table, k))
2144 f_pg_type = f_pg_def['typname']
2145 f_pg_size = f_pg_def['size']
2146 f_pg_notnull = f_pg_def['attnotnull']
2147 if isinstance(f, fields.function) and not f.store and\
2148 not getattr(f, 'nodrop', False):
2149 logger.notifyChannel('orm', netsvc.LOG_INFO, 'column %s (%s) in table %s removed: converted to a function !\n' % (k, f.string, self._table))
2150 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE'% (self._table, k))
2154 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
2159 ('text', 'char', 'VARCHAR(%d)' % (f.size or 0,), '::VARCHAR(%d)'%(f.size or 0,)),
2160 ('varchar', 'text', 'TEXT', ''),
2161 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2162 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
2163 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2164 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
2166 # !!! Avoid reduction of varchar field !!!
2167 if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
2168 # if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size != f.size:
2169 logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed size" % (k, self._table))
2170 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2171 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
2172 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size))
2173 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2176 if (f_pg_type==c[0]) and (f._type==c[1]):
2177 if f_pg_type != f_obj_type:
2178 if f_pg_type != f_obj_type:
2179 logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed type to %s." % (k, self._table, c[1]))
2181 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
2182 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
2183 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
2184 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
2188 if f_pg_type != f_obj_type:
2190 logger.notifyChannel('orm', netsvc.LOG_WARNING, "column '%s' in table '%s' has changed type (DB = %s, def = %s) but unable to migrate this change !" % (k, self._table, f_pg_type, f._type))
2192 # if the field is required and hasn't got a NOT NULL constraint
2193 if f.required and f_pg_notnull == 0:
2194 # set the field to the default value if any
2195 if k in self._defaults:
2196 if callable(self._defaults[k]):
2197 default = self._defaults[k](self, cr, 1, context)
2199 default = self._defaults[k]
2201 if (default is not None):
2202 ss = self._columns[k]._symbol_set
2203 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
2204 cr.execute(query, (ss[1](default),))
2205 # add the NOT NULL constraint
2208 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k))
2210 except Exception, e:
2211 logger.notifyChannel('orm', netsvc.LOG_WARNING, 'unable to set a NOT NULL constraint on column %s of the %s table !\nIf you want to have it, you should update the records and execute manually:\nALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % (k, self._table, self._table, k))
2213 elif not f.required and f_pg_notnull == 1:
2214 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
2216 indexname = '%s_%s_index' % (self._table, k)
2217 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
2218 res = cr.dictfetchall()
2219 if not res and f.select:
2220 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
2222 if res and not f.select:
2223 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
2225 if isinstance(f, fields.many2one):
2226 ref = self.pool.get(f._obj)._table
2227 if ref != 'ir_actions':
2228 cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
2229 'pg_attribute as att1, pg_attribute as att2 '
2230 'WHERE con.conrelid = cl1.oid '
2231 'AND cl1.relname = %s '
2232 'AND con.confrelid = cl2.oid '
2233 'AND cl2.relname = %s '
2234 'AND array_lower(con.conkey, 1) = 1 '
2235 'AND con.conkey[1] = att1.attnum '
2236 'AND att1.attrelid = cl1.oid '
2237 'AND att1.attname = %s '
2238 'AND array_lower(con.confkey, 1) = 1 '
2239 'AND con.confkey[1] = att2.attnum '
2240 'AND att2.attrelid = cl2.oid '
2241 'AND att2.attname = %s '
2242 "AND con.contype = 'f'", (self._table, ref, k, 'id'))
2243 res = cr.dictfetchall()
2252 if res[0]['confdeltype'] != confdeltype.get(f.ondelete.upper(), 'a'):
2253 cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res[0]['conname'] + '"')
2254 cr.execute('ALTER TABLE "' + self._table + '" ADD FOREIGN KEY ("' + k + '") REFERENCES "' + ref + '" ON DELETE ' + f.ondelete)
2257 logger.notifyChannel('orm', netsvc.LOG_ERROR, "Programming error, column %s->%s has multiple instances !"%(self._table,k))
2258 for order,f,k in todo_update_store:
2259 todo_end.append((order, self._update_store, (f, k)))
2262 cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (self._table,))
2263 create = not bool(cr.fetchone())
2265 for (key, con, _) in self._sql_constraints:
2266 conname = '%s_%s' % (self._table, key)
2267 cr.execute("SELECT conname FROM pg_constraint where conname=%s", (conname,))
2268 if not cr.dictfetchall():
2270 cr.execute('alter table "%s" add constraint "%s_%s" %s' % (self._table, self._table, key, con,))
2273 logger.notifyChannel('orm', netsvc.LOG_WARNING, 'unable to add \'%s\' constraint on table %s !\n If you want to have it, you should update the records and execute manually:\nALTER table %s ADD CONSTRAINT %s_%s %s' % (con, self._table, self._table, self._table, key, con,))
2276 if hasattr(self, "_sql"):
2277 for line in self._sql.split(';'):
2278 line2 = line.replace('\n', '').strip()
2283 self._parent_store_compute(cr)
    def __init__(self, cr):
        """Initialise the model in the registry.

        Registers store-triggers for stored function fields on the pool,
        records SQL-constraint error messages, loads user-defined
        ('manual') fields from ir_model_fields and prepares the sequence
        and default-value sanity checks.
        """
        super(orm, self).__init__(cr)

        if not hasattr(self, '_log_access'):
            # if not access is not specify, it is the same value as _auto
            self._log_access = getattr(self, "_auto", True)

        # copy so per-registry changes don't leak into the shared class attribute
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if hasattr(f, 'digits_change'):
            if not isinstance(f, fields.function):
            if self._columns[store_field].store is True:
                # store=True: recompute whenever records of this model change
                sm = {self._name:(lambda self,cr, uid, ids, c={}: ids, None, 10, None)}
                sm = self._columns[store_field].store
            for object, aa in sm.items():
                # store spec is (fnct, fields, priority, length) ...
                (fnct,fields2,order,length)=aa
                # ... or the legacy 3-tuple without a time length
                (fnct,fields2,order)=aa
                raise except_orm('Error',
                    ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                # register the trigger once per (model, field, fields2) triple
                self.pool._store_function.setdefault(object, [])
                for x,y,z,e,f,l in self.pool._store_function[object]:
                    if (x==self._name) and (y==store_field) and (e==fields2):
                self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order, length))
                # keep triggers ordered by priority (tuple index 4)
                self.pool._store_function[object].sort(lambda x,y: cmp(x[4],y[4]))

        for (key, _, msg) in self._sql_constraints:
            # map constraint name to its user-visible message for error reporting
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields
        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
        cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
        for field in cr.dictfetchall():
            if field['name'] in self._columns:
                'string': field['field_description'],
                'required': bool(field['required']),
                'readonly': bool(field['readonly']),
                'domain': field['domain'] or None,
                'size': field['size'],
                'ondelete': field['on_delete'],
                'translate': (field['translate']),
                #'select': int(field['select_level'])
            # NOTE(review): eval() on the stored selection string below runs
            # database-provided text — acceptable only because ir_model_fields
            # is admin-maintained; confirm this assumption before reuse.
            if field['ttype'] == 'selection':
                self._columns[field['name']] = getattr(fields, field['ttype'])(eval(field['selection']), **attrs)
            elif field['ttype'] == 'reference':
                self._columns[field['name']] = getattr(fields, field['ttype'])(selection=eval(field['selection']), **attrs)
            elif field['ttype'] == 'many2one':
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], **attrs)
            elif field['ttype'] == 'one2many':
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], field['relation_field'], **attrs)
            elif field['ttype'] == 'many2many':
                # synthesize a relation table name for the manual m2m field
                _rel1 = field['relation'].replace('.', '_')
                _rel2 = field['model'].replace('.', '_')
                _rel_name = 'x_%s_%s_%s_rel' %(_rel1, _rel2, field['name'])
                self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)

        self._inherits_reload()
        if not self._sequence:
            # default PostgreSQL serial sequence name for the table
            self._sequence = self._table+'_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        for f in self._columns:
            # restart() presumably resets per-registry cached state on the
            # column object — confirm against fields.py
            self._columns[f].restart()
    def default_get(self, cr, uid, fields_list, context=None):
        """Return default values for the requested fields.

        Precedence, lowest to highest: inherited models' defaults, this
        model's _defaults, user defaults stored in ir.values, and finally
        'default_<field>' keys found in the context.
        """
        # get the default values for the inherited fields
        for t in self._inherits.keys():
            value.update(self.pool.get(t).default_get(cr, uid, fields_list,

        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                if callable(self._defaults[f]):
                    value[f] = self._defaults[f](self, cr, uid, context)
                    value[f] = self._defaults[f]
            # property fields fetch their company-level default from ir.property
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            if isinstance(fld_def, fields.property):
                property_obj = self.pool.get('ir.property')
                definition_id = fld_def._field_get(cr, uid, self._name, f)
                # res_id=False selects the generic (non record-specific) property
                nid = property_obj.search(cr, uid, [('fields_id', '=',
                    definition_id), ('res_id', '=', False)])
                prop_value = property_obj.browse(cr, uid, nid[0],
                        context=context).value
                # property values are stored as 'model,id' references
                value[f] = (prop_value and int(prop_value.split(',')[1])) \

        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type in ('many2one', 'one2one'):
                    # drop dangling references to records that no longer exist
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                if fld_def._type in ('many2many'):
                    # keep only the target ids that still exist
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type in ('one2many'):
                    # filter the nested dicts field by field
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                value[field] = field_value

        # context values ('default_<field>') have the last word
        for key in context or {}:
            if key.startswith('default_') and (key[8:] in fields_list):
                value[key[8:]] = context[key]
2443 # Update objects that uses this one to update their _inherits fields
2445 def _inherits_reload_src(self):
2446 for obj in self.pool.obj_pool.values():
2447 if self._name in obj._inherits:
2448 obj._inherits_reload()
    def _inherits_reload(self):
        """Recompute _inherit_fields from the parent models in _inherits.

        Every column of each parent (direct columns and the parent's own
        inherited ones) is mapped as (parent_table, link_field, column) so
        it can be read/written transparently through this model.
        """
        for table in self._inherits:
            res.update(self.pool.get(table)._inherit_fields)
            for col in self.pool.get(table)._columns.keys():
                res[col] = (table, self._inherits[table], self.pool.get(table)._columns[col])
            for col in self.pool.get(table)._inherit_fields.keys():
                # transitively inherited fields: keep the column object only
                res[col] = (table, self._inherits[table], self.pool.get(table)._inherit_fields[col][2])
        self._inherit_fields = res
        # cascade the refresh down to models that inherit from this one
        self._inherits_reload_src()
2461 def fields_get(self, cr, user, fields=None, context=None):
2462 ira = self.pool.get('ir.model.access')
2463 read_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
2464 ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
2465 return super(orm, self).fields_get(cr, user, fields, context, read_access)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """Read the requested fields for the given ids.

        Checks read access, delegates the actual fetch to _read_flat, then
        sanitises 'reference' values whose target record no longer exists.
        Returns a list of dicts, or a single dict when a single id was
        passed in.
        """
        self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
            # no explicit field list: read everything (own + inherited fields)
            fields = self._columns.keys() + self._inherit_fields.keys()
        if isinstance(ids, (int, long)):
        select = map(lambda x: isinstance(x,dict) and x['id'] or x, select)
        result = self._read_flat(cr, user, select, fields, context, load)
            for key, v in r.items():
                if key in self._columns.keys():
                    type = self._columns[key]._type
                elif key in self._inherit_fields.keys():
                    type = self._inherit_fields[key][2]._type
                if type == 'reference' and v:
                    # reference values are stored as 'model,id'; NULL out the
                    # column when the referenced record has been deleted
                    model,ref_id = v.split(',')
                    table = self.pool.get(model)._table
                    cr.execute('select id from "%s" where id=%s' % (table,ref_id))
                    id_exist = cr.fetchone()
                        cr.execute('update "'+self._table+'" set "'+key+'"=NULL where "%s"=%s' %(key,''.join("'"+str(v)+"'")))
        if isinstance(ids, (int, long, dict)):
            # mirror the scalar input: return one record dict (or False)
            return result and result[0] or False
    def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
        """Low-level read: fetch the columns from SQL, apply record rules,
        translations, inherited-table reads and function-field computation.

        Returns a list of dicts, one per id.
        """
        #ids = map(lambda x:int(x), ids)
        if fields_to_read == None:
            fields_to_read = self._columns.keys()

        # construct a clause for the rules :
        d1, d2, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, context=context)

        # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
        fields_pre = [f for f in fields_to_read if
                           f == self.CONCURRENCY_CHECK_FIELD
                        or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                     ] + self._inherits.values()

        def convert_field(f):
            # build the SELECT expression for one plain column
            if f in ('create_date', 'write_date'):
                # truncate to whole seconds for stable comparisons
                return "date_trunc('second', %s) as %s" % (f, f)
            if f == self.CONCURRENCY_CHECK_FIELD:
                if self._log_access:
                    return "COALESCE(%s.write_date, %s.create_date, now())::timestamp AS %s" % (self._table, self._table, f,)
                return "now()::timestamp AS %s" % (f,)
            if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                # bin_size: report the stored size instead of the payload
                return 'length("%s") as "%s"' % (f, f)
            return '"%s"' % (f,)
        fields_pre2 = map(convert_field, fields_pre)
        order_by = self._parent_order or self._order
        # fetch in chunks to stay under the cursor's IN-clause limit
        for i in range(0, len(ids), cr.IN_MAX):
            sub_ids = ids[i:i+cr.IN_MAX]
                cr.execute('SELECT %s FROM %s WHERE %s.id = ANY (%%s) AND %s ORDER BY %s' % \
                        (','.join(fields_pre2 + [self._table + '.id']), ','.join(tables), self._table, ' and '.join(d1),
                            order_by),[sub_ids,]+d2)
                # a shorter result set means some ids were filtered by a rule
                if not cr.rowcount == len({}.fromkeys(sub_ids)):
                    raise except_orm(_('AccessError'),
                            _('You try to bypass an access rule while reading (Document type: %s).') % self._description)
                cr.execute('SELECT %s FROM \"%s\" WHERE id = ANY (%%s) ORDER BY %s' %
                           (','.join(fields_pre2 + ['id']), self._table,
                            order_by), (sub_ids,))
            res.extend(cr.dictfetchall())
            res = map(lambda x: {'id': x}, ids)

        # overlay translated values for translatable columns
        for f in fields_pre:
            if f == self.CONCURRENCY_CHECK_FIELD:
            if self._columns[f].translate:
                ids = map(lambda x: x['id'], res)
                #TODO: optimize out of this loop
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
                    r[f] = res_trans.get(r['id'], False) or r[f]

        # read the fields living on the _inherits parent tables
        for table in self._inherits:
            col = self._inherits[table]
            cols = intersect(self._inherit_fields.keys(), fields_to_read)
            res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
                if not record[col]:# if the record is deleted from _inherits table?
                record.update(res3[record[col]])
                if col not in fields_to_read:

        # all fields which need to be post-processed by a simple function (symbol_get)
        fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
            for f in fields_post:
                r[f] = self._columns[f]._symbol_get(r[f])
        ids = map(lambda x: x['id'], res)

        # all non inherited fields for which the attribute whose name is in load is False
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

        # Compute POST fields
        for f in fields_post:
            # group multi-fields so one get() call fills all of them
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key,val in todo.items():
                res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
                        if isinstance(res2[record['id']], str):res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
                        record[pos] = res2[record['id']][pos]
                res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
                        record[f] = res2[record['id']]

        #for f in fields_post:
        #    # get the value of that field for all records/ids
        #    res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
        #    for record in res:
        #        record[f] = res2[record['id']]
2619 for field in vals.copy():
2621 if field in self._columns:
2622 fobj = self._columns[field]
2629 for group in groups:
2630 module = group.split(".")[0]
2631 grp = group.split(".")[1]
2632 cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
2633 (grp, module, 'res.groups', user))
2634 readonly = cr.fetchall()
2635 if readonly[0][0] >= 1:
2638 elif readonly[0][0] == 0:
2644 if type(vals[field]) == type([]):
2646 elif type(vals[field]) == type(0.0):
2648 elif type(vals[field]) == type(''):
2649 vals[field] = '=No Permission='
    def perm_read(self, cr, user, ids, context=None, details=True):
        """Return creation/modification metadata for the given ids.

        When _log_access is set, each result dict carries create_uid,
        create_date, write_uid and write_date; with details=True the uid
        columns are resolved to (id, name) pairs via res.users.name_get.
        """
        if self._log_access:
            fields = ', u.create_uid, u.create_date, u.write_uid, u.write_date'
        if isinstance(ids, (int, long)):
        ids_str = string.join(map(lambda x: str(x), ids), ',')
        cr.execute('select u.id'+fields+' from "'+self._table+'" u where u.id in ('+ids_str+')')
        res = cr.dictfetchall()
                # normalise SQL NULLs to False
                r[key] = r[key] or False
                if key in ('write_uid', 'create_uid', 'uid') and details:
                    r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
        if isinstance(ids, (int, long)):
    def _check_concurrency(self, cr, ids, context):
        """Optimistic-locking check.

        If the context carries client-side timestamps under
        CONCURRENCY_CHECK_FIELD, raise a ConcurrencyException when any of
        the given records was modified in the database after that
        timestamp.
        """
        if context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access:
                # context keys are '<model>,<id>' strings
                return "%s,%s" % (self._name, oid)
            # one (id, timestamp) comparison per record, OR-ed together
            santa = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)"
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids = tools.flatten(((oid, context[self.CONCURRENCY_CHECK_FIELD][key(oid)])
                                              for oid in ids[i:i+cr.IN_MAX]
                                              if key(oid) in context[self.CONCURRENCY_CHECK_FIELD]))
                    # sub_ids alternates id/timestamp, hence len/2 clauses
                    cr.execute("SELECT count(1) FROM %s WHERE %s" % (self._table, " OR ".join([santa]*(len(sub_ids)/2))), sub_ids)
                        raise except_orm('ConcurrencyException', _('Records were modified in the meanwhile'))
    def unlink(self, cr, uid, ids, context=None):
        """Delete the records with the given ids.

        Enforces unlink access and record rules, refuses deletion of
        records used as default properties, clears workflow instances,
        then recomputes stored function fields on other models that
        depended on the deleted records.
        """
        if isinstance(ids, (int, long)):
        # collect store-trigger targets BEFORE the rows disappear
        result_store = self._store_get_values(cr, uid, ids, None, context)

        self._check_concurrency(cr, ids, context)

        self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)

        # a record referenced by a default ir.property must not be deleted
        properties = self.pool.get('ir.property')
        domain = [('res_id', '=', False),
                  ('value', 'in', ['%s,%s' % (self._name, i) for i in ids]),
        if properties.search(cr, uid, domain, context=context):
            raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

        # drop any running workflow instances for these records
        wf_service = netsvc.LocalService("workflow")
            wf_service.trg_delete(uid, self._name, oid, cr)

        #cr.execute('select * from '+self._table+' where id in ('+str_d+')', ids)
        #res = cr.dictfetchall()
        #for key in self._inherits:
        #    ids2 = [x[self._inherits[key]] for x in res]
        #    self.pool.get(key).unlink(cr, uid, ids2)

        d1, d2,tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, context=context)
            d1 = ' AND '+' and '.join(d1)

        # delete in chunks to stay under the cursor's IN-clause limit
        for i in range(0, len(ids), cr.IN_MAX):
            sub_ids = ids[i:i+cr.IN_MAX]
            str_d = string.join(('%s',)*len(sub_ids), ',')
                # record-rule check: every requested id must remain visible
                cr.execute('SELECT '+self._table+'.id FROM '+','.join(tables)+' ' \
                        'WHERE '+self._table+'.id IN ('+str_d+')'+d1, sub_ids+d2)
                if not cr.rowcount == len(sub_ids):
                    raise except_orm(_('AccessError'),
                            _('You try to bypass an access rule (Document type: %s).') % \

            cr.execute('delete from '+self._table+' ' \
                    'where id in ('+str_d+')', sub_ids)

        # recompute stored function fields on OTHER models that referenced
        # the deleted records (only targets that still exist)
        for order, object, store_ids, fields in result_store:
            if object<>self._name:
                obj = self.pool.get(object)
                cr.execute('select id from '+obj._table+' where id in ('+','.join(map(str, store_ids))+')')
                rids = map(lambda x: x[0], cr.fetchall())
                    obj._store_set_values(cr, uid, rids, fields, context)
    def write(self, cr, user, ids, vals, context=None):
        """Update the records with the given ids using the values in vals.

        Performs, in order: group-based field-level readonly filtering,
        concurrency and access checks, selection validation, the direct
        SQL UPDATE for classic columns, translation storage, set() calls
        for non-classic fields, propagation to _inherits parents,
        validation, parent_left/right maintenance for _parent_store
        models, stored-field recomputation and workflow triggers.
        """
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
                fobj = self._inherit_fields[field][2]
            # field restricted to groups: drop it from vals unless the user
            # belongs to at least one of them
            for group in groups:
                module = group.split(".")[0]
                grp = group.split(".")[1]
                cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                               (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                elif readonly[0][0] == 0:

        if isinstance(ids, (int, long)):

        self._check_concurrency(cr, ids, context)

        self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)

        # translated columns are written through ir.translation when a
        # non-English language is active in the context
        totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
            if field in self._columns:
                if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
                    if (not totranslate) or not self._columns[field].translate:
                        # plain column: becomes part of the single UPDATE
                        upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                        upd1.append(self._columns[field]._symbol_set[1](vals[field]))
                    direct.append(field)
                    upd_todo.append(field)
                updend.append(field)
            # validate selection values (including the model part of
            # 'reference' values)
            if field in self._columns \
                    and hasattr(self._columns[field], 'selection') \
                if self._columns[field]._type == 'reference':
                    val = vals[field].split(',')[0]
                if isinstance(self._columns[field].selection, (tuple, list)):
                    if val not in dict(self._columns[field].selection):
                        raise except_orm(_('ValidateError'),
                        _('The value "%s" for the field "%s" is not in the selection') \
                                % (vals[field], field))
                    # callable selection: evaluate it to get the choices
                    if val not in dict(self._columns[field].selection(
                        self, cr, user, context=context)):
                        raise except_orm(_('ValidateError'),
                        _('The value "%s" for the field "%s" is not in the selection') \
                                % (vals[field], field))

        if self._log_access:
            upd0.append('write_uid=%s')
            upd0.append('write_date=now()')

        d1, d2,tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, context=context)
            d1 = ' and '+' and '.join(d1)

        # update in chunks to stay under the cursor's IN-clause limit
        for i in range(0, len(ids), cr.IN_MAX):
            sub_ids = ids[i:i+cr.IN_MAX]
            ids_str = string.join(map(str, sub_ids), ',')
                # record-rule check: every requested id must remain visible
                cr.execute('SELECT '+self._table+'.id FROM '+','.join(tables)+' ' \
                        'WHERE '+self._table+'.id IN ('+ids_str+')'+d1, d2)
                if not cr.rowcount == len({}.fromkeys(sub_ids)):
                    raise except_orm(_('AccessError'),
                            _('You try to bypass an access rule while writing (Document type: %s).') % \
                cr.execute('SELECT id FROM "'+self._table+'" WHERE id IN ('+ids_str+')')
                if not cr.rowcount == len({}.fromkeys(sub_ids)):
                    raise except_orm(_('AccessError'),
                            _('You try to write on an record that doesn\'t exist ' \
                                    '(Document type: %s).') % self._description)
            cr.execute('update '+self._table+' set '+string.join(upd0, ',')+' ' \
                    'where id in ('+ids_str+')', upd1)

            if self._columns[f].translate:
                # keep the source (en_US) term so the translation stays linked
                src_trans = self.pool.get(self._name).read(cr,user,ids,[f])[0][f]
                # Inserting value to DB
                self.write(cr, user, ids, {f:vals[f]})
                self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

        # call the 'set' method of fields which are not classic_write
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

        # default element in context must be removed when call a one2many or many2many
        rel_context = context.copy()
        for c in context.items():
            if c[0].startswith('default_'):
                del rel_context[c[0]]

        for field in upd_todo:
                result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

        # forward inherited-field values to the _inherits parent records
        for table in self._inherits:
            col = self._inherits[table]
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids = ids[i:i+cr.IN_MAX]
                ids_str = string.join(map(str, sub_ids), ',')
                cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                        'where id in ('+ids_str+')', upd1)
                nids.extend([x[0] for x in cr.fetchall()])

                if self._inherit_fields[val][0] == table:
                self.pool.get(table).write(cr, user, nids, v, context)

        self._validate(cr, user, ids, context)
        # TODO: use _order to set dest at the right position and not first node of parent
        if self._parent_store and (self._parent_name in vals):
                # during module init the whole tree is recomputed later
                self.pool._init_parent[self._name]=True

                # Find Position of the element
                if vals[self._parent_name]:
                    cr.execute('select parent_left,parent_right,id from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (vals[self._parent_name],))
                    cr.execute('select parent_left,parent_right,id from '+self._table+' where '+self._parent_name+' is null order by '+(self._parent_order or self._order))
                result_p = cr.fetchall()
                for (pleft,pright,pid) in result_p:
                    # It's the first node of the parent: position = parent_left+1
                    if not vals[self._parent_name]:
                        cr.execute('select parent_left from '+self._table+' where id=%s', (vals[self._parent_name],))
                        position = cr.fetchone()[0]+1

                # We have the new position !
                cr.execute('select parent_left,parent_right from '+self._table+' where id=%s', (id,))
                pleft,pright = cr.fetchone()
                distance = pright - pleft + 1

                # moving a node inside its own subtree is impossible
                if position>pleft and position<=pright:
                    raise except_orm(_('UserError'), _('Recursivity Detected.'))

                    # shift the interval open at the target, then slide the
                    # moved subtree into place (nested-set maintenance)
                    cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                    cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                    cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft,position-pleft, pleft, pright))
                    cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                    cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                    cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance,pleft-position+distance, pleft+distance, pright+distance))

        # recompute stored function fields impacted by the changed columns
        result += self._store_get_values(cr, user, ids, vals.keys(), context)
        for order, object, ids, fields in result:
            self.pool.get(object)._store_set_values(cr, user, ids, fields, context)

        wf_service = netsvc.LocalService("workflow")
            wf_service.trg_write(user, self._name, id, cr)
2959 # TODO: Should set perm to user.xxx
2961 def create(self, cr, user, vals, context=None):
2962 """ create(cr, user, vals, context) -> int
2963 cr = database cursor
2965 vals = dictionary of the form {'field_name':field_value, ...}
# NOTE(review): this listing is fragmentary (source line numbers skip);
# the comments below describe only the statements that are visible here.
# 1. Check create-access on this model for the calling user.
2969 self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
# 2. Collect defaults: fields of _inherits parents whose table is not in
# avoid_table also get defaults; many2many defaults given as plain id
# lists are normalized to the (6, 0, ids) write-command form.
2974 for (t, c) in self._inherits.items():
2976 avoid_table.append(t)
2977 for f in self._columns.keys(): # + self._inherit_fields.keys():
2981 for f in self._inherit_fields.keys():
2982 if (not f in vals) and (self._inherit_fields[f][0] not in avoid_table):
2986 default_values = self.default_get(cr, user, default, context)
2987 for dv in default_values:
2988 if dv in self._columns and self._columns[dv]._type == 'many2many':
2989 if default_values[dv] and isinstance(default_values[dv][0], (int, long)):
2990 default_values[dv] = [(6, 0, default_values[dv])]
2992 vals.update(default_values)
# 3. Split vals between this table and the _inherits parent tables
# (delegation inheritance): tocreate maps parent table -> its vals.
2995 for v in self._inherits:
2996 if self._inherits[v] not in vals:
2999 tocreate[v] = {self._inherits[v]:vals[self._inherits[v]]}
3000 (upd0, upd1, upd2) = ('', '', [])
3002 for v in vals.keys():
3003 if v in self._inherit_fields:
3004 (table, col, col_detail) = self._inherit_fields[v]
3005 tocreate[table][v] = vals[v]
3008 if (v not in self._inherit_fields) and (v not in self._columns):
3011 # Try-except added to filter the creation of those records whose fields are readonly.
3012 # Example : any dashboard which has all the fields readonly. (due to Views(database views))
3014 cr.execute("SELECT nextval('"+self._sequence+"')")
3016 raise except_orm(_('UserError'),
3017 _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
# 4. New id comes from the model's PostgreSQL sequence; parent records
# (delegation inheritance) are created first and linked via upd0.
3019 id_new = cr.fetchone()[0]
3020 for table in tocreate:
3021 if self._inherits[table] in vals:
3022 del vals[self._inherits[table]]
3023 id = self.pool.get(table).create(cr, user, tocreate[table])
3024 upd0 += ','+self._inherits[table]
3028 #Start : Set bool fields to be False if they are not touched (to make search more powerful)
3029 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
3031 for bool_field in bool_fields:
3032 if bool_field not in vals:
3033 vals[bool_field] = False
# 5. Per-field handling: resolve the column object (own or inherited),
# then check field-level group restrictions by counting the user's
# membership in the groups named on the field.
3035 for field in vals.copy():
3037 if field in self._columns:
3038 fobj = self._columns[field]
3040 fobj = self._inherit_fields[field][2]
3046 for group in groups:
3047 module = group.split(".")[0]
3048 grp = group.split(".")[1]
# NOTE(review): group names are interpolated directly into the SQL
# string here (not parameterized) — values come from field
# definitions, not end users, but worth confirming.
3049 cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
3050 (grp, module, 'res.groups', user))
3051 readonly = cr.fetchall()
3052 if readonly[0][0] >= 1:
3055 elif readonly[0][0] == 0:
# 6. _classic_write columns go straight into the INSERT (upd0 = column
# list, upd1 = placeholder list, upd2 = values); the rest (function,
# one2many, many2many, ...) are deferred to upd_todo and written via
# their column object's set() after the row exists.
3063 if self._columns[field]._classic_write:
3064 upd0 = upd0 + ',"' + field + '"'
3065 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
3066 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
3068 upd_todo.append(field)
# 7. Validate selection fields: the provided value (or its model part,
# for 'reference' fields) must be a key of the selection, whether the
# selection is a static list or a callable.
3069 if field in self._columns \
3070 and hasattr(self._columns[field], 'selection') \
3072 if self._columns[field]._type == 'reference':
3073 val = vals[field].split(',')[0]
3076 if isinstance(self._columns[field].selection, (tuple, list)):
3077 if val not in dict(self._columns[field].selection):
3078 raise except_orm(_('ValidateError'),
3079 _('The value "%s" for the field "%s" is not in the selection') \
3080 % (vals[field], field))
3082 if val not in dict(self._columns[field].selection(
3083 self, cr, user, context=context)):
3084 raise except_orm(_('ValidateError'),
3085 _('The value "%s" for the field "%s" is not in the selection') \
3086 % (vals[field], field))
3087 if self._log_access:
3088 upd0 += ',create_uid,create_date'
3091 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
# Deferred fields are written in column-priority order.
3092 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
# 8. Maintain the nested-set (parent_left/parent_right) index when the
# model uses _parent_store; with no parent the node is appended after
# the current max(parent_right).
3094 if self._parent_store:
3096 self.pool._init_parent[self._name]=True
3098 parent = vals.get(self._parent_name, False)
3100 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
3102 result_p = cr.fetchall()
3103 for (pleft,) in result_p:
3108 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
3109 pleft_old = cr.fetchone()[0]
3112 cr.execute('select max(parent_right) from '+self._table)
3113 pleft = cr.fetchone()[0] or 0
3114 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
3115 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
3116 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1,pleft+2,id_new))
3118 # 'default_*' keys in the context must be removed before calling a one2many or many2many set()
3119 rel_context = context.copy()
3120 for c in context.items():
3121 if c[0].startswith('default_'):
3122 del rel_context[c[0]]
3125 for field in upd_todo:
3126 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
# 9. Run model constraints, recompute dependent stored function fields
# (unless the caller opted out with context['no_store_function']),
# then fire the workflow create trigger.
3127 self._validate(cr, user, [id_new], context)
3129 if not context.get('no_store_function', False):
3130 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
3133 for order, object, ids, fields2 in result:
3134 if not (object, ids, fields2) in done:
3135 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
3136 done.append((object, ids, fields2))
3138 wf_service = netsvc.LocalService("workflow")
3139 wf_service.trg_create(user, self._name, id_new, cr)
3142 def _store_get_values(self, cr, uid, ids, fields, context):
# Determine which stored function fields must be recomputed after a
# change to `fields` on records `ids`.
# NOTE(review): fragmentary listing — lines are missing between the
# numbered statements; comments describe only what is visible.
# Each entry of pool._store_function[self._name] appears to be a tuple
# (model, field_name, ids_fn, trigger_fields, priority, ...) — indices
# 0..4 are used below; confirm against where the registry is built.
3144 fncts = self.pool._store_function.get(self._name, [])
3145 for fnct in range(len(fncts)):
# Only triggers whose watched-field set intersects `fields` matter.
3150 for f in (fields or []):
3151 if f in fncts[fnct][3]:
3157 result.setdefault(fncts[fnct][0], {})
3159 # uid == 1 for accessing objects having rules defined on store fields
3160 ids2 = fncts[fnct][2](self,cr, 1, ids, context)
# Map: target model -> {target id -> [trigger indices]}; falsy ids
# returned by the trigger's ids function are dropped.
3161 for id in filter(None, ids2):
3162 result[fncts[fnct][0]].setdefault(id, [])
3163 result[fncts[fnct][0]][id].append(fnct)
# Group ids that share the exact same trigger set, then bucket the
# work by priority (fncts[...][4]) for ordered recomputation.
# NOTE(review): `dict` (the builtin name) and `object` are shadowed by
# locals here — presumably `dict` is an accumulator initialized on a
# line not visible in this listing.
3165 for object in result:
3167 for id,fnct in result[object].items():
3168 k2.setdefault(tuple(fnct), [])
3169 k2[tuple(fnct)].append(id)
3170 for fnct,id in k2.items():
3171 dict.setdefault(fncts[fnct[0]][4],[])
3172 dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4],object,id,map(lambda x: fncts[x][1], fnct)))
3180 def _store_set_values(self, cr, uid, ids, fields, context):
# Recompute and write the given stored function `fields` for records
# `ids` directly via SQL UPDATE.
# NOTE(review): fragmentary listing — lines are missing between the
# numbered statements; comments describe only what is visible.
# When the model tracks write metadata, build field_dict: record id ->
# fields whose stored value is still "fresh" (the record's write_date
# plus the trigger's delay window, i[5] hours, has not yet elapsed) so
# they can be skipped below.
3183 if self._log_access:
# NOTE(review): ids are joined inline into the IN (...) list rather
# than parameterized; ids come from internal callers here.
3184 cr.execute('select id,write_date from '+self._table+' where id in ('+','.join(map(str, ids))+')')
3188 field_dict.setdefault(r[0], [])
3189 res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
3190 write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
3191 for i in self.pool._store_function.get(self._name, []):
3193 up_write_date = write_date + datetime.timedelta(hours=i[5])
3194 if datetime.datetime.now() < up_write_date:
3196 field_dict[r[0]].append(i[1])
# Group multi-value function fields by their shared _multi key so one
# get() call computes all columns of the group at once.
3202 if self._columns[f]._multi not in keys:
3203 keys.append(self._columns[f]._multi)
3204 todo.setdefault(self._columns[f]._multi, [])
3205 todo[self._columns[f]._multi].append(f)
3209 # uid == 1 for accessing objects having rules defined on store fields
3210 result = self._columns[val[0]].get(cr, self, ids, val, 1, context=context)
3211 for id,value in result.items():
# Skip fields whose stored value is still fresh (see field_dict).
3213 for f in value.keys():
3214 if f in field_dict[id]:
# many2one/one2one get() results look like (id, name) pairs; only the
# id is stored in the column.
3221 if self._columns[v]._type in ('many2one', 'one2one'):
3223 value[v] = value[v][0]
3226 upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
3227 upd1.append(self._columns[v]._symbol_set[1](value[v]))
3229 cr.execute('update "' + self._table + '" set ' + \
3230 string.join(upd0, ',') + ' where id = %s', upd1)
# Single (non-multi) function fields: same pattern, one column at a
# time, again honouring the freshness check via field_dict.
3234 # uid == 1 for accessing objects having rules defined on store fields
3235 result = self._columns[f].get(cr, self, ids, f, 1, context=context)
3236 for r in result.keys():
3238 if r in field_dict.keys():
3239 if f in field_dict[r]:
3241 for id,value in result.items():
3242 if self._columns[f]._type in ('many2one', 'one2one'):
3247 cr.execute('update "' + self._table + '" set ' + \
3248 '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value),id))
def perm_write(self, cr, user, ids, fields, context=None):
    """Removed API — always raises.

    The original body was ``raise _('This method does not exist
    anymore')``, i.e. raising a bare (translated) string.  String
    exceptions were removed from Python: on Python >= 2.6 that line
    raises an unrelated TypeError instead of the intended message.
    Raise a real exception carrying the same translated message.

    :raises NotImplementedError: always.
    """
    raise NotImplementedError(_('This method does not exist anymore'))
3257 # TODO: improve handling of NULL values
3258 def _where_calc(self, cr, user, args, active_test=True, context=None):
# Translate a domain `args` into SQL pieces: returns (qu1, qu2, tables)
# where qu1 is a list of WHERE clause fragments, qu2 the corresponding
# parameter values, and tables the FROM tables.
# NOTE(review): fragmentary listing — lines are missing between the
# numbered statements; comments describe only what is visible.
3262 # if the object has a field named 'active', filter out all inactive
3263 # records unless they were explicitly asked for
3264 if 'active' in self._columns and (active_test and context.get('active_test', True)):
# Only prepend ('active','=',1) when the caller's domain does not
# already constrain 'active' itself.
3266 active_in_args = False
3268 if a[0] == 'active':
3269 active_in_args = True
3270 if not active_in_args:
3271 args.insert(0, ('active', '=', 1))
# With no domain at all, the active filter becomes the whole domain.
3273 args = [('active', '=', 1)]
# Delegate domain parsing/SQL generation to the expression module.
3277 e = expression.expression(args)
3278 e.parse(cr, user, self, context)
3279 tables = e.get_tables()
3280 qu1, qu2 = e.to_sql()
# Wrap a non-empty clause string into a one-element list (old-style
# `and/or` conditional expression).
3281 qu1 = qu1 and [qu1] or []
# Empty-domain branch: no clauses, just this model's table.
3283 qu1, qu2, tables = [], [], ['"%s"' % self._table]
3285 return (qu1, qu2, tables)
def _check_qorder(self, word):
    """Validate *word* as a safe ORDER BY fragment.

    The whitelist regexp `regex_order` only admits (optionally quoted)
    identifiers with optional asc/desc, comma-separated — anything else
    is rejected, which keeps caller-supplied `order` strings from being
    used for SQL injection.

    :raises except_orm: when *word* does not match the whitelist.
    """
    if regex_order.match(word) is None:
        raise except_orm(_('AccessError'), _('Bad query.'))
3292 def search(self, cr, user, args, offset=0, limit=None, order=None,
3293 context=None, count=False):
# Return the ids of records matching domain `args` (or the match count
# when `count` is true), honouring record rules, order, limit, offset.
# NOTE(review): fragmentary listing — lines are missing between the
# numbered statements; comments describe only what is visible.
3296 # compute the where, order by, limit and offset clauses
3297 (qu1, qu2, tables) = self._where_calc(cr, user, args, context=context)
# Record rules (ir.rule) add mandatory security clauses on top of the
# caller's domain.
3298 dom = self.pool.get('ir.rule').domain_get(cr, user, self._name, context=context)
3306 qu1 = ' where '+string.join(qu1, ' and ')
# Default ordering is the model's _order; a caller-supplied `order` is
# first validated against the injection whitelist, and its leading
# column must be a real, classic-write column.
3311 order_by = self._order
3313 self._check_qorder(order)
3314 o = order.split(' ')[0]
3315 if (o in self._columns) and getattr(self._columns[o], '_classic_write'):
# Old-style `and/or` conditionals: empty string when no limit/offset.
3318 limit_str = limit and ' limit %d' % limit or ''
3319 offset_str = offset and ' offset %d' % offset or ''
# count=True branch: select count(...) with the same clauses.
3323 cr.execute('select count(%s.id) from ' % self._table +
3324 ','.join(tables) +qu1 + limit_str + offset_str, qu2)
3327 cr.execute('select %s.id from ' % self._table + ','.join(tables) +qu1+' order by '+order_by+limit_str+offset_str, qu2)
3329 return [x[0] for x in res]
3331 # returns the different values ever entered for one field
3332 # this is used, for example, in the client when the user hits enter on
# a text field, to suggest previously-entered values
3334 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
# Return the distinct values previously entered for `field`, matching
# `value`, delegating to the parent model when the field is inherited.
# NOTE(review): fragmentary listing — the args-default handling lines
# are missing from this view.
3337 if field in self._inherit_fields:
# Inherited field: forward the whole call to the owning parent model.
3338 return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
3340 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
3342 def name_get(self, cr, user, ids, context=None):
# Return [(id, display_name)] pairs for the given ids, reading the
# model's _rec_name column.
# NOTE(review): fragmentary listing — the body of the isinstance branch
# (presumably wrapping a scalar id into a list) is not visible here.
3347 if isinstance(ids, (int, long)):
3349 return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
3350 [self._rec_name], context, load='_classic_write')]
3352 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
# Search records whose _rec_name matches `name` with `operator`
# (default ilike), combined with the extra domain `args`, and return
# their name_get() representation.
# NOTE(review): fragmentary listing — guard lines (args default, empty
# name handling, return) are not visible here.
3359 args += [(self._rec_name, operator, name)]
3360 ids = self.search(cr, user, args, limit=limit, context=context)
3361 res = self.name_get(cr, user, ids, context)
3364 def copy_data(self, cr, uid, id, default=None, context=None):
# Build the `vals` dictionary needed to duplicate record `id` (plus the
# translations to copy), applying `default` overrides; returns
# (data, trans_data).
# NOTE(review): fragmentary listing — lines are missing between the
# numbered statements; comments describe only what is visible.
# Unless the caller overrides 'state', reset it to the model's default
# (a callable default is evaluated here).
3369 if 'state' not in default:
3370 if 'state' in self._defaults:
3371 if callable(self._defaults['state']):
3372 default['state'] = self._defaults['state'](self, cr, uid, context)
3374 default['state'] = self._defaults['state']
3376 data = self.read(cr, uid, [id], context=context)[0]
3377 fields = self.fields_get(cr, uid, context=context)
# Per-field normalization of the read() result into write()-compatible
# values, keyed on the field's type.
3380 ftype = fields[f]['type']
# Audit columns are never copied to the duplicate.
3382 if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
3386 data[f] = default[f]
3387 elif ftype == 'function':
3389 elif ftype == 'many2one':
# read() gives (id, name); keep only the id.
3391 data[f] = data[f] and data[f][0]
3394 elif ftype in ('one2many', 'one2one'):
# Child lines are deep-copied recursively and re-attached with
# (0, 0, vals) creation commands.
3396 rel = self.pool.get(fields[f]['relation'])
3397 if data[f] != False:
3398 for rel_id in data[f]:
3399 # the lines are first duplicated using the wrong (old)
3400 # parent but then are reassigned to the correct one thanks
3402 d,t = rel.copy_data(cr, uid, rel_id, context=context)
3403 res.append((0, 0, d))
3406 elif ftype == 'many2many':
# many2many links are replayed with a (6, 0, ids) replace command.
3407 data[f] = [(6, 0, data[f])]
# Collect ir.translation rows attached to translatable columns (own or
# inherited) of the source record so the caller can re-create them.
3409 trans_obj = self.pool.get('ir.translation')
3410 #TODO: optimize translations
3414 if f in self._columns and self._columns[f].translate:
3415 trans_name=self._name+","+f
3416 elif f in self._inherit_fields and self._inherit_fields[f][2].translate:
3417 trans_name=self._inherit_fields[f][0]+","+f
3422 trans_ids = trans_obj.search(cr, uid, [
3423 ('name', '=', trans_name),
3424 ('res_id','=',data['id'])
3427 trans_data.extend(trans_obj.read(cr,uid,trans_ids,context=context))
# Links to _inherits parent records are dropped: create() will make
# fresh parent rows for the duplicate.
3431 for v in self._inherits:
3432 del data[self._inherits[v]]
3433 return data, trans_data
3435 def copy(self, cr, uid, id, default=None, context=None):
# Duplicate record `id` (with `default` overrides) and re-create its
# translations pointed at the new record.
# NOTE(review): fragmentary listing — at least the per-record cleanup
# of the old translation id and the final return are not visible here.
3436 trans_obj = self.pool.get('ir.translation')
3437 data, trans_data = self.copy_data(cr, uid, id, default, context)
3438 new_id = self.create(cr, uid, data, context)
# Each copied translation row is re-targeted at the duplicate.
3439 for record in trans_data:
3441 record['res_id'] = new_id
3442 trans_obj.create(cr, uid, record, context)
def exists(self, cr, uid, id, context=None):
    """Tell whether a row with the given database ``id`` is present in
    this model's table.

    :param cr: database cursor
    :param uid: current user id (unused; kept for API symmetry)
    :param id: database id to probe
    :return: True if the row exists, False otherwise
    """
    query = 'SELECT count(1) FROM "%s" where id=%%s' % (self._table,)
    cr.execute(query, (id,))
    row = cr.fetchone()
    return bool(row[0])
3449 def check_recursion(self, cr, uid, ids, parent=None):
# Walk up the `parent` (default: self._parent_name) chain from `ids`
# to detect cycles in the hierarchy.
# NOTE(review): fragmentary listing — the initialization of ids_parent
# / ids_parent2 and the final return statements are not visible here;
# by convention this method returns False when a loop is found.
3451 parent = self._parent_name
# Each pass resolves the current frontier of ids to their parents,
# batching the SELECT in chunks of cr.IN_MAX ids.
3453 while len(ids_parent):
3455 for i in range(0, len(ids), cr.IN_MAX):
3456 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
3457 cr.execute('SELECT distinct "'+parent+'"'+
3458 ' FROM "'+self._table+'" ' \
3459 'WHERE id = ANY(%s)',(sub_ids_parent,))
# NULL parents (roots) are filtered out of the next frontier.
3460 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
3461 ids_parent = ids_parent2
# A cycle exists when an ancestor reappears among the original ids.
3462 for i in ids_parent:
3467 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: