1 # -*- encoding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 ##############################################################################
# Object relational mapping to the PostgreSQL database
#    . Hierarchical structure
#    . Constraint consistency and validation
#    . Object metadata that depends on its status
#    . Optimised processing by complex queries (multiple actions at once)
#    . Default field values
#    . Permission optimisation
#    . Persistent object: PostgreSQL database
#    . Multi-level caching system
#    . Two different inheritance mechanisms
#         - classical (varchar, integer, boolean, ...)
#         - relational (one2many, many2one, many2many)
55 from xml import dom, xpath
57 sys.stderr.write("ERROR: Import xpath module\n")
58 sys.stderr.write("ERROR: Try to install the old python-xml package\n")
61 from tools.config import config
# Validates ORDER BY specifications: comma-separated identifiers, each
# optionally followed by " desc" or " asc" (case-insensitive).
regex_order = re.compile('^([a-zA-Z0-9_]+( desc)?( asc)?,?)+$', re.I)
def intersect(la, lb):
    """Return the items of *la* that also occur in *lb*, keeping the
    order (and duplicates) of *la*."""
    return [item for item in la if item in lb]
class except_orm(Exception):
    """Generic ORM exception, raised as a (name, value) pair, e.g.
    ('ValidateError', <message>)."""
    def __init__(self, name, value):
        # NOTE(review): lines 72-73 are elided in this listing; upstream
        # versions also assign self.name / self.value here -- confirm
        # before relying on those attributes.
        self.args = (name, value)
# Readonly python database object browser
class browse_null(object):
    """Null browse object used where a relation is empty; attribute and
    item access degrade gracefully instead of raising."""

    def __getitem__(self, name):
        """Item access on a null record; body elided in this listing
        (lines 84-85) -- presumably returns a falsy value, confirm."""

    def __getattr__(self, name):
        return False # XXX: return self ?

    def __nonzero__(self):
        """Truth value; body elided in this listing -- presumably returns
        False, confirm."""

    def __unicode__(self):
        """Unicode representation; body elided in this listing."""
103 # TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """A list of browse records that also remembers the context it was
    built with (exposed as the .context attribute)."""

    def __init__(self, lst, context=None):
        # NOTE(review): lines 108-109 are elided in this listing; upstream
        # normalizes a falsy context to {} there -- confirm.
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """Lazy, cached proxy over one database record of *table*.  Field
    values are fetched on first access and shared through *cache*."""

    # NOTE(review): several interior lines of this class are elided in
    # this numbered listing; the code below is transcribed as shown.

    def __init__(self, cr, uid, id, table, cache, context=None, list_class = None, fields_process={}):
        '''
        table : the object (inherited from orm)
        context : a dictionary with an optional context
        '''
        # WARNING(review): mutable default `fields_process={}` is shared
        # between every call that omits the argument.
        assert id and isinstance(id, (int, long,)), _('Wrong ID for the browse record, got %r, expected an integer.') % (id,)
        self._list_class = list_class or browse_record_list
        # NOTE(review): lines 124-127 elided (upstream stores cr/uid/id/table).
        self._table_name = self._table._name
        self._context = context
        self._fields_process = fields_process
        # one cache dict per table name, shared by all records of the table
        cache.setdefault(table._name, {})
        self._data = cache[table._name]
        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # Lazy field access: on a cache miss, read the field (batched with
        # sibling classic/many2one fields when cheap) for every cached id
        # of this table at once, then serve from the cache.
        if name not in self._data[self._id]:
            # build the list of fields we will fetch
            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                # model attributes: wrap callables so they receive
                # (cr, uid, [id], ...) like normal ORM methods
                if isinstance(getattr(self._table, name), (types.MethodType, types.LambdaType, types.FunctionType)):
                    return lambda *args, **argv: getattr(self._table, name)(self._cr, self._uid, [self._id], *args, **argv)
                return getattr(self._table, name)
            logger = netsvc.Logger()
            logger.notifyChannel('orm', netsvc.LOG_ERROR, "Programming error: field '%s' does not exist in object '%s' !" % (name, self._table._name))
            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            if col._classic_write:
                # gen the list of "local" (ie not inherited) fields which are classic or many2one
                ffields = filter(lambda x: x[1]._classic_write, self._table._columns.items())
                # gen the list of inherited fields
                inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                # complete the field list with the inherited fields which are classic or many2one
                ffields += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
            ffields = [(name, col)]
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            fffields = map(lambda x: x[0], ffields)
            datas = self._table.read(self._cr, self._uid, ids, fffields, context=self._context, load="_classic_write")
            if self._fields_process:
                if f._type in self._fields_process:
                    d[n] = self._fields_process[f._type](d[n])
                    d[n].set_value(d[n], self, f)
            # Where did those ids come from? Perhaps old entries in ir_model_data?
            raise except_orm('NoDataError', 'Field %s in %s%s'%(name,self._table_name,str(ids)))
            # create browse records for 'remote' objects
            if f._type in ('many2one', 'one2one'):
                obj = self._table.pool.get(f._obj)
                if type(data[n]) in (type([]),type( (1,) )):
                    data[n] = browse_record(self._cr, self._uid, ids2, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                data[n] = browse_null()
                data[n] = browse_null()
            elif f._type in ('one2many', 'many2many') and len(data[n]):
                data[n] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(f._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in data[n]], self._context)
            self._data[data['id']].update(data)
        if not name in self._data[self._id]:
            #how did this happen?
            logger = netsvc.Logger()
            logger.notifyChannel("browse_record", netsvc.LOG_ERROR,"Ffields: %s, datas: %s"%(str(fffields),str(datas)))
            logger.notifyChannel("browse_record", netsvc.LOG_ERROR,"Data: %s, Table: %s"%(str(self._data[self._id]),str(self._table)))
            raise AttributeError(_('Unknown attribute %s in %s ') % (str(name),self._table_name))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # raise an AttributeError exception.
        """Attribute access; body elided in this listing (lines 217-218)."""

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

    def __hasattr__(self, name):
        """Body elided in this listing."""

    # NOTE(review): the def line for the following return (upstream
    # __repr__/__str__) is elided in this listing.
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # records compare equal iff same table and same id
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

    # NOTE(review): the def line for the following return (upstream
    # __hash__) is elided in this listing.
        return hash((self._table_name, self._id))
251 (type returned by postgres when the column was created, type expression to create the column)
255 fields.boolean: 'bool',
256 fields.integer: 'int4',
257 fields.integer_big: 'int8',
261 fields.datetime: 'timestamp',
262 fields.binary: 'bytea',
263 fields.many2one: 'int4',
265 if type(f) in type_dict:
266 f_type = (type_dict[type(f)], type_dict[type(f)])
267 elif isinstance(f, fields.float):
269 f_type = ('numeric', 'NUMERIC(%d,%d)' % (f.digits[0], f.digits[1]))
271 f_type = ('float8', 'DOUBLE PRECISION')
272 elif isinstance(f, (fields.char, fields.reference)):
273 f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
274 elif isinstance(f, fields.selection):
275 if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
276 f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
277 elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
280 f_size = (hasattr(f, 'size') and f.size) or 16
283 f_type = ('int4', 'INTEGER')
285 f_type = ('varchar', 'VARCHAR(%d)' % f_size)
286 elif isinstance(f, fields.function) and eval('fields.'+(f._type)) in type_dict:
287 t = eval('fields.'+(f._type))
288 f_type = (type_dict[t], type_dict[t])
289 elif isinstance(f, fields.function) and f._type == 'float':
290 f_type = ('float8', 'DOUBLE PRECISION')
291 elif isinstance(f, fields.function) and f._type == 'selection':
292 f_type = ('text', 'text')
294 logger = netsvc.Logger()
295 logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
class orm_template(object):
    """Abstract base class for ORM models (concrete subclasses: the
    SQL-backed orm and the in-memory orm_memory)."""
    # NOTE(review): class attributes such as _name/_columns/_description
    # are elided in this listing.
    _parent_name = 'parent_id'
    _parent_store = False
    def _field_create(self, cr, context={}):
        # Synchronise this model's definition into ir_model,
        # ir_model_data and ir_model_fields.
        # WARNING(review): mutable default argument `context={}` is shared
        # across calls; several interior lines are elided in this listing.
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
        cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
        model_id = cr.fetchone()[0]
        cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
        model_id = cr.fetchone()[0]
        if 'module' in context:
            name_id = 'model_'+self._name.replace('.','_')
            cr.execute('select * from ir_model_data where name=%s and res_id=%s', (name_id,model_id))
            cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                (name_id, context['module'], 'ir.model', model_id)

        # load the existing ir_model_fields rows for this model
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec
        for (k, f) in self._columns.items():
                'model_id': model_id,
                'field_description': f.string.replace("'", " "),
                'relation': f._obj or 'NULL',
                'view_load': (f.view_load and 1) or 0,
                'select_level': str(f.select or 0),
                'readonly':(f.readonly and 1) or 0,
                'required':(f.required and 1) or 0,
            cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
            id = cr.fetchone()[0]
            cr.execute("""INSERT INTO ir_model_fields (
                id, model_id, model, name, field_description, ttype,
                relation,view_load,state,select_level
                %s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                vals['relation'], bool(vals['view_load']), 'base',
            if 'module' in context:
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
                    (('field_'+self._table+'_'+k)[:64], context['module'], 'ir.model.fields', id)
            # update existing rows whose stored values drifted
            for key, val in vals.items():
                if cols[k][key] != vals[key]:
                    cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                    cr.execute("""UPDATE ir_model_fields SET
                        model_id=%s, field_description=%s, ttype=%s, relation=%s,
                        view_load=%s, select_level=%s, readonly=%s ,required=%s
                        model=%s AND name=%s""", (
                        vals['model_id'], vals['field_description'], vals['ttype'],
                        vals['relation'], bool(vals['view_load']),
                        vals['select_level'], bool(vals['readonly']),bool(vals['required']), vals['model'], vals['name']
387 def _auto_init(self, cr, context={}):
388 self._field_create(cr, context)
    def __init__(self, cr):
        # Validate the model declaration (a _name or _inherit is required)
        # and derive defaults for _description and the SQL table name.
        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name
            logger = netsvc.Logger()
            logger.notifyChannel('orm', netsvc.LOG_ERROR, msg )
            raise except_orm('ValueError', msg )
        if not self._description:
            self._description = self._name
        # table name: dots in the model name become underscores
        self._table = self._name.replace('.', '_')
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process={}):
        """Return a browse_record for an int/long id, or a
        browse_record_list for a list of ids.

        WARNING(review): mutable default `fields_process={}`; the
        `cache = {}` initialisation (line 408) is elided in this listing.
        """
        self._list_class = list_class or browse_record_list
        # need to accepts ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context)
    def __export_row(self, cr, uid, row, fields, context=None):
        # Flatten one browse_record into rows of strings, following the
        # slash-separated field paths in *fields*; recurses into
        # sub-records for relational fields.  Heavily elided in this
        # listing -- transcribed as shown.
        data = map(lambda x: '', range(len(fields)))
        for fpos in range(len(fields)):
            if isinstance(r, (browse_record_list, list)):
                fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                lines2 = self.__export_row(cr, uid, row2, fields2,
                for fpos2 in range(len(fields)):
                    if lines2 and lines2[0][fpos2]:
                        data[fpos2] = lines2[0][fpos2]
            data[fpos] = str(r or '')
        return [data] + lines
    def export_data(self, cr, uid, ids, fields, context=None):
        # Export the requested slash-separated field paths for every id,
        # one (or more) flat string rows per record.
        fields = map(lambda x: x.split('/'), fields)
        for row in self.browse(cr, uid, ids, context):
            datas += self.__export_row(cr, uid, row, fields, context)
    def import_data(self, cr, uid, fields, datas, mode='init',
            current_module=None, noupdate=False, context=None, filename=None):
        # Import rows of *datas* following the slash-separated *fields*
        # paths, creating/updating records through ir.model.data.
        # Heavily elided in this listing -- transcribed as shown.
        fields = map(lambda x: x.split('/'), fields)
        logger = netsvc.Logger()

        def process_liness(self, datas, prefix, fields_def, position=0):
            # Convert one CSV-like line (plus following lines for
            # one2many sub-records) into a values dict.
            line = datas[position]
            # Import normal fields
            for i in range(len(fields)):
                raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
                # ":id" suffix: resolve external (module.xml_id) references
                if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':id'):
                    if fields_def[field[len(prefix)][:-3]]['type']=='many2many':
                        for word in line[i].split(','):
                            module, xml_id = word.rsplit('.', 1)
                            module, xml_id = current_module, word
                            ir_model_data_obj = self.pool.get('ir.model.data')
                            id = ir_model_data_obj._get_id(cr, uid, module,
                            res_id2 = ir_model_data_obj.read(cr, uid, [id],
                                ['res_id'])[0]['res_id']
                            res_id.append(res_id2)
                        res_id = [(6, 0, res_id)]
                    module, xml_id = line[i].rsplit('.', 1)
                    module, xml_id = current_module, line[i]
                    ir_model_data_obj = self.pool.get('ir.model.data')
                    id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
                    res_res_id = ir_model_data_obj.read(cr, uid, [id],
                    res_id = res_res_id[0]['res_id']
                    row[field[0][:-3]] = res_id or False
                # ":lang=" suffix: collect per-language translations
                if (len(field) == len(prefix)+1) and \
                        len(field[len(prefix)].split(':lang=')) == 2:
                    f, lang = field[len(prefix)].split(':lang=')
                    translate.setdefault(lang, {})[f]=line[i] or False
                if (len(field) == len(prefix)+1) and \
                        (prefix == field[0:len(prefix)]):
                    if fields_def[field[len(prefix)]]['type'] == 'integer':
                        res = line[i] and int(line[i])
                    elif fields_def[field[len(prefix)]]['type'] == 'boolean':
                        # SECURITY NOTE(review): eval() on an imported cell
                        # executes arbitrary expressions -- prefer an
                        # explicit truth-string mapping.
                        res = line[i] and eval(line[i])
                    elif fields_def[field[len(prefix)]]['type'] == 'float':
                        res = line[i] and float(line[i])
                    elif fields_def[field[len(prefix)]]['type'] == 'selection':
                        if isinstance(fields_def[field[len(prefix)]]['selection'],
                            sel = fields_def[field[len(prefix)]]['selection']
                            sel = fields_def[field[len(prefix)]]['selection'](self,
                            if str(key) == line[i]:
                        if line[i] and not res:
                            logger.notifyChannel("import", netsvc.LOG_WARNING,
                                "key '%s' not found in selection field '%s'" % \
                                (line[i], field[len(prefix)]))
                    elif fields_def[field[len(prefix)]]['type']=='many2one':
                        relation = fields_def[field[len(prefix)]]['relation']
                        res2 = self.pool.get(relation).name_search(cr, uid,
                            line[i], [], operator='=')
                        res = (res2 and res2[0][0]) or False
                        warning += ('Relation not found: ' + line[i] + \
                            ' on ' + relation + ' !\n')
                        logger.notifyChannel("import", netsvc.LOG_WARNING,
                            'Relation not found: ' + line[i] + \
                            ' on ' + relation + ' !\n')
                    elif fields_def[field[len(prefix)]]['type']=='many2many':
                        relation = fields_def[field[len(prefix)]]['relation']
                        for word in line[i].split(','):
                            res2 = self.pool.get(relation).name_search(cr,
                                uid, word, [], operator='=')
                            res3 = (res2 and res2[0][0]) or False
                            warning += ('Relation not found: ' + \
                                line[i] + ' on '+relation + ' !\n')
                            logger.notifyChannel("import",
                                'Relation not found: ' + line[i] + \
                                ' on '+relation + ' !\n')
                        res = line[i] or False
                    row[field[len(prefix)]] = res
                elif (prefix==field[0:len(prefix)]):
                    if field[0] not in todo:
                        todo.append(field[len(prefix)])
            # Import one2many fields
            newfd = self.pool.get(fields_def[field]['relation']).fields_get(
                cr, uid, context=context)
            res = process_liness(self, datas, prefix + [field], newfd, position)
            (newrow, max2, w2, translate2, data_id2) = res
            nbrmax = max(nbrmax, max2)
            warning = warning + w2
            reduce(lambda x, y: x and y, newrow)
            row[field] = (reduce(lambda x, y: x or y, newrow.values()) and \
                [(0, 0, newrow)]) or []
            while (position+i)<len(datas):
                for j in range(len(fields)):
                    if (len(field2) <= (len(prefix)+1)) and datas[position+i][j]:
                (newrow, max2, w2, translate2, data_id2) = process_liness(
                    self, datas, prefix+[field], newfd, position+i)
                if reduce(lambda x, y: x or y, newrow.values()):
                    row[field].append((0, 0, newrow))
                nbrmax = max(nbrmax, i)
            for i in range(max(nbrmax, 1)):
            result = (row, nbrmax, warning, translate, data_id)

        fields_def = self.fields_get(cr, uid, context=context)
        initial_size = len(datas)
        # SECURITY NOTE(review): pickle.load of the import_partial file
        # executes arbitrary code if the file is attacker-controlled.
        if config.get('import_partial', False) and filename:
            data = pickle.load(file(config.get('import_partial')))
            original_value = data.get(filename, 0)
        (res, other, warning, translate, data_id) = \
            process_liness(self, datas, [], fields_def)
        return (-1, res, warning, '')
        id = self.pool.get('ir.model.data')._update(cr, uid, self._name,
            current_module, res, xml_id=data_id, mode=mode,
        # write the collected per-language translations
        for lang in translate:
            context2 = context.copy()
            context2['lang'] = lang
            self.write(cr, uid, [id], translate[lang], context2)
        # checkpoint the partial-import progress every 100 rows
        if config.get('import_partial', False) and filename and (not (counter%100)) :
            data = pickle.load(file(config.get('import_partial')))
            data[filename] = initial_size - len(datas) + original_value
            pickle.dump(data, file(config.get('import_partial'),'wb'))
        #except Exception, e:
        #    logger.notifyChannel("import", netsvc.LOG_ERROR, e)
        #        return (-1, res, e[0], warning)
        #        return (-1, res, e[0], '')
        # TODO: Send a request with the result and multi-thread !
        return (done, 0, 0, 0)
668 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
669 raise _('The read method is not implemented on this object !')
671 def get_invalid_fields(self,cr,uid):
672 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        # Run every (function, message, fields) constraint in
        # self._constraints over *ids*; remember offending field names in
        # self._invalids and raise a ValidateError when any fails.
        # Partially elided in this listing.
        context = context or {}
        lng = context.get('lang', False) or 'en_US'
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            fun, msg, fields = constraint
            if not fun(self, cr, uid, ids):
                # use the translated constraint message when one exists
                translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
                _("Error occured while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                self._invalids.update(fields)
        raise except_orm('ValidateError', '\n'.join(error_msgs))
        self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """Return default values for *fields_list*; body elided in this
        listing (presumably returns {} here, overridden in orm)."""
696 def perm_read(self, cr, user, ids, context=None, details=True):
697 raise _('The perm_read method is not implemented on this object !')
699 def unlink(self, cr, uid, ids, context=None):
700 raise _('The unlink method is not implemented on this object !')
702 def write(self, cr, user, ids, vals, context=None):
703 raise _('The write method is not implemented on this object !')
705 def create(self, cr, user, vals, context=None):
706 raise _('The create method is not implemented on this object !')
    # returns the definition of each field in the object
    # the optional fields parameter can limit the result to some fields
    def fields_get_keys(self, cr, user, context=None, read_access=True):
        # Return the names of all fields, own plus inherited.
        res = self._columns.keys()
        for parent in self._inherits:
            # BUG(review): `fields` is not defined anywhere in the visible
            # code of this method -- either a latent NameError or an elided
            # line; the final `return res` is also elided in this listing.
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, fields, context))
    def fields_get(self, cr, user, fields=None, context=None, read_access=True):
        # Return the definition dict of each field (type, string,
        # relation, selection, ...), merged with inherited models and
        # translated into the context language.  Partially elided in this
        # listing -- transcribed as shown.
        translation_obj = self.pool.get('ir.translation')
        model_access_obj = self.pool.get('ir.model.access')
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, fields, context))
        for f in self._columns.keys():
            if fields and f not in fields:
            res[f] = {'type': self._columns[f]._type}
            # copy the simple declarative attributes that are set
            for arg in ('string', 'readonly', 'states', 'size', 'required',
                    'change_default', 'translate', 'help', 'select'):
                if getattr(self._columns[f], arg):
                    res[f][arg] = getattr(self._columns[f], arg)
            res[f]['readonly'] = True
            res[f]['states'] = {}
            for arg in ('digits', 'invisible','filters'):
                if hasattr(self._columns[f], arg) \
                        and getattr(self._columns[f], arg):
                    res[f][arg] = getattr(self._columns[f], arg)
            # translated label and help text
            res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
            res[f]['string'] = res_trans
            help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
            res[f]['help'] = help_trans
            if hasattr(self._columns[f], 'selection'):
                if isinstance(self._columns[f].selection, (tuple, list)):
                    sel = self._columns[f].selection
                    # translate each selection option
                    for (key, val) in sel:
                        val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context.get('lang', False) or 'en_US', val)
                        sel2.append((key, val2 or val))
                    res[f]['selection'] = sel
                    # call the 'dynamic selection' function
                    res[f]['selection'] = self._columns[f].selection(self, cr,
            if res[f]['type'] in ('one2many', 'many2many',
                    'many2one', 'one2one'):
                res[f]['relation'] = self._columns[f]._obj
                res[f]['domain'] = self._columns[f]._domain
                res[f]['context'] = self._columns[f]._context
        # filter out fields which aren't in the fields list
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Return a window-title override, or a falsy value to keep the
        view's own title; body elided in this listing."""
    def __view_look_dom(self, cr, user, node, context=None):
        # Recursively walk a view DOM node: collect the field definitions
        # it references, embed sub-view architectures, apply translations
        # and group-based visibility in place.  Partially elided in this
        # listing -- transcribed as shown.
        if node.nodeType == node.ELEMENT_NODE and node.localName == 'field':
            if node.hasAttribute('name'):
                if node.getAttribute('name') in self._columns:
                    relation = self._columns[node.getAttribute('name')]._obj
                    relation = self._inherit_fields[node.getAttribute('name')][2]._obj
                    # inline <form>/<tree>/<graph> children become sub-views
                    for f in node.childNodes:
                        if f.nodeType == f.ELEMENT_NODE and f.localName in ('form', 'tree', 'graph'):
                            ctx['base_model_name'] = self._name
                            xarch, xfields = self.pool.get(relation).__view_look_dom_arch(cr, user, f, ctx)
                            views[str(f.localName)] = {
                    attrs = {'views': views}
                    if node.hasAttribute('widget') and node.getAttribute('widget')=='selection':
                        # We can not use the domain has it is defined according to the record !
                        attrs['selection'] = self.pool.get(relation).name_search(cr, user, '', context=context)
                        if not attrs.get('required',False):
                            attrs['selection'].append((False,''))
                fields[node.getAttribute('name')] = attrs
        elif node.nodeType==node.ELEMENT_NODE and node.localName in ('form', 'tree'):
            # optional dynamic window title
            result = self.view_header_get(cr, user, False, node.localName, context)
            node.setAttribute('string', result.decode('utf-8'))
        elif node.nodeType==node.ELEMENT_NODE and node.localName == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.hasAttribute(additional_field) and node.getAttribute(additional_field):
                    fields[node.getAttribute(additional_field)] = {}
        # hide nodes restricted to groups the user is not part of
        if node.nodeType == node.ELEMENT_NODE and node.hasAttribute('groups'):
            if node.getAttribute('groups'):
                groups = node.getAttribute('groups').split(',')
                access_pool = self.pool.get('ir.model.access')
                readonly = readonly or access_pool.check_groups(cr, user, group)
                node.setAttribute('invisible', '1')
                node.removeAttribute('groups')
        # translate 'string' and 'sum' attributes
        if node.nodeType == node.ELEMENT_NODE:
            if ('lang' in context) and not result:
                if node.hasAttribute('string') and node.getAttribute('string'):
                    trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.getAttribute('string').encode('utf8'))
                    if not trans and ('base_model_name' in context):
                        trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.getAttribute('string').encode('utf8'))
                    node.setAttribute('string', trans)
                if node.hasAttribute('sum') and node.getAttribute('sum'):
                    trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.getAttribute('sum').encode('utf8'))
                    node.setAttribute('sum', trans)
        for f in node.childNodes:
            fields.update(self.__view_look_dom(cr, user, f, context))
        if ('state' not in fields) and (('state' in self._columns) or ('state' in self._inherit_fields)):
    def __view_look_dom_arch(self, cr, user, node, context=None):
        # Post-process a view DOM (buttons' role-based readonly state),
        # serialize it back to XML and pair it with the field definitions
        # it references.  Partially elided in this listing.
        fields_def = self.__view_look_dom(cr, user, node, context=context)
        buttons = xpath.Evaluate('//button', node)
        for button in buttons:
            if button.getAttribute('type') == 'object':
                if user != 1: # admin user has all roles
                    serv = netsvc.LocalService('object_proxy')
                    user_roles = serv.execute_cr(cr, user, 'res.users', 'read', [user], ['roles_id'])[0]['roles_id']
                    # SECURITY NOTE(review): the button name is interpolated
                    # directly into the SQL string; use a parameterized query
                    # (cr.execute(sql, (name,))) instead.
                    cr.execute("select role_id from wkf_transition where signal='%s'" % button.getAttribute('name'))
                    roles = cr.fetchall()
                    ok = ok and serv.execute_cr(cr, user, 'res.roles', 'check', user_roles, role[0])
                    button.setAttribute('readonly', '1')
                    button.setAttribute('readonly', '0')
        arch = node.toxml(encoding="utf-8").replace('\t', '')
        fields = self.fields_get(cr, user, fields_def.keys(), context)
        fields[field].update(fields_def[field])
    def __get_default_calendar_view(self):
        """Generate a default calendar view (For internal use only).

        Picks color and date_stop/date_delay attributes from the first
        matching conventional column name; partially elided in this
        listing."""
        arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
                '<calendar string="%s" date_start="%s"') % (self._description, self._date_name)
        if 'user_id' in self._columns:
            arch += ' color="user_id"'
        elif 'partner_id' in self._columns:
            arch += ' color="partner_id"'
        if 'date_stop' in self._columns:
            arch += ' date_stop="date_stop"'
        elif 'date_end' in self._columns:
            arch += ' date_stop="date_end"'
        elif 'date_delay' in self._columns:
            arch += ' date_delay="date_delay"'
        elif 'planned_hours' in self._columns:
            arch += ' date_delay="planned_hours"'
        # NOTE(review): the statement opening for the following
        # continuation lines (lines 922-923) is elided in this listing.
            ' <field name="%s"/>\n'
            '</calendar>') % (self._rec_name)
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False):
        # Build the requested view description: fetch the arch from
        # ir_ui_view (or synthesize a default one), apply inherited views
        # recursively, post-process the DOM, and optionally attach
        # toolbar actions.  Heavily elided in this listing -- transcribed
        # as shown (e.g. the `def encode(s):` line is elided before the
        # next two lines).
        if isinstance(s, unicode):
            return s.encode('utf8')

        def _inherit_apply(src, inherit):
            # Apply one inheriting view (an xpath/position spec) onto the
            # parent arch *src*.
            def _find(node, node2):
                # locate in *node* the element matched by spec *node2*
                if node2.nodeType == node2.ELEMENT_NODE and node2.localName == 'xpath':
                    res = xpath.Evaluate(node2.getAttribute('expr'), node)
                    return res and res[0]
                if node.nodeType == node.ELEMENT_NODE and node.localName == node2.localName:
                    for attr in node2.attributes.keys():
                        if attr == 'position':
                        if node.hasAttribute(attr):
                            if node.getAttribute(attr)==node2.getAttribute(attr):
                for child in node.childNodes:
                    res = _find(child, node2)

            doc_src = dom.minidom.parseString(encode(src))
            doc_dest = dom.minidom.parseString(encode(inherit))
            toparse = doc_dest.childNodes
            node2 = toparse.pop(0)
            if not node2.nodeType == node2.ELEMENT_NODE:
            if node2.localName == 'data':
                toparse += node2.childNodes
            node = _find(doc_src, node2)
            if node2.hasAttribute('position'):
                pos = node2.getAttribute('position')
                # position="replace" / "after" / "before" handling
                parent = node.parentNode
                for child in node2.childNodes:
                    if child.nodeType == child.ELEMENT_NODE:
                        parent.insertBefore(child, node)
                parent.removeChild(node)
                sib = node.nextSibling
                for child in node2.childNodes:
                    if child.nodeType == child.ELEMENT_NODE:
                        node.appendChild(child)
                        node.parentNode.insertBefore(child, sib)
                        node.parentNode.insertBefore(child, node)
                raise AttributeError(_('Unknown position in inherited view %s !') % pos)
                ' %s="%s"' % (attr, node2.getAttribute(attr))
                for attr in node2.attributes.keys()
                if attr != 'position'
            tag = "<%s%s>" % (node2.localName, attrs)
            raise AttributeError(_("Couldn't find tag '%s' in parent view !\n%s") % (tag,src))
            return doc_src.toxml(encoding="utf-8").replace('\t', '')

        result = {'type': view_type, 'model': self._name}
        # NOTE(review): self._name is interpolated into SQL here; it is an
        # internal model name, but a parameterized query would be safer.
        where = (model and (" and model='%s'" % (self._name,))) or ''
        cr.execute('SELECT arch,name,field_parent,id,type,inherit_id FROM ir_ui_view WHERE id=%s'+where, (view_id,))
        cr.execute('''SELECT
                arch,name,field_parent,id,type,inherit_id
            ORDER BY priority''', (self._name, view_type))
        sql_res = cr.fetchone()
        view_id = ok or sql_res[3]
        # if a view was found
        result['type'] = sql_res[4]
        result['view_id'] = sql_res[3]
        result['arch'] = sql_res[0]

        def _inherit_apply_rec(result, inherit_id):
            # get all views which inherit from (ie modify) this view
            cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
            sql_inherit = cr.fetchall()
            for (inherit, id) in sql_inherit:
                result = _inherit_apply(result, inherit)
                result = _inherit_apply_rec(result, id)

        result['arch'] = _inherit_apply_rec(result['arch'], sql_res[3])
        result['name'] = sql_res[1]
        result['field_parent'] = sql_res[2] or False
        # otherwise, build some kind of default view
        if view_type == 'form':
            res = self.fields_get(cr, user, context=context)
            xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
                '''<form string="%s">''' % (self._description,)
            if res[x]['type'] not in ('one2many', 'many2many'):
                xml += '<field name="%s"/>' % (x,)
                if res[x]['type'] == 'text':
        elif view_type == 'tree':
            _rec_name = self._rec_name
            if _rec_name not in self._columns:
                _rec_name = self._columns.keys()[0]
            xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
                '''<tree string="%s"><field name="%s"/></tree>''' \
                % (self._description, self._rec_name)
        elif view_type == 'calendar':
            xml = self.__get_default_calendar_view()
        result['arch'] = xml
        result['name'] = 'default'
        result['field_parent'] = False
        result['view_id'] = 0

        # post-process the arch and collect field definitions
        doc = dom.minidom.parseString(encode(result['arch']))
        xarch, xfields = self.__view_look_dom_arch(cr, user, doc, context=context)
        result['arch'] = xarch
        result['fields'] = xfields
        # strip heavy report payloads from the toolbar action dicts
        for key in ('report_sxw_content', 'report_rml_content',
                'report_sxw', 'report_rml',
                'report_sxw_content_data', 'report_rml_content_data'):
        ir_values_obj = self.pool.get('ir.values')
        resprint = ir_values_obj.get(cr, user, 'action',
            'client_print_multi', [(self._name, False)], False,
        resaction = ir_values_obj.get(cr, user, 'action',
            'client_action_multi', [(self._name, False)], False,
        resrelate = ir_values_obj.get(cr, user, 'action',
            'client_action_relate', [(self._name, False)], False,
        resprint = map(clean, resprint)
        resaction = map(clean, resaction)
        resaction = filter(lambda x: not x.get('multi', False), resaction)
        resprint = filter(lambda x: not x.get('multi', False), resprint)
        resrelate = map(lambda x: x[2], resrelate)
        for x in resprint+resaction+resrelate:
            x['string'] = x['name']
        result['toolbar'] = {
            'action': resaction,

    # public alias for the name-mangled private method
    _view_look_dom_arch = __view_look_dom_arch
    def search_count(self, cr, user, args, context=None):
        # Count the records matching the *args* domain by delegating to
        # search(count=True); tail of the method elided in this listing.
        res = self.search(cr, user, args, context=context, count=True)
        if isinstance(res, list):
1131 def search(self, cr, user, args, offset=0, limit=None, order=None,
1132 context=None, count=False):
1133 raise _('The search method is not implemented on this object !')
1135 def name_get(self, cr, user, ids, context=None):
1136 raise _('The name_get method is not implemented on this object !')
1138 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=None):
1139 raise _('The name_search method is not implemented on this object !')
1141 def copy(self, cr, uid, id, default=None, context=None):
1142 raise _('The copy method is not implemented on this object !')
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        """Read the translated labels of this model's fields.

        Returns a dict keyed by language code; each value maps field name to
        its translated label, falling back to the field's default string.
        NOTE(review): several lines (loop headers, accumulator init, the
        return statement) are missing from this view of the file.
        """
        # reading field labels requires read access on ir.translation
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
            fields = self._columns.keys() + self._inherit_fields.keys()
            res[lang] = {'code': lang}
                if f in self._columns:
                    # translated label stored under key '<model>,<field>'
                    res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
                        res[lang][f] = res_trans
                        # fall back to the untranslated field label
                        res[lang][f] = self._columns[f].string
        # merge labels coming from _inherits parent models
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
                res[lang] = {'code': lang}
                for f in res2[lang]:
                    res[lang][f] = res2[lang][f]
    def write_string(self, cr, uid, id, langs, vals, context=None):
        """Write translated labels for this model's fields.

        ``vals`` maps field name to the new label.
        NOTE(review): the loop headers over ``langs``/``vals`` are missing
        from this view of the file.
        """
        # writing field labels requires write access on ir.translation
        self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
                if field in self._columns:
                    # res_id [0] is used for field-label translations
                    self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field])
        # propagate to _inherits parent models
        for table in self._inherits:
            cols = intersect(self._inherit_fields.keys(), vals)
            self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
# In-memory (non-persistent) ORM variant: records live in the ``self.datas``
# dict ({id: {field: value}}) instead of a PostgreSQL table, and are expired
# by vaccum() based on their 'internal.date_access' timestamp.
# NOTE(review): many lines are missing from this view of the file; dangling
# headers below are kept verbatim.
class orm_memory(orm_template):
    # public methods that must not be shadowed through inheritance
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count']
    _inherit_fields = {}

    def __init__(self, cr):
        super(orm_memory, self).__init__(cr)
        # purge workflow instances left over for this transient model
        cr.execute('delete from wkf_instance where res_type=%s', (self._name,))

    def vaccum(self, cr, uid):
        """Garbage-collect the in-memory store (runs one call in every
        ``_check_time``; the early return is in a missing line)."""
        if self.check_id % self._check_time:
        # expiry threshold: records untouched for more than _max_hours
        # (NOTE: shadows the builtin max(), kept as-is)
        max = time.time() - self._max_hours * 60 * 60
        for id in self.datas:
            if self.datas[id]['internal.date_access'] < max:
        self.unlink(cr, uid, tounlink)
        # hard cap: evict the oldest records beyond _max_count
        if len(self.datas)>self._max_count:
            # (NOTE: shadows the builtin sorted(), kept as-is)
            sorted = map(lambda x: (x[1]['internal.date_access'], x[0]), self.datas.items())
            ids = map(lambda x: x[1], sorted[:len(self.datas)-self._max_count])
            self.unlink(cr, uid, ids)

    def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
        """Read field values straight from self.datas (no SQL)."""
        if not fields_to_read:
            fields_to_read = self._columns.keys()
        if isinstance(ids, (int, long)):
            for f in fields_to_read:
                if id in self.datas:
                    r[f] = self.datas[id].get(f, False)
                    # with context['bin_size'], binary fields report size only
                    if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
            if id in self.datas:
                # any read refreshes the record's eviction timestamp
                self.datas[id]['internal.date_access'] = time.time()
        # function fields (load attribute falsy) computed via get_memory()
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
        for f in fields_post:
            res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
            for record in result:
                record[f] = res2[record['id']]
        # scalar id in -> scalar record out (return is in a missing line)
        if isinstance(ids, (int, long)):

    def write(self, cr, user, ids, vals, context=None):
        """Update in-memory records; classic fields go straight into
        self.datas, function fields through their set_memory() hook."""
            if self._columns[field]._classic_write:
                vals2[field] = vals[field]
                upd_todo.append(field)
        self.datas[id_new].update(vals2)
        self.datas[id_new]['internal.date_access'] = time.time()
        for field in upd_todo:
            self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
        self._validate(cr, user, [id_new], context)
        # notify the workflow engine of the write
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_write(user, self._name, id_new, cr)
        self.vaccum(cr, user)

    def create(self, cr, user, vals, context=None):
        """Create a new in-memory record and return its id (the increment of
        ``next_id`` and the return are in missing lines)."""
        id_new = self.next_id
        # collect defaults for columns absent from vals
        for f in self._columns.keys():
        vals.update(self.default_get(cr, user, default, context))
            if self._columns[field]._classic_write:
                vals2[field] = vals[field]
                upd_todo.append(field)
        self.datas[id_new] = vals2
        self.datas[id_new]['internal.date_access'] = time.time()
        for field in upd_todo:
            self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
        self._validate(cr, user, [id_new], context)
        # notify the workflow engine of the creation
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_create(user, self._name, id_new, cr)
        self.vaccum(cr, user)

    def default_get(self, cr, uid, fields_list, context=None):
        """Return {field: default value} for ``fields_list``, combining
        model defaults, user defaults (ir.values) and context overrides."""
        # get the default values for the inherited fields
        for f in fields_list:
            if f in self._defaults:
                value[f] = self._defaults[f](self, cr, uid, context)
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                # drop many2one defaults pointing at deleted records
                if fld_def._type in ('many2one', 'one2one'):
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value)]):
                # keep only many2many default ids that still exist
                if fld_def._type in ('many2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                # sanitise one2many defaults: drop sub-values whose
                # relational targets no longer exist
                if fld_def._type in ('one2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                value[field] = field_value
        # context keys 'default_<field>' override everything else
        for key in context or {}:
            if key.startswith('default_'):
                value[key[8:]] = context[key]

    def search(self, cr, user, args, offset=0, limit=None, order=None,
            context=None, count=False):
        # NOTE(review): ignores args/offset/limit/order/count entirely and
        # returns every live in-memory id
        return self.datas.keys()

    def unlink(self, cr, uid, ids, context=None):
        """Remove records from self.datas (the deletion loop header is in a
        missing line) and drop their workflow instances."""
            if id in self.datas:
        # ids are ints built internally, so string-joining them is safe here
        cr.execute('delete from wkf_instance where res_type=%s and res_id in ('+','.join(map(str, ids))+')', (self._name, ))

    def perm_read(self, cr, user, ids, context=None, details=True):
        """In-memory records carry no real audit trail: metadata below is
        fabricated (current user / current time)."""
                'create_uid': (user, 'Root'),
                'create_date': time.strftime('%Y-%m-%d %H:%M:%S'),
                'write_date': False,
1369 class orm(orm_template):
1370 _sql_constraints = []
1373 _protected = ['read','write','create','default_get','perm_read','unlink','fields_get','fields_view_get','search','name_get','distinct_field_get','name_search','copy','import_data','search_count']
    def _parent_store_compute(self, cr):
        """Recompute the nested-set columns parent_left/parent_right for the
        whole table by a depth-first walk from the roots.

        NOTE(review): several lines (the recursion over children, counter
        updates, the top-level call) are missing from this view.
        """
        logger = netsvc.Logger()
        logger.notifyChannel('init', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
        def browse_rec(root, pos=0):
            # WHERE clause: children of ``root``, or roots when root is falsy
            where = self._parent_name+'='+str(root)
                where = self._parent_name+' IS NULL'
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            childs = cr.fetchall()
                # recurse into each child, threading the position counter
                pos2 = browse_rec(id[0], pos2)
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos,pos2,root))
    def _update_store(self, cr, f, k):
        """Backfill the stored column ``k`` of the fields.function ``f`` for
        every existing row, using the field's symbol_set conversion.

        NOTE(review): the batching loop header is in a missing line; the
        batch size of 40 below is inferred from the slice.
        """
        logger = netsvc.Logger()
        logger.notifyChannel('init', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,))
        # (sql_literal, python_converter) pair for this column
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
            # consume ids 40 at a time
            ids_lst = ids_lst[40:]
            res = f.get(cr, self, iids, k, 1, {})
            for key,val in res.items():
                # if val is a many2one, just write the ID
                if type(val)==tuple:
                # skip plain False (unset), but keep falsy non-bool values
                if (val<>False) or (type(val)<>bool):
                    cr.execute(update_query, (ss[1](val), key))
    def _auto_init(self, cr, context={}):
        """Create or migrate the PostgreSQL table backing this model so it
        matches ``self._columns``: create the table, add missing columns,
        migrate types/sizes, sync NOT NULL, indexes and foreign keys, and
        register stored function fields for recomputation.

        NOTE(review): mutable default ``context={}`` is shared across calls —
        kept as-is; many lines are missing from this view of the file, so
        several headers below dangle.
        """
        store_compute = False
        logger = netsvc.Logger()
        self._field_create(cr, context=context)
        if not hasattr(self, "_auto") or self._auto:
            # create the table itself if it does not exist yet
            cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname='%s'" % self._table)
                cr.execute("CREATE TABLE \"%s\" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITH OIDS" % self._table)
            if self._parent_store:
                # nested-set support: ensure parent_left/parent_right exist
                cr.execute("""SELECT c.relname
                    FROM pg_class c, pg_attribute a
                    WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                    """, (self._table, 'parent_left'))
                    if 'parent_left' not in self._columns:
                        logger.notifyChannel('init', netsvc.LOG_ERROR, 'create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)' % (self._table, ))
                    if 'parent_right' not in self._columns:
                        logger.notifyChannel('init', netsvc.LOG_ERROR, 'create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)' % (self._table, ))
                    if self._columns[self._parent_name].ondelete<>'cascade':
                        logger.notifyChannel('init', netsvc.LOG_ERROR, "the columns %s on object must be set as ondelete='cascasde'" % (self._name, self._parent_name))
                    cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
                    cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
                    # new columns must be filled at the end of the init
                    store_compute = True

            if self._log_access:
                # audit columns created when _log_access is enabled
                # (the dict assignment / loop headers are in missing lines)
                    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
                    'create_date': 'TIMESTAMP',
                    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
                    'write_date': 'TIMESTAMP'
                        FROM pg_class c, pg_attribute a
                        WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                        """, (self._table, k))
                        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))

            # iterate on the database columns to drop the NOT NULL constraints
            # of fields which were required but have been removed
                "SELECT a.attname, a.attnotnull "\
                "FROM pg_class c, pg_attribute a "\
                "WHERE c.oid=a.attrelid AND c.relname=%s", (self._table,))
            db_columns = cr.dictfetchall()
            for column in db_columns:
                # skip PostgreSQL system columns
                if column['attname'] not in ('id', 'oid', 'tableoid', 'ctid', 'xmin', 'xmax', 'cmin', 'cmax'):
                    if column['attnotnull'] and column['attname'] not in self._columns:
                        cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))

            # iterate on the "object columns"
            todo_update_store = []
            for k in self._columns:
                # reserved column names are handled by the ORM itself
                if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
                    #raise _('Can not define a column %s. Reserved keyword !') % (k,)
                f = self._columns[k]
                if isinstance(f, fields.one2many):
                    # ensure the remote table carries the inverse FK column
                    cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,))
                    cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id))
                    res = cr.fetchone()[0]
                    cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY (%s) REFERENCES "%s" ON DELETE SET NULL' % (self._obj, f._fields_id, f._table))
                elif isinstance(f, fields.many2many):
                    # create the relation table and its two indexes on demand
                    cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (f._rel,))
                    if not cr.dictfetchall():
                        #FIXME: Remove this try/except
                            ref = self.pool.get(f._obj)._table
                        except AttributeError:
                            # target model not loaded yet: derive table name
                            ref = f._obj.replace('.', '_')
                        cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE) WITH OIDS' % (f._rel, f._id1, self._table, f._id2, ref))
                        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
                        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
                    # stored scalar column: compare declaration against the DB
                    cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size " \
                        "FROM pg_class c,pg_attribute a,pg_type t " \
                        "WHERE c.relname=%s " \
                        "AND a.attname=%s " \
                        "AND c.oid=a.attrelid " \
                        "AND a.atttypid=t.oid", (self._table, k))
                    res = cr.dictfetchall()
                    if not isinstance(f, fields.function) or f.store:
                        # add the missing field
                        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                        # on upgrades, seed the new column from the default
                        if not create and k in self._defaults:
                            default = self._defaults[k](self, cr, 1, {})
                            ss = self._columns[k]._symbol_set
                            query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
                            cr.execute(query, (ss[1](default),))
                            logger.notifyChannel('init', netsvc.LOG_DEBUG, 'setting default value of new column %s of table %s'% (k, self._table))
                            logger.notifyChannel('init', netsvc.LOG_DEBUG, 'creating new column %s of table %s'% (k, self._table))
                        if isinstance(f, fields.function):
                            # stored function fields: queue recompute, ordered
                            # by the priority in their store specification
                            if f.store is not True:
                                order = f.store[f.store.keys()[0]][2]
                            todo_update_store.append((order, f,k))
                        # and add constraints if needed
                        if isinstance(f, fields.many2one):
                            #FIXME: Remove this try/except
                                ref = self.pool.get(f._obj)._table
                            except AttributeError:
                                ref = f._obj.replace('.', '_')
                            # ir_actions is inherited so foreign key doesn't work on it
                            if ref != 'ir_actions':
                                cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete))
                            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k))
                        except Exception, e:
                            logger.notifyChannel('init', netsvc.LOG_WARNING, 'WARNING: unable to set column %s of table %s not null !\nTry to re-run: openerp-server.py --update=module\nIf it doesn\'t work, update records and execute manually:\nALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % (k, self._table, self._table, k))
                        # column exists: read its current DB definition
                        f_pg_type = f_pg_def['typname']
                        f_pg_size = f_pg_def['size']
                        f_pg_notnull = f_pg_def['attnotnull']
                        if isinstance(f, fields.function) and not f.store:
                            # became a non-stored function: drop the column
                            logger.notifyChannel('init', netsvc.LOG_INFO, 'column %s (%s) in table %s removed: converted to a function !\n' % (k, f.string, self._table))
                            cr.execute('ALTER TABLE %s DROP COLUMN %s'% (self._table, k))
                        f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
                            # supported (db type, field type, new pg type,
                            # cast suffix) migrations
                            ('text', 'char', 'VARCHAR(%d)' % (f.size or 0,), '::VARCHAR(%d)'%(f.size or 0,)),
                            ('varchar', 'text', 'TEXT', ''),
                            ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                            ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
                            if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size != f.size:
                                # resize VARCHAR via temp column copy/cast/drop
                                logger.notifyChannel('init', netsvc.LOG_INFO, "column '%s' in table '%s' changed size" % (k, self._table))
                                cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
                                cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size))
                                cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size' % (self._table,))
                                # apply the first matching type migration
                                if (f_pg_type==c[0]) and (f._type==c[1]):
                                    logger.notifyChannel('init', netsvc.LOG_INFO, "column '%s' in table '%s' changed type to %s." % (k, self._table, c[1]))
                                    cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
                                    cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                            if f_pg_type != f_obj_type:
                                # no automatic migration path available
                                logger.notifyChannel('init', netsvc.LOG_WARNING, "column '%s' in table '%s' has changed type (DB = %s, def = %s) but unable to migrate this change !" % (k, self._table, f_pg_type, f._type))

                            # if the field is required and hasn't got a NOT NULL constraint
                            if f.required and f_pg_notnull == 0:
                                # set the field to the default value if any
                                if k in self._defaults:
                                    default = self._defaults[k](self, cr, 1, {})
                                    if (default is not None):
                                        ss = self._columns[k]._symbol_set
                                        query = 'UPDATE "%s" SET "%s"=%s WHERE %s is NULL' % (self._table, k, ss[0], k)
                                        cr.execute(query, (ss[1](default),))
                                # add the NOT NULL constraint
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k))
                                except Exception, e:
                                    logger.notifyChannel('init', netsvc.LOG_WARNING, 'unable to set a NOT NULL constraint on column %s of the %s table !\nIf you want to have it, you should update the records and execute manually:\nALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % (k, self._table, self._table, k))
                            elif not f.required and f_pg_notnull == 1:
                                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                            # sync the per-column index with f.select
                            indexname = '%s_%s_index' % (self._table, k)
                            cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
                            res = cr.dictfetchall()
                            if not res and f.select:
                                cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                            if res and not f.select:
                                cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
                            if isinstance(f, fields.many2one):
                                ref = self.pool.get(f._obj)._table
                                if ref != 'ir_actions':
                                    # look up the current FK and its ON DELETE rule
                                    cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, '
                                                'pg_attribute as att1, pg_attribute as att2 '
                                                'WHERE con.conrelid = cl1.oid '
                                                'AND cl1.relname = %s '
                                                'AND con.confrelid = cl2.oid '
                                                'AND cl2.relname = %s '
                                                'AND array_lower(con.conkey, 1) = 1 '
                                                'AND con.conkey[1] = att1.attnum '
                                                'AND att1.attrelid = cl1.oid '
                                                'AND att1.attname = %s '
                                                'AND array_lower(con.confkey, 1) = 1 '
                                                'AND con.confkey[1] = att2.attnum '
                                                'AND att2.attrelid = cl2.oid '
                                                'AND att2.attname = %s '
                                                "AND con.contype = 'f'", (self._table, ref, k, 'id'))
                                    res = cr.dictfetchall()
                                        # recreate the FK when ondelete changed
                                        if res[0]['confdeltype'] != confdeltype.get(f.ondelete.upper(), 'a'):
                                            cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res[0]['conname'] + '"')
                                            cr.execute('ALTER TABLE "' + self._table + '" ADD FOREIGN KEY ("' + k + '") REFERENCES "' + ref + '" ON DELETE ' + f.ondelete)
                    logger = netsvc.Logger()
                    logger.notifyChannel('orm', netsvc.LOG_ERROR, "Programming error !")
        for order,f,k in todo_update_store:
            # deferred: recompute stored function fields after all DDL
            todo_end.append((order, self._update_store, (f, k)))

        cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (self._table,))
        create = not bool(cr.fetchone())
        # apply declared SQL constraints that are not installed yet
        for (key, con, _) in self._sql_constraints:
            conname = '%s_%s' % (self._table, key)
            cr.execute("SELECT conname FROM pg_constraint where conname=%s", (conname,))
            if not cr.dictfetchall():
                    cr.execute('alter table "%s" add constraint "%s_%s" %s' % (self._table, self._table, key, con,))
                    logger.notifyChannel('init', netsvc.LOG_WARNING, 'unable to add \'%s\' constraint on table %s !\n If you want to have it, you should update the records and execute manually:\nALTER table %s ADD CONSTRAINT %s_%s %s' % (con, self._table, self._table, self._table, key, con,))

        # raw SQL initialisation hook, one statement per ';'
        if hasattr(self, "_sql"):
            for line in self._sql.split(';'):
                line2 = line.replace('\n', '').strip()
        # finally fill parent_left/parent_right if they were just created
            self._parent_store_compute(cr)
    def __init__(self, cr):
        """Initialise the model in the registry: register store-triggers for
        function fields, SQL-constraint error messages, load user-defined
        ('manual') fields from ir_model_fields and reload inherited fields.

        NOTE(review): several lines are missing from this view of the file.
        """
        super(orm, self).__init__(cr)
        # copy so per-instance column tweaks don't mutate the class dict
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if not isinstance(f, fields.function):
            # store=True: recompute when the record itself is written
            if self._columns[store_field].store is True:
                sm = {self._name:(lambda self,cr, uid, ids, c={}: ids, None, 10)}
                sm = self._columns[store_field].store
            for object, aa in sm.items():
                    (fnct,fields2,order)=aa
                    raise except_orm('Error',
                        ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority)}.' % (store_field, self._name)))
                self.pool._store_function.setdefault(object, [])
                # skip duplicate registrations of the same trigger
                for x,y,z,e,f in self.pool._store_function[object]:
                    if (x==self._name) and (y==store_field) and (e==fields2):
                self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order))
                # keep triggers sorted by priority (element 4)
                self.pool._store_function[object].sort(lambda x,y: cmp(x[4],y[4]))

        # map constraint names to user-visible error messages
        for (key, _, msg) in self._sql_constraints:
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields
        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
            cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
            for field in cr.dictfetchall():
                if field['name'] in self._columns:
                    # common keyword arguments for the field constructor
                    'string': field['field_description'],
                    'required': bool(field['required']),
                    'readonly': bool(field['readonly']),
                    'domain': field['domain'] or None,
                    'size': field['size'],
                    'ondelete': field['on_delete'],
                    'translate': (field['translate']),
                    #'select': int(field['select_level'])
                # NOTE(review): eval() on stored 'selection' definitions —
                # trusted-admin data, but still an eval of DB content
                if field['ttype'] == 'selection':
                    self._columns[field['name']] = getattr(fields, field['ttype'])(eval(field['selection']), **attrs)
                elif field['ttype'] == 'reference':
                    self._columns[field['name']] = getattr(fields, field['ttype'])(selection=eval(field['selection']), **attrs)
                elif field['ttype'] == 'many2one':
                    self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], **attrs)
                elif field['ttype'] == 'one2many':
                    self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], field['relation_field'], **attrs)
                elif field['ttype'] == 'many2many':
                    # generate a (randomised) relation table name
                    _rel1 = field['relation'].replace('.', '_')
                    _rel2 = field['model'].replace('.', '_')
                    _rel_name = 'x_%s_%s_%s_rel' %(_rel1, _rel2, random.randint(0, 10000))
                    self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                    self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
        self._inherits_reload()
        if not self._sequence:
            self._sequence = self._table+'_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        # reset any per-field caches/state
        for f in self._columns:
            self._columns[f].restart()
    def default_get(self, cr, uid, fields_list, context=None):
        """Return {field: default value} for ``fields_list``, combining (in
        increasing priority): _inherits parents' defaults, model _defaults,
        property-field defaults, user defaults (ir.values) and
        context['default_<field>'] overrides.

        NOTE(review): several lines are missing from this view of the file.
        """
        # get the default values for the inherited fields
        for t in self._inherits.keys():
            value.update(self.pool.get(t).default_get(cr, uid, fields_list,

        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                value[f] = self._defaults[f](self, cr, uid, context)
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            if isinstance(fld_def, fields.property):
                # property fields: default comes from ir.property records
                # with no res_id (company-level default)
                property_obj = self.pool.get('ir.property')
                definition_id = fld_def._field_get(cr, uid, self._name, f)
                nid = property_obj.search(cr, uid, [('fields_id', '=',
                    definition_id), ('res_id', '=', False)])
                    prop_value = property_obj.browse(cr, uid, nid[0],
                            context=context).value
                    # value stored as '<model>,<id>': keep the integer id
                    value[f] = (prop_value and int(prop_value.split(',')[1])) \

        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                # drop many2one defaults pointing at deleted records
                if fld_def._type in ('many2one', 'one2one'):
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value)]):
                # keep only many2many default ids that still exist
                if fld_def._type in ('many2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                # sanitise one2many defaults: drop sub-values whose
                # relational targets no longer exist
                if fld_def._type in ('one2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                value[field] = field_value
        # context keys 'default_<field>' override everything else
        for key in context or {}:
            if key.startswith('default_'):
                value[key[8:]] = context[key]
1824 # Update objects that uses this one to update their _inherits fields
1826 def _inherits_reload_src(self):
1827 for obj in self.pool.obj_pool.values():
1828 if self._name in obj._inherits:
1829 obj._inherits_reload()
    def _inherits_reload(self):
        """Rebuild _inherit_fields from the _inherits parents: each entry
        maps a field name to (parent model, link column, field object).

        NOTE(review): the accumulator initialisation (``res = {}``) is in a
        line missing from this view of the file.
        """
        for table in self._inherits:
            res.update(self.pool.get(table)._inherit_fields)
            # direct columns of the parent model
            for col in self.pool.get(table)._columns.keys():
                res[col] = (table, self._inherits[table], self.pool.get(table)._columns[col])
            # fields the parent itself inherits (transitive _inherits)
            for col in self.pool.get(table)._inherit_fields.keys():
                res[col] = (table, self._inherits[table], self.pool.get(table)._inherit_fields[col][2])
        self._inherit_fields = res
        # let models inheriting from us refresh too
        self._inherits_reload_src()
1842 def fields_get(self, cr, user, fields=None, context=None):
1843 read_access = self.pool.get('ir.model.access').check(cr, user, self._name, 'write', raise_exception=False)
1844 return super(orm, self).fields_get(cr, user, fields, context, read_access)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """Read ``fields`` of the records ``ids`` (int or list of ints).

        Returns a list of dicts, or a single dict/False when a scalar id was
        passed.  NOTE(review): several lines (fields default guard, select
        normalisation, per-row cleanup loop) are missing from this view.
        """
        self.pool.get('ir.model.access').check(cr, user, self._name, 'read')
            # default to all direct + inherited fields
            fields = self._columns.keys() + self._inherit_fields.keys()
        if isinstance(ids, (int, long)):
        result = self._read_flat(cr, user, select, fields, context, load)
        for key, v in r.items():
        # scalar id in -> scalar record out
        if isinstance(ids, (int, long)):
            return result and result[0] or False
    def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
        """Low-level read: fetch classic columns by SQL (with record-rule
        filtering), then translations, _inherits parents, symbol_get
        post-processing and function fields.

        NOTE(review): many lines are missing from this view of the file.
        """
        if fields_to_read == None:
            fields_to_read = self._columns.keys()

        # construct a clause for the rules :
        d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)

        # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
        fields_pre = filter(lambda x: x in self._columns and getattr(self._columns[x], '_classic_write'), fields_to_read) + self._inherits.values()
            def convert_field(f):
                # audit timestamps truncated to whole seconds
                if f in ('create_date', 'write_date'):
                    return "date_trunc('second', %s) as %s" % (f, f)
                # with bin_size, binary columns are fetched as their length
                if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                    return "length(%s) as %s" % (f,f)
                return '"%s"' % (f,)
            fields_pre2 = map(convert_field, fields_pre)
            # fetch in batches bounded by the cursor's IN-clause limit
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids = ids[i:i+cr.IN_MAX]
                    cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) AND %s ORDER BY %s' % \
                            (','.join(fields_pre2 + ['id']), self._table,
                                ','.join([str(x) for x in sub_ids]), d1,
                    # rule clause filtered out some requested ids -> denied
                    if not cr.rowcount == len({}.fromkeys(sub_ids)):
                        raise except_orm(_('AccessError'),
                                _('You try to bypass an access rule (Document type: %s).') % self._description)
                    cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) ORDER BY %s' % \
                            (','.join(fields_pre2 + ['id']), self._table,
                                ','.join([str(x) for x in sub_ids]),
                res.extend(cr.dictfetchall())
            # nothing to fetch from SQL: synthesise id-only rows
            res = map(lambda x: {'id': x}, ids)

        # overlay per-language translations of translatable columns
        for f in fields_pre:
            if self._columns[f].translate:
                ids = map(lambda x: x['id'], res)
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
                    r[f] = res_trans.get(r['id'], False) or r[f]

        # pull inherited fields from the _inherits parent records
        for table in self._inherits:
            col = self._inherits[table]
            cols = intersect(self._inherit_fields.keys(), fields_to_read)
            res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
                    record.update(res3[record[col]])
                    if col not in fields_to_read:

        # all fields which need to be post-processed by a simple function (symbol_get)
        fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
            # maybe it would be faster to iterate on the fields then on res, so that we wouldn't need
            # to get the _symbol_get in each occurence
                for f in fields_post:
                    r[f] = self._columns[f]._symbol_get(r[f])
        ids = map(lambda x: x['id'], res)

        # all non inherited fields for which the attribute whose name is in load is False
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

        # Compute POST fields
        for f in fields_post:
            # group multi-computed fields by their _multi key so each
            # function runs once for the whole group
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key,val in todo.items():
                res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
                        record[pos] = res2[record['id']][pos]
                res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
                    record[f] = res2[record['id']]

        #for f in fields_post:
        #    # get the value of that field for all records/ids
        #    res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
        #    for record in res:
        #        record[f] = res2[record['id']]
        # NOTE(review): the enclosing ``def`` line is not visible in this
        # view of the file; this fragment censors fields of ``vals`` whose
        # column declares a ``groups`` restriction the current user does not
        # belong to, replacing their values with '=No Permission=' markers.
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
            for group in groups:
                # group spec is '<module>.<xml_id>'
                module = group.split(".")[0]
                grp = group.split(".")[1]
                # NOTE(review): SQL built by %-interpolation rather than
                # parameters — values come from field definitions, not user
                # input, but parameterization would still be safer
                cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                        (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                elif readonly[0][0] == 0:
            # substitute a type-appropriate 'no permission' placeholder
            if type(vals[field]) == type([]):
            elif type(vals[field]) == type(0.0):
            elif type(vals[field]) == type(''):
                vals[field] = '=No Permission='
    def perm_read(self, cr, user, ids, context=None, details=True):
        """Return creation/modification metadata for ``ids``; with
        ``details``, uid columns are resolved to (id, name) pairs.

        NOTE(review): several lines (fields init, result loops, return) are
        missing from this view of the file.
        """
        if self._log_access:
            # audit columns only exist when _log_access is enabled
            fields = ', u.create_uid, u.create_date, u.write_uid, u.write_date'
        if isinstance(ids, (int, long)):
        ids_str = string.join(map(lambda x: str(x), ids), ',')
        cr.execute('select u.id'+fields+' from "'+self._table+'" u where u.id in ('+ids_str+')')
        res = cr.dictfetchall()
                # normalise NULLs to False
                r[key] = r[key] or False
                if key in ('write_uid', 'create_uid', 'uid') and details:
                    # resolve the uid to an (id, display name) pair
                    r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
        # scalar id in -> scalar result out
        if isinstance(ids, (int, long)):
    def unlink(self, cr, uid, ids, context=None):
        """Delete the records ``ids``: optional concurrency check
        (read_delta), access/rule checks, workflow cleanup, SQL delete and
        recomputation of dependent stored fields on other models.

        NOTE(review): several lines are missing from this view of the file.
        """
        if isinstance(ids, (int, long)):
        # capture store-trigger targets before the rows disappear
        result_store = self._store_get_values(cr, uid, ids, None, context)

        # optimistic concurrency: refuse if rows changed within read_delta
        delta = context.get('read_delta', False)
        if delta and self._log_access:
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids = ids[i:i+cr.IN_MAX]
                cr.execute("select (now() - min(write_date)) <= '%s'::interval " \
                        "from \"%s\" where id in (%s)" %
                        (delta, self._table, ",".join(map(str, sub_ids))))
                    raise except_orm(_('ConcurrencyException'),
                            _('This record was modified in the meanwhile'))

        self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink')

        # tear down workflow instances for each record
        wf_service = netsvc.LocalService("workflow")
            wf_service.trg_delete(uid, self._name, id, cr)

        #cr.execute('select * from '+self._table+' where id in ('+str_d+')', ids)
        #res = cr.dictfetchall()
        #for key in self._inherits:
        #    ids2 = [x[self._inherits[key]] for x in res]
        #    self.pool.get(key).unlink(cr, uid, ids2)

        # record rules: extra WHERE clause d1 with parameters d2
        d1, d2 = self.pool.get('ir.rule').domain_get(cr, uid, self._name)
        for i in range(0, len(ids), cr.IN_MAX):
            sub_ids = ids[i:i+cr.IN_MAX]
            str_d = string.join(('%s',)*len(sub_ids), ',')
                # rule clause must still match every id, else access denied
                cr.execute('SELECT id FROM "'+self._table+'" ' \
                        'WHERE id IN ('+str_d+')'+d1, sub_ids+d2)
                if not cr.rowcount == len({}.fromkeys(ids)):
                    raise except_orm(_('AccessError'),
                            _('You try to bypass an access rule (Document type: %s).') % \
                cr.execute('delete from "'+self._table+'" ' \
                        'where id in ('+str_d+')'+d1, sub_ids+d2)
                cr.execute('delete from "'+self._table+'" ' \
                        'where id in ('+str_d+')', sub_ids)

        # recompute stored function fields on other models that depended
        # on the deleted records
        for order, object, ids, fields in result_store:
            if object<>self._name:
                self.pool.get(object)._store_set_values(cr, uid, ids, fields, context)
    def write(self, cr, user, ids, vals, context=None):
        """Update records ``ids`` with the field values in ``vals``.

        Steps visible here: strip fields the user's groups may not write,
        'read_delta' concurrency check, 'write' ACL check, direct SQL UPDATE
        of classic columns (with ir.rule record-rule filtering), translation
        storage, 'set' methods for non classic_write fields, propagation to
        _inherits parents, validation, parent_left/right (nested-set)
        maintenance, stored-field recomputation and workflow triggers.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
        # drop fields whose column declares group-based write restrictions
        # that the current user does not satisfy
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
                fobj = self._inherit_fields[field][2]
                for group in groups:
                    module = group.split(".")[0]
                    grp = group.split(".")[1]
                    # NOTE(review): group/module names are interpolated
                    # directly into SQL -- parameterized queries would be
                    # safer if these ever contain quotes
                    cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                            (grp, module, 'res.groups', user))
                    readonly = cr.fetchall()
                    if readonly[0][0] >= 1:
                    elif readonly[0][0] == 0:
        if isinstance(ids, (int, long)):
        delta = context.get('read_delta', False)
        if delta and self._log_access:
            # optimistic concurrency: reject the write if any target row was
            # modified within the 'read_delta' interval
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids = ids[i:i+cr.IN_MAX]
                cr.execute("select (now() - min(write_date)) <= '%s'::interval " \
                        "from %s where id in (%s)" %
                        (delta, self._table, ",".join(map(str, sub_ids))))
                    if field in self._columns and self._columns[field]._classic_write:
                        raise except_orm(_('ConcurrencyException'),
                                _('This record was modified in the meanwhile'))
        self.pool.get('ir.model.access').check(cr, user, self._name, 'write')
        #for v in self._inherits.values():
        #    assert v not in vals, (v, vals)
        # split vals into: direct SQL columns (upd0/upd1), fields needing a
        # 'set' method (upd_todo) and inherited fields (updend)
        totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
            if field in self._columns:
                if self._columns[field]._classic_write:
                    # translated fields keep their en_US value in-table;
                    # other languages go to ir.translation below
                    if (not totranslate) or not self._columns[field].translate:
                        upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                        upd1.append(self._columns[field]._symbol_set[1](vals[field]))
                    direct.append(field)
                    upd_todo.append(field)
                updend.append(field)
            # validate selection/reference values against the field's domain
            if field in self._columns \
                    and hasattr(self._columns[field], 'selection') \
                if self._columns[field]._type == 'reference':
                    val = vals[field].split(',')[0]
                if isinstance(self._columns[field].selection, (tuple, list)):
                    if val not in dict(self._columns[field].selection):
                        raise except_orm(_('ValidateError'),
                        _('The value "%s" for the field "%s" is not in the selection') \
                                % (vals[field], field))
                    # dynamic selection: call it to get the allowed values
                    if val not in dict(self._columns[field].selection(
                        self, cr, user, context=context)):
                        raise except_orm(_('ValidateError'),
                        _('The value "%s" for the field "%s" is not in the selection') \
                                % (vals[field], field))
        if self._log_access:
            # maintain the audit columns on every write
            upd0.append('write_uid=%s')
            upd0.append('write_date=now()')
            # record rules for 'write': d1 WHERE fragment, d2 its parameters
            d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids = ids[i:i+cr.IN_MAX]
                ids_str = string.join(map(str, sub_ids), ',')
                    cr.execute('SELECT id FROM "'+self._table+'" ' \
                            'WHERE id IN ('+ids_str+')'+d1, d2)
                    if not cr.rowcount == len({}.fromkeys(sub_ids)):
                        raise except_orm(_('AccessError'),
                                _('You try to bypass an access rule (Document type: %s).') % \
                    cr.execute('SELECT id FROM "'+self._table+'" WHERE id IN ('+ids_str+')')
                    if not cr.rowcount == len({}.fromkeys(sub_ids)):
                        raise except_orm(_('AccessError'),
                                _('You try to write on an record that doesn\'t exist ' \
                                        '(Document type: %s).') % self._description)
                    cr.execute('update "'+self._table+'" set '+string.join(upd0, ',')+' ' \
                            'where id in ('+ids_str+')'+d1, upd1+ d2)
                    cr.execute('update "'+self._table+'" set '+string.join(upd0, ',')+' ' \
                            'where id in ('+ids_str+')', upd1)
                    # store non-en_US values of translated fields separately
                    if self._columns[f].translate:
                        self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f])
        # call the 'set' method of fields which are not classic_write
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
        for field in upd_todo:
                self._columns[field].set(cr, self, id, field, vals[field], user, context=context)
        # propagate inherited-field values to the _inherits parent records
        for table in self._inherits:
            col = self._inherits[table]
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids = ids[i:i+cr.IN_MAX]
                ids_str = string.join(map(str, sub_ids), ',')
                cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                        'where id in ('+ids_str+')', upd1)
                nids.extend([x[0] for x in cr.fetchall()])
                if self._inherit_fields[val][0] == table:
            self.pool.get(table).write(cr, user, nids, v, context)
        self._validate(cr, user, ids, context)
        # TODO: use _order to set dest at the right position and not first node of parent
        # nested-set (parent_left/parent_right) maintenance when the record
        # is re-parented; deferred during module init
        if self._parent_store and (self._parent_name in vals):
                self.pool._init_parent[self._name]=True
                if vals[self._parent_name]:
                    cr.execute('select parent_left,parent_right from '+self._table+' where id=%s', (vals[self._parent_name],))
                    # no parent: deliberately fetch no row (root placement)
                    cr.execute('SELECT parent_left,parent_right FROM '+self._table+' WHERE id IS NULL')
                    cr.execute('select max(parent_right),max(parent_right)+1 from '+self._table)
                    pleft,pright = cr.fetchone()
                cr.execute('select parent_left,parent_right,id from '+self._table+' where id in ('+','.join(map(lambda x:'%s',ids))+')', ids)
                for cleft,cright,cid in cr.fetchall():
                        # moving the subtree left->right or right->left shifts
                        # the interval [leftbound, rightbound] by cwidth and
                        # the subtree itself by treeshift
                        treeshift = pleft - cleft + 1
                        rightbound = cleft-1
                        cwidth = cright-cleft+1
                        treeshift = pleft - cright
                        leftbound = cright + 1
                        cwidth = cleft-cright-1
                    cr.execute('UPDATE '+self._table+'''
                                WHEN parent_left BETWEEN %s AND %s THEN parent_left + %s
                                WHEN parent_left BETWEEN %s AND %s THEN parent_left + %s
                                WHEN parent_right BETWEEN %s AND %s THEN parent_right + %s
                                WHEN parent_right BETWEEN %s AND %s THEN parent_right + %s
                            parent_left<%s OR parent_right>%s;
                        ''', (leftbound,rightbound,cwidth,cleft,cright,treeshift,leftbound,rightbound,
                            cwidth,cleft,cright,treeshift,leftrange,rightrange))
        # the concurrency token is single-use
        if 'read_delta' in context:
            del context['read_delta']
        # recompute stored function fields that depend on the written values
        result = self._store_get_values(cr, user, ids, vals.keys(), context)
        for order, object, ids, fields in result:
            self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
        wf_service = netsvc.LocalService("workflow")
            wf_service.trg_write(user, self._name, id, cr)
2307 # TODO: Should set perm to user.xxx
    def create(self, cr, user, vals, context=None):
        """Create a new record and return its database id.

        cr: database cursor
        user: current user id
        vals: dictionary of the form {'field_name': field_value, ...}
        Missing fields are filled from default_get(); _inherits parents are
        created first and linked via their reference column.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
        self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
        # compute the list of fields needing a default value; tables whose
        # link column was provided explicitly are not re-created
        for (t, c) in self._inherits.items():
                avoid_table.append(t)
        for f in self._columns.keys(): # + self._inherit_fields.keys():
        for f in self._inherit_fields.keys():
            if (not f in vals) and (not self._inherit_fields[f][0] in avoid_table):
            vals.update(self.default_get(cr, user, default, context))
        # route inherited-field values to the parent records to be created
        for v in self._inherits:
            if self._inherits[v] not in vals:
        # upd0: column-name fragment, upd1: placeholder fragment, upd2: values
        (upd0, upd1, upd2) = ('', '', [])
        for v in vals.keys():
            if v in self._inherit_fields:
                (table, col, col_detail) = self._inherit_fields[v]
                tocreate[table][v] = vals[v]
        # allocate the new id from the model's sequence
        cr.execute("SELECT nextval('"+self._sequence+"')")
        id_new = cr.fetchone()[0]
        for table in tocreate:
            # create each _inherits parent and store its id in the link column
            id = self.pool.get(table).create(cr, user, tocreate[table])
            upd0 += ','+self._inherits[table]
            if self._columns[field]._classic_write:
                upd0 = upd0 + ',"' + field + '"'
                upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
                upd2.append(self._columns[field]._symbol_set[1](vals[field]))
                upd_todo.append(field)
            # validate selection/reference values against the field's domain
            if field in self._columns \
                    and hasattr(self._columns[field], 'selection') \
                if self._columns[field]._type == 'reference':
                    val = vals[field].split(',')[0]
                if isinstance(self._columns[field].selection, (tuple, list)):
                    if val not in dict(self._columns[field].selection):
                        raise except_orm(_('ValidateError'),
                        _('The value "%s" for the field "%s" is not in the selection') \
                                % (vals[field], field))
                    # dynamic selection: call it to get the allowed values
                    if val not in dict(self._columns[field].selection(
                        self, cr, user, context=context)):
                        raise except_orm(_('ValidateError'),
                        _('The value "%s" for the field "%s" is not in the selection') \
                                % (vals[field], field))
        if self._log_access:
            # audit columns are filled on creation
            upd0 += ',create_uid,create_date'
        cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
        # call the 'set' method of fields which are not classic_write,
        # in field-priority order
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
        for field in upd_todo:
            self._columns[field].set(cr, self, id_new, field, vals[field], user, context)
        self._validate(cr, user, [id_new], context)
        # nested-set maintenance: insert the new node after its parent's
        # left edge (deferred during module init)
        if self._parent_store:
                self.pool._init_parent[self._name]=True
                parent = vals.get(self._parent_name, False)
                    cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
                    pleft = cr.fetchone()[0]
                    # no parent: append at the far right of the tree
                    cr.execute('select max(parent_right) from '+self._table)
                    pleft = cr.fetchone()[0] or 0
                cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
                cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
                cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1,pleft+2,id_new))
        # recompute stored function fields depending on the created values
        result = self._store_get_values(cr, user, [id_new], vals.keys(), context)
        for order, object, ids, fields in result:
            self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_create(user, self._name, id_new, cr)
    def _store_get_values(self, cr, uid, ids, fields, context):
        """Compute which stored function fields must be recomputed.

        Consults the pool's _store_function registry for this model and
        returns a list of (priority, model_name, [record ids], [field names])
        tuples, grouping record ids that share the same set of triggered
        functions.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
        fncts = self.pool._store_function.get(self._name, [])
        for fnct in range(len(fncts)):
            result.setdefault(fncts[fnct][0], {})
            # fncts[fnct][2] maps the written ids to the ids whose stored
            # value is affected
            ids2 = fncts[fnct][2](self,cr, uid, ids, context)
            for id in filter(None, ids2):
                result[fncts[fnct][0]].setdefault(id, [])
                result[fncts[fnct][0]][id].append(fnct)
        for object in result:
            # invert the mapping: group ids by identical function tuples so
            # each recomputation batch covers many records at once
            for id,fnct in result[object].items():
                k2.setdefault(tuple(fnct), [])
                k2[tuple(fnct)].append(id)
            for fnct,id in k2.items():
                result2.append((fncts[fnct[0]][4],object,id,map(lambda x: fncts[x][1], fnct)))
    def _store_set_values(self, cr, uid, ids, fields, context):
        """Recompute and store the given function fields for ``ids``.

        Multi-fields (sharing a ``_multi`` key) are computed together with a
        single get() call and written with one UPDATE per record; remaining
        fields are computed and written one by one.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
            # group fields that share the same _multi key
                if self._columns[f]._multi not in keys:
                    keys.append(self._columns[f]._multi)
                todo.setdefault(self._columns[f]._multi, [])
                todo[self._columns[f]._multi].append(f)
            # one get() call computes every field of the multi group
            result = self._columns[val[0]].get(cr, self, ids, val, uid, context=context)
            for id,value in result.items():
                    if self._columns[v]._type in ('many2one', 'one2one'):
                            # get() may return (id, name); keep only the id
                            value[v] = value[v][0]
                    upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
                    upd1.append(self._columns[v]._symbol_set[1](value[v]))
                cr.execute('update "' + self._table + '" set ' + \
                    string.join(upd0, ',') + ' where id = %s', upd1)
            # non-multi fields: compute and write individually
            result = self._columns[f].get(cr, self, ids, f, uid, context=context)
            for id,value in result.items():
                if self._columns[f]._type in ('many2one', 'one2one'):
                cr.execute('update "' + self._table + '" set ' + \
                    '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value),id))
2481 def perm_write(self, cr, user, ids, fields, context=None):
2482 raise _('This method does not exist anymore')
# TODO: improve handling of NULL values
    def _where_calc(self, cr, user, args, active_test=True, context=None):
        """Translate a domain ``args`` into SQL WHERE fragments.

        Returns (qu1, qu2, tables): list of WHERE clause strings, their
        parameters, and the tables to select from.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitely asked for
        if 'active' in self._columns and (active_test and context.get('active_test', True)):
                active_in_args = False
                    if a[0] == 'active':
                        active_in_args = True
                if not active_in_args:
                    args.insert(0, ('active', '=', 1))
                # empty domain: restrict to active records only
                args = [('active', '=', 1)]
            # delegate domain parsing/SQL generation to the expression module
            e = expression.expression(args)
            e.parse(cr, user, self, context)
            tables = e.get_tables()
            qu1, qu2 = e.to_sql()
            qu1 = qu1 and [qu1] or []
            # no domain at all: select from this model's table unfiltered
            qu1, qu2, tables = [], [], ['"%s"' % self._table]
        return (qu1, qu2, tables)
    def _check_qorder(self, word):
        """Reject ORDER BY strings that are not plain column lists.

        ``word`` must match regex_order (comma-separated identifiers with an
        optional asc/desc); anything else would allow SQL injection through
        the order parameter of search().
        """
        if not regex_order.match(word):
            raise except_orm(_('AccessError'), _('Bad query.'))
    def search(self, cr, user, args, offset=0, limit=None, order=None,
            context=None, count=False):
        """Return the ids of records matching the domain ``args``.

        offset/limit paginate the result; order overrides self._order (after
        validation by _check_qorder); when count is True the number of
        matching records is returned instead of the id list. ir.rule record
        rules are appended to the WHERE clause.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
        # compute the where, order by, limit and offset clauses
        (qu1, qu2, tables) = self._where_calc(cr, user, args, context=context)
            qu1 = ' where '+string.join(qu1, ' and ')
            # validate the caller-supplied order string before SQL use
            self._check_qorder(order)
        order_by = order or self._order
        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        # construct a clause for the rules :
        d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)
            qu1 = qu1 and qu1+' and '+d1 or ' where '+d1
            # count mode: return the number of matching rows only
            cr.execute('select count(%s.id) from ' % self._table +
                    ','.join(tables) +qu1 + limit_str + offset_str, qu2)
        # execute the "main" query to fetch the ids we were searching for
        cr.execute('select %s.id from ' % self._table + ','.join(tables) +qu1+' order by '+order_by+limit_str+offset_str, qu2)
        return [x[0] for x in res]
2555 # returns the different values ever entered for one field
2556 # this is used, for example, in the client when the user hits enter on
    def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
        """Return distinct values previously entered for ``field``.

        Inherited fields delegate to the parent model; otherwise the
        column's own search() implementation is used.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
        if field in self._inherit_fields:
            # field lives on a _inherits parent: delegate to that model
            return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
            return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
    def name_get(self, cr, user, ids, context=None):
        """Return [(id, display_name)] for ``ids`` using self._rec_name.

        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
        if isinstance(ids, (int, long)):
        # read only the _rec_name column; ustr() guards against non-unicode
        return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids,
            [self._rec_name], context, load='_classic_write')]
    def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=None):
        """Search records whose _rec_name matches ``name`` (plus ``args``).

        Returns the name_get() result of the matching ids.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
            # add the name criterion to the caller-supplied domain
            args += [(self._rec_name, operator, name)]
        ids = self.search(cr, user, args, limit=limit, context=context)
        res = self.name_get(cr, user, ids, context)
    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate record ``id``, applying ``default`` overrides.

        Relational fields are remapped (many2one -> id, one2many -> copies
        of the lines, many2many -> same links), audit columns are dropped,
        and translations of the original are duplicated for the new record.
        Returns the new record's id.
        NOTE(review): this excerpt is incomplete; intermediate lines are elided.
        """
        if 'state' not in default:
            # reset workflow-like state to its default for the fresh copy
            if 'state' in self._defaults:
                default['state'] = self._defaults['state'](self, cr, uid, context)
        data = self.read(cr, uid, [id], context=context)[0]
        fields = self.fields_get(cr, uid)
            ftype = fields[f]['type']
            # audit columns must not be copied onto the new record
            if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
                data[f] = default[f]
            elif ftype == 'function':
            elif ftype == 'many2one':
                    # read() returned (id, name); keep only the id
                    data[f] = data[f] and data[f][0]
            elif ftype in ('one2many', 'one2one'):
                rel = self.pool.get(fields[f]['relation'])
                for rel_id in data[f]:
                    # the lines are first duplicated using the wrong (old)
                    # parent but then are reassigned to the correct one thanks
                    res.append((4, rel.copy(cr, uid, rel_id, context=context)))
            elif ftype == 'many2many':
                # link to the same targets, do not duplicate them
                data[f] = [(6, 0, data[f])]
        trans_obj = self.pool.get('ir.translation')
            # collect translations attached to the source record so they can
            # be re-created for the copy
            if f in self._columns and self._columns[f].translate:
                trans_name=self._name+","+f
            elif f in self._inherit_fields and self._inherit_fields[f][2].translate:
                trans_name=self._inherit_fields[f][0]+","+f
                trans_ids = trans_obj.search(cr, uid, [
                        ('name', '=', trans_name),
                        ('res_id','=',data['id'])
                trans_data.extend(trans_obj.read(cr,uid,trans_ids,context=context))
        # drop the _inherits link columns: create() makes fresh parents
        for v in self._inherits:
            del data[self._inherits[v]]
        new_id=self.create(cr, uid, data)
        for record in trans_data:
            # re-attach each copied translation to the new record
            record['res_id']=new_id
            trans_obj.create(cr,uid,record)
    def check_recursion(self, cr, uid, ids, parent=None):
        """Detect cycles in the ``parent`` (default self._parent_name) chain.

        Walks up the ancestor chain of ``ids`` level by level; if any of the
        starting ids reappears among the ancestors there is a loop.
        NOTE(review): this excerpt is incomplete; intermediate lines (including
        the return statements) are elided in this view.
        """
            parent = self._parent_name
        while len(ids_parent):
            # fetch the next level of ancestors, batched by cr.IN_MAX
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
                cr.execute('SELECT distinct "'+parent+'"'+
                    ' FROM "'+self._table+'" ' \
                    'WHERE id in ('+','.join(map(str, sub_ids_parent))+')')
                # drop NULL parents (tree roots)
                ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
            ids_parent = ids_parent2
            for i in ids_parent:
2678 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: