1 # -*- encoding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 ##############################################################################
24 # Object relationnal mapping to postgresql module
25 # . Hierarchical structure
26 # . Constraints consistency, validations
27 # . Object meta Data depends on its status
28 # . Optimised processing by complex query (multiple actions at once)
29 # . Default fields value
30 # . Permissions optimisation
31 # . Persistant object: DB postgresql
33 # . Multi-level caching system
34 # . 2 different inheritancies
36 # - classicals (varchar, integer, boolean, ...)
37 # - relations (one2many, many2one, many2many)
55 from xml import dom, xpath
57 sys.stderr.write("ERROR: Import xpath module\n")
58 sys.stderr.write("ERROR: Try to install the old python-xml package\n")
61 from tools.config import config
# Validates client-supplied ORDER BY clauses: comma-separated identifiers,
# each optionally followed by " desc" or " asc", case-insensitive.
# NOTE(review): this listing is mangled (original line numbers embedded,
# indentation lost); code kept verbatim.
63 regex_order = re.compile('^([a-zA-Z0-9_]+( desc)?( asc)?,?)+$', re.I)
def intersect(la, lb):
    """Return the items of *la* that also occur in *lb*, preserving *la*'s
    order (and duplicates).

    Improvement: the original used ``filter(lambda x: x in lb, la)``; the
    list comprehension is the idiomatic spelling, avoids the lambda call per
    element, and returns a list on both Python 2 and Python 3 (``filter``
    returns a lazy iterator on Python 3).
    """
    return [x for x in la if x in lb]
# Generic ORM exception: carries a (name, value) pair, e.g.
# ('ValidateError', <message>), surfaced to the client as title + body.
# NOTE(review): listing is elided — the original also stored name/value as
# instance attributes on lines not visible here; code kept verbatim.
70 class except_orm(Exception):
71 def __init__(self, name, value):
# populate BaseException.args so str()/logging show both parts
74 self.args = (name, value)
# Null-object returned when a relational field is empty: every item or
# attribute access yields False, and the object itself is falsy
# (__nonzero__ — Python 2's truth-test hook).
# NOTE(review): listing is elided — several method bodies are missing here;
# code kept verbatim.
77 # Readonly python database object browser
78 class browse_null(object):
83 def __getitem__(self, name):
86 def __getattr__(self, name):
87 return False # XXX: return self ?
95 def __nonzero__(self):
# A plain list of browse_record objects that also remembers the context it
# was created with (used when export/translation code needs the language).
# NOTE(review): listing is elided — lines between the def and the super()
# call are missing; code kept verbatim.
100 # TODO: execute an object method on browse_record_list
102 class browse_record_list(list):
104 def __init__(self, lst, context=None):
107 super(browse_record_list, self).__init__(lst)
108 self.context = context
# browse_record: lazy, cached proxy over one database row of `table`.
# Field access triggers a batched table.read() whose results are stored in a
# per-table cache shared by sibling records, so iterating a record list hits
# the database once per field group rather than once per record.
# NOTE(review): this listing is mangled — original line numbers are embedded
# and many interior lines are elided; code below is kept verbatim.
111 class browse_record(object):
112 def __init__(self, cr, uid, id, table, cache, context=None, list_class = None, fields_process={}):
114 table : the object (inherited from orm)
115 context : a dictionnary with an optionnal context
119 assert id, _('Wrong ID for the browse record, got %s, expected an integer.') % str(id)
120 self._list_class = list_class or browse_record_list
125 self._table_name = self._table._name
126 self._context = context
127 self._fields_process = fields_process
# per-table slot of the shared cache; seeded with this record's id
129 cache.setdefault(table._name, {})
130 self._data = cache[table._name]
131 if not id in self._data:
132 self._data[id] = {'id': id}
135 def __getitem__(self, name):
# cache miss: decide what to fetch, read it, then wrap relational values
138 if not name in self._data[self._id]:
139 # build the list of fields we will fetch
141 # fetch the definition of the field which was asked for
142 if name in self._table._columns:
143 col = self._table._columns[name]
144 elif name in self._table._inherit_fields:
145 col = self._table._inherit_fields[name][2]
146 elif hasattr(self._table, name):
# model methods are curried with (cr, uid, [id]) so callers can
# invoke them directly on the browse record
147 if isinstance(getattr(self._table, name), (types.MethodType, types.LambdaType, types.FunctionType)):
148 return lambda *args, **argv: getattr(self._table, name)(self._cr, self._uid, [self._id], *args, **argv)
150 return getattr(self._table, name)
152 logger = netsvc.Logger()
153 logger.notifyChannel('orm', netsvc.LOG_ERROR, "Programming error: field '%s' does not exist in object '%s' !" % (name, self._table._name))
156 # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
157 if col._classic_write:
158 # gen the list of "local" (ie not inherited) fields which are classic or many2one
159 ffields = filter(lambda x: x[1]._classic_write, self._table._columns.items())
160 # gen the list of inherited fields
161 inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
162 # complete the field list with the inherited fields which are classic or many2one
163 ffields += filter(lambda x: x[1]._classic_write, inherits)
164 # otherwise we fetch only that field
166 ffields = [(name, col)]
# prefetch for every cached sibling id that still lacks this field
167 ids = filter(lambda id: not name in self._data[id], self._data.keys())
169 fffields = map(lambda x: x[0], ffields)
170 datas = self._table.read(self._cr, self._uid, ids, fffields, context=self._context, load="_classic_write")
# optional per-type post-processing hooks (e.g. report field wrappers)
171 if self._fields_process:
173 if f._type in self._fields_process:
175 d[n] = self._fields_process[f._type](d[n])
176 d[n].set_value(d[n], self, f)
179 # create browse records for 'remote' objects
182 if f._type in ('many2one', 'one2one'):
184 obj = self._table.pool.get(f._obj)
186 if not f._classic_write:
191 data[n] = browse_record(self._cr, self._uid, ids2, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
193 data[n] = browse_null()
195 data[n] = browse_null()
196 elif f._type in ('one2many', 'many2many') and len(data[n]):
197 data[n] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(f._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in data[n]], self._context)
198 self._data[data['id']].update(data)
199 return self._data[self._id][name]
201 def __getattr__(self, name):
# attribute access delegates to __getitem__ (body elided in this listing)
202 # raise an AttributeError exception.
def __contains__(self, name):
    """A name is "in" a browse record when it is a real column, an
    inherited field, or any other attribute of the underlying table
    object (e.g. a model method)."""
    table = self._table
    if name in table._columns:
        return True
    if name in table._inherit_fields:
        return True
    return hasattr(table, name)
# NOTE(review): elided listing — the __hasattr__ body is missing, and the
# bare return below is the tail of __repr__/__str__ (its def line is not
# visible here); code kept verbatim.
208 def __hasattr__(self, name):
215 return "browse_record(%s, %d)" % (self._table_name, self._id)
def __eq__(self, other):
    """Two browse records are equal when they reference the same row:
    same table name and same database id.

    Bug fix: the original unconditionally read ``other._table_name``, so
    comparing against anything that is not a browse_record (None, an int
    id, a browse_null, ...) raised AttributeError instead of returning
    False.
    """
    if not isinstance(other, browse_record):
        return False
    return (self._table_name, self._id) == (other._table_name, other._id)
def __ne__(self, other):
    """Inverse of __eq__ (required explicitly on Python 2).

    Bug fix: mirror of __eq__ — a non-browse_record operand is simply
    unequal instead of raising AttributeError on the missing
    ``_table_name`` attribute.
    """
    if not isinstance(other, browse_record):
        return True
    return (self._table_name, self._id) != (other._table_name, other._id)
# NOTE(review): elided listing — the __hash__ def line is missing before the
# final return; code kept verbatim.
223 # we need to define __unicode__ even though we've already defined __str__
224 # because we have overridden __getattr__
225 def __unicode__(self):
226 return unicode(str(self))
# hash on (table name, id): consistent with __eq__ above
229 return hash((self._table_name, self._id))
# Fragment of get_pg_type(f): maps an ORM field object to a pair
# (postgres base type name, SQL column-type expression) used by the schema
# auto-creation code. Returns None (with a warning) for unsupported types.
# NOTE(review): the def line and several branches are elided in this
# listing; code kept verbatim.
237 (type returned by postgres when the column was created, type expression to create the column)
241 fields.boolean: 'bool',
242 fields.integer: 'int4',
243 fields.integer_big: 'int8',
247 fields.datetime: 'timestamp',
248 fields.binary: 'bytea',
# many2one is stored as a plain integer foreign-key column
249 fields.many2one: 'int4',
251 if type(f) in type_dict:
252 f_type = (type_dict[type(f)], type_dict[type(f)])
253 elif isinstance(f, fields.float):
255 f_type = ('numeric', 'NUMERIC(%d,%d)' % (f.digits[0], f.digits[1]))
257 f_type = ('float8', 'DOUBLE PRECISION')
258 elif isinstance(f, (fields.char, fields.reference)):
259 f_type = ('varchar', 'VARCHAR(%d)' % (f.size,))
260 elif isinstance(f, fields.selection):
# selection width: widest key, but at least f.size or 16
261 if isinstance(f.selection, list) and isinstance(f.selection[0][0], (str, unicode)):
262 f_size = reduce(lambda x, y: max(x, len(y[0])), f.selection, f.size or 16)
263 elif isinstance(f.selection, list) and isinstance(f.selection[0][0], int):
266 f_size = (hasattr(f, 'size') and f.size) or 16
269 f_type = ('int4', 'INTEGER')
271 f_type = ('varchar', 'VARCHAR(%d)' % f_size)
# function fields borrow the SQL type of the type they emulate
272 elif isinstance(f, fields.function) and eval('fields.'+(f._type)) in type_dict:
273 t = eval('fields.'+(f._type))
274 f_type = (type_dict[t], type_dict[t])
275 elif isinstance(f, fields.function) and f._type == 'float':
276 f_type = ('float8', 'DOUBLE PRECISION')
277 elif isinstance(f, fields.function) and f._type == 'selection':
278 f_type = ('text', 'text')
280 logger = netsvc.Logger()
281 logger.notifyChannel("init", netsvc.LOG_WARNING, '%s type not supported!' % (type(f)))
# orm_template: abstract base of all ORM models (SQL-backed orm and
# in-memory orm_memory both derive from it).
# NOTE(review): mangled/elided listing — class attributes and many statements
# of _field_create are missing; code kept verbatim.
286 class orm_template(object):
292 _parent_name = 'parent_id'
293 _parent_store = False
# _field_create: synchronise this model's metadata into ir_model /
# ir_model_fields / ir_model_data (insert when absent, update when changed).
# WARNING(review): several queries below interpolate values with % instead
# of using parameters — safe only because the values are internal model
# names, but fragile.
301 def _field_create(self, cr, context={}):
302 cr.execute("SELECT id FROM ir_model_data WHERE name='%s'" % ('model_'+self._name.replace('.','_'),))
304 cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
305 id = cr.fetchone()[0]
306 cr.execute("INSERT INTO ir_model (id,model, name, info) VALUES (%s, %s, %s, %s)", (id, self._name, self._description, self.__doc__))
# record which addon module owns this model (for uninstall/update)
307 if 'module' in context:
308 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
309 ('model_'+self._name.replace('.','_'), context['module'], 'ir.model', id)
313 cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
315 for rec in cr.dictfetchall():
316 cols[rec['name']] = rec
318 cr.execute("SELECT id FROM ir_model WHERE model='%s'" % self._name)
319 model_id = cr.fetchone()[0]
# one pass per declared column: build the metadata dict, then insert/update
321 for (k, f) in self._columns.items():
323 'model_id': model_id,
326 'field_description': f.string.replace("'", " "),
328 'relation': f._obj or 'NULL',
329 'view_load': (f.view_load and 1) or 0,
330 'select_level': str(f.select or 0),
331 'readonly':(f.readonly and 1) or 0,
332 'required':(f.required and 1) or 0,
335 cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
336 id = cr.fetchone()[0]
338 cr.execute("""INSERT INTO ir_model_fields (
339 id, model_id, model, name, field_description, ttype,
340 relation,view_load,state,select_level
342 %d,%s,%s,%s,%s,%s,%s,%s,%s,%s
344 id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
345 vals['relation'], bool(vals['view_load']), 'base',
348 if 'module' in context:
349 cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
# xml-id truncated to 64 chars to fit the column
350 (('field_'+self._table+'_'+k)[:64], context['module'], 'ir.model.fields', id)
# existing field row: update when any metadata value differs
353 for key, val in vals.items():
354 if cols[k][key] != vals[key]:
355 cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
357 cr.execute("""UPDATE ir_model_fields SET
358 model_id=%s, field_description=%s, ttype=%s, relation=%s,
359 view_load=%s, select_level=%s, readonly=%s ,required=%s
361 model=%s AND name=%s""", (
362 vals['model_id'], vals['field_description'], vals['ttype'],
363 vals['relation'], bool(vals['view_load']),
364 vals['select_level'], bool(vals['readonly']),bool(vals['required']), vals['model'], vals['name']
def _auto_init(self, cr, context=None):
    """Default schema initialisation for abstract/non-SQL models: only
    (re)create the ir.model / ir.model.fields metadata rows.

    :param cr: database cursor
    :param context: optional dict; 'module' marks the owning addon

    Bug fix: the original used a mutable default argument
    (``context={}``) — a single dict shared across every call and every
    model. Use the ``None`` sentinel and substitute a fresh empty dict.
    """
    self._field_create(cr, context or {})
# Constructor: every concrete model must declare _name (or _inherit);
# _description defaults to the name and _table to the dotted name with
# dots replaced by underscores.
# NOTE(review): elided listing — some statements are missing; code verbatim.
372 def __init__(self, cr):
373 if not self._name and not hasattr(self, '_inherit'):
374 name = type(self).__name__.split('.')[0]
375 msg = "The class %s has to have a _name attribute" % name
377 logger = netsvc.Logger()
378 logger.notifyChannel('orm', netsvc.LOG_ERROR, msg )
379 raise except_orm('ValueError', msg )
381 if not self._description:
382 self._description = self._name
384 self._table = self._name.replace('.', '_')
# browse(): wrap one id or a list of ids into browse_record /
# browse_record_list proxies sharing a single cache.
# NOTE(review): elided listing (cache creation and fallback return are
# missing); code kept verbatim.
386 def browse(self, cr, uid, select, context=None, list_class=None, fields_process={}):
389 self._list_class = list_class or browse_record_list
391 # need to accepts ints and longs because ids coming from a method
392 # launched by button in the interface have a type long...
393 if isinstance(select, (int, long)):
394 return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
395 elif isinstance(select, list):
396 return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context)
# __export_row: flatten one browse_record into CSV-style rows following the
# 'a/b/c' field paths; sub-records of one2many fields produce extra rows.
# export_data: drive __export_row over a list of ids.
# NOTE(review): heavily elided listing; code kept verbatim.
400 def __export_row(self, cr, uid, row, fields, context=None):
402 data = map(lambda x: '', range(len(fields)))
404 for fpos in range(len(fields)):
413 if isinstance(r, (browse_record_list, list)):
# keep only the sub-paths under the current prefix for recursion
415 fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
421 lines2 = self.__export_row(cr, uid, row2, fields2,
# the first sub-row is merged back into the current row
424 for fpos2 in range(len(fields)):
425 if lines2 and lines2[0][fpos2]:
426 data[fpos2] = lines2[0][fpos2]
434 data[fpos] = str(r or '')
435 return [data] + lines
437 def export_data(self, cr, uid, ids, fields, context=None):
# 'a/b' paths are split into ['a', 'b'] once up front
440 fields = map(lambda x: x.split('/'), fields)
442 for row in self.browse(cr, uid, ids, context):
443 datas += self.__export_row(cr, uid, row, fields, context)
# import_data: inverse of export_data — consume tabular rows (lists of
# strings) whose header paths may address sub-records ('a/b'), ':id'
# external references and ':lang=' translations, then create/update records
# through ir.model.data._update.
# NOTE(review): heavily elided listing — loops, locals and the commented-out
# exception handler are incomplete; code kept verbatim.
446 def import_data(self, cr, uid, fields, datas, mode='init',
447 current_module=None, noupdate=False, context=None, filename=None):
450 fields = map(lambda x: x.split('/'), fields)
451 logger = netsvc.Logger()
# recursive worker: turn the rows starting at `position` into one value
# dict for this prefix level, returning how many rows were consumed
453 def process_liness(self, datas, prefix, fields_def, position=0):
454 line = datas[position]
461 # Import normal fields
463 for i in range(len(fields)):
465 raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
# ':id' columns hold external (module.xml_id) references
470 if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':id'):
473 if fields_def[field[len(prefix)][:-3]]['type']=='many2many':
475 for word in line[i].split(','):
477 module, xml_id = word.rsplit('.', 1)
479 module, xml_id = current_module, word
480 ir_model_data_obj = self.pool.get('ir.model.data')
481 id = ir_model_data_obj._get_id(cr, uid, module,
483 res_id2 = ir_model_data_obj.read(cr, uid, [id],
484 ['res_id'])[0]['res_id']
486 res_id.append(res_id2)
# (6, 0, ids) = replace the whole many2many relation
488 res_id = [(6, 0, res_id)]
491 module, xml_id = line[i].rsplit('.', 1)
493 module, xml_id = current_module, line[i]
494 ir_model_data_obj = self.pool.get('ir.model.data')
495 id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
496 res_id = ir_model_data_obj.read(cr, uid, [id],
497 ['res_id'])[0]['res_id']
498 row[field[0][:-3]] = res_id or False
# ':lang=' columns are collected per language and written afterwards
500 if (len(field) == len(prefix)+1) and \
501 len(field[len(prefix)].split(':lang=')) == 2:
502 f, lang = field[len(prefix)].split(':lang=')
503 translate.setdefault(lang, {})[f]=line[i] or False
505 if (len(field) == len(prefix)+1) and \
506 (prefix == field[0:len(prefix)]):
# cast the cell according to the field type
507 if fields_def[field[len(prefix)]]['type'] == 'integer':
508 res = line[i] and int(line[i])
509 elif fields_def[field[len(prefix)]]['type'] == 'boolean':
510 res = line[i] and eval(line[i])
511 elif fields_def[field[len(prefix)]]['type'] == 'float':
512 res = line[i] and float(line[i])
513 elif fields_def[field[len(prefix)]]['type'] == 'selection':
515 if isinstance(fields_def[field[len(prefix)]]['selection'],
517 sel = fields_def[field[len(prefix)]]['selection']
519 sel = fields_def[field[len(prefix)]]['selection'](self,
522 if str(key) == line[i]:
524 if line[i] and not res:
525 logger.notifyChannel("import", netsvc.LOG_WARNING,
526 "key '%s' not found in selection field '%s'" % \
527 (line[i], field[len(prefix)]))
# many2one / many2many cells are resolved by name_search on the
# related model; unresolved names are reported, not fatal
528 elif fields_def[field[len(prefix)]]['type']=='many2one':
531 relation = fields_def[field[len(prefix)]]['relation']
532 res2 = self.pool.get(relation).name_search(cr, uid,
533 line[i], [], operator='=')
534 res = (res2 and res2[0][0]) or False
536 warning += ('Relation not found: ' + line[i] + \
537 ' on ' + relation + ' !\n')
538 logger.notifyChannel("import", netsvc.LOG_WARNING,
539 'Relation not found: ' + line[i] + \
540 ' on ' + relation + ' !\n')
541 elif fields_def[field[len(prefix)]]['type']=='many2many':
544 relation = fields_def[field[len(prefix)]]['relation']
545 for word in line[i].split(','):
546 res2 = self.pool.get(relation).name_search(cr,
547 uid, word, [], operator='=')
548 res3 = (res2 and res2[0][0]) or False
550 warning += ('Relation not found: ' + \
551 line[i] + ' on '+relation + ' !\n')
552 logger.notifyChannel("import",
554 'Relation not found: ' + line[i] + \
555 ' on '+relation + ' !\n')
561 res = line[i] or False
562 row[field[len(prefix)]] = res
563 elif (prefix==field[0:len(prefix)]):
564 if field[0] not in todo:
565 todo.append(field[len(prefix)])
567 # Import one2many fields
571 newfd = self.pool.get(fields_def[field]['relation']).fields_get(
572 cr, uid, context=context)
573 res = process_liness(self, datas, prefix + [field], newfd, position)
574 (newrow, max2, w2, translate2, data_id2) = res
575 nbrmax = max(nbrmax, max2)
576 warning = warning + w2
577 reduce(lambda x, y: x and y, newrow)
# (0, 0, vals) = create a new sub-record for the one2many
578 row[field] = (reduce(lambda x, y: x or y, newrow.values()) and \
579 [(0, 0, newrow)]) or []
# consume continuation rows that only fill sub-record columns
581 while (position+i)<len(datas):
583 for j in range(len(fields)):
585 if (len(field2) <= (len(prefix)+1)) and datas[position+i][j]:
590 (newrow, max2, w2, translate2, data_id2) = process_liness(
591 self, datas, prefix+[field], newfd, position+i)
593 if reduce(lambda x, y: x or y, newrow.values()):
594 row[field].append((0, 0, newrow))
596 nbrmax = max(nbrmax, i)
599 for i in range(max(nbrmax, 1)):
602 result = (row, nbrmax, warning, translate, data_id)
# --- main loop over the incoming rows ---
605 fields_def = self.fields_get(cr, uid, context=context)
608 initial_size = len(datas)
# resumable import: remember progress per filename in a pickle file
609 if config.get('import_partial', False) and filename:
610 data = pickle.load(file(config.get('import_partial')))
611 original_value = data.get(filename, 0)
617 (res, other, warning, translate, data_id) = \
618 process_liness(self, datas, [], fields_def)
621 return (-1, res, warning, '')
622 id = self.pool.get('ir.model.data')._update(cr, uid, self._name,
623 current_module, res, xml_id=data_id, mode=mode,
# write the collected per-language values as translations
625 for lang in translate:
626 context2 = context.copy()
627 context2['lang'] = lang
628 self.write(cr, uid, [id], translate[lang], context2)
629 if config.get('import_partial', False) and filename and (not (counter%100)) :
630 data = pickle.load(file(config.get('import_partial')))
631 data[filename] = initial_size - len(datas) + original_value
632 pickle.dump(data, file(config.get('import_partial'),'wb'))
635 #except Exception, e:
636 # logger.notifyChannel("import", netsvc.LOG_ERROR, e)
639 # return (-1, res, e[0], warning)
641 # return (-1, res, e[0], '')
644 # TODO: Send a request with the result and multi-thread !
646 return (done, 0, 0, 0)
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
    """Abstract read(); concrete ORM subclasses must override it.

    Bug fix: the original did ``raise _('...')`` — raising a plain
    string, which is not a valid exception (TypeError on Python >= 2.6).
    Raise a real NotImplementedError carrying the same translated
    message instead.
    """
    raise NotImplementedError(_('The read method is not implemented on this object !'))
# _validate: run every declared _constraints checker over `ids`; collect
# translated messages for the failures and raise a single ValidateError.
# NOTE(review): elided listing (error_msgs init and append are missing);
# code kept verbatim.
651 def _validate(self, cr, uid, ids, context=None):
652 context = context or {}
653 lng = context.get('lang', False) or 'en_US'
654 trans = self.pool.get('ir.translation')
656 for constraint in self._constraints:
657 fun, msg, fields = constraint
658 if not fun(self, cr, uid, ids):
# fall back to the untranslated message when no translation exists
659 translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
661 _("Error occured while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
665 raise except_orm('ValidateError', '\n'.join(error_msgs))
# default_get body is elided in this listing
667 def default_get(self, cr, uid, fields_list, context=None):
def perm_read(self, cr, user, ids, context=None, details=True):
    """Abstract perm_read(); subclasses must override.

    Bug fix: raise a real NotImplementedError instead of raising the
    translated message string itself (invalid on Python >= 2.6).
    """
    raise NotImplementedError(_('The perm_read method is not implemented on this object !'))
def unlink(self, cr, uid, ids, context=None):
    """Abstract unlink(); subclasses must override.

    Bug fix: raise a real NotImplementedError instead of a plain string.
    """
    raise NotImplementedError(_('The unlink method is not implemented on this object !'))
def write(self, cr, user, ids, vals, context=None):
    """Abstract write(); subclasses must override.

    Bug fix: raise a real NotImplementedError instead of a plain string.
    """
    raise NotImplementedError(_('The write method is not implemented on this object !'))
def create(self, cr, user, vals, context=None):
    """Abstract create(); subclasses must override.

    Bug fix: raise a real NotImplementedError instead of a plain string.
    """
    raise NotImplementedError(_('The create method is not implemented on this object !'))
# NOTE(review): elided listing — locals and several branches are missing;
# code kept verbatim.
682 # returns the definition of each field in the object
683 # the optional fields parameter can limit the result to some fields
684 def fields_get(self, cr, user, fields=None, context=None, read_access=True):
688 translation_obj = self.pool.get('ir.translation')
689 model_access_obj = self.pool.get('ir.model.access')
# inherited parents contribute their own field definitions first
690 for parent in self._inherits:
691 res.update(self.pool.get(parent).fields_get(cr, user, fields,
693 for f in self._columns.keys():
694 res[f] = {'type': self._columns[f]._type}
# copy only attributes that are actually set on the column
695 for arg in ('string', 'readonly', 'states', 'size', 'required',
696 'change_default', 'translate', 'help', 'select'):
697 if getattr(self._columns[f], arg):
698 res[f][arg] = getattr(self._columns[f], arg)
# without write access the whole field is forced read-only
700 res[f]['readonly'] = True
701 res[f]['states'] = {}
702 for arg in ('digits', 'invisible','filters'):
703 if hasattr(self._columns[f], arg) \
704 and getattr(self._columns[f], arg):
705 res[f][arg] = getattr(self._columns[f], arg)
707 # translate the field label
708 res_trans = translation_obj._get_source(cr, user,
709 self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
711 res[f]['string'] = res_trans
712 help_trans = translation_obj._get_source(cr, user,
713 self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
715 res[f]['help'] = help_trans
717 if hasattr(self._columns[f], 'selection'):
718 if isinstance(self._columns[f].selection, (tuple, list)):
719 sel = self._columns[f].selection
720 # translate each selection option
722 for (key, val) in sel:
725 val2 = translation_obj._get_source(cr, user,
726 self._name + ',' + f, 'selection',
727 context.get('lang', False) or 'en_US', val)
728 sel2.append((key, val2 or val))
730 res[f]['selection'] = sel
732 # call the 'dynamic selection' function
733 res[f]['selection'] = self._columns[f].selection(self, cr,
# relational fields also expose relation/domain/context metadata
735 if res[f]['type'] in ('one2many', 'many2many',
736 'many2one', 'one2one'):
737 res[f]['relation'] = self._columns[f]._obj
738 res[f]['domain'] = self._columns[f]._domain
739 res[f]['context'] = self._columns[f]._context
742 # filter out fields which aren't in the fields list
# __view_look_dom: walk a view's DOM node, collect the set of field names it
# references (with widget/sub-view attributes) and apply group-based
# visibility and translations in place.
# NOTE(review): elided listing; code kept verbatim.
749 # Overload this method if you need a window title which depends on the context
751 def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
754 def __view_look_dom(self, cr, user, node, context=None):
761 if node.nodeType == node.ELEMENT_NODE and node.localName == 'field':
762 if node.hasAttribute('name'):
765 if node.getAttribute('name') in self._columns:
766 relation = self._columns[node.getAttribute('name')]._obj
768 relation = self._inherit_fields[node.getAttribute('name')][2]._obj
# inline sub-views (form/tree/graph inside a field) are processed
# recursively on the related model
775 for f in node.childNodes:
776 if f.nodeType == f.ELEMENT_NODE and f.localName in ('form', 'tree', 'graph'):
778 xarch, xfields = self.pool.get(relation).__view_look_dom_arch(cr, user, f, context)
779 views[str(f.localName)] = {
783 attrs = {'views': views}
784 if node.hasAttribute('widget') and node.getAttribute('widget')=='selection':
785 # We can not use the domain has it is defined according to the record !
786 attrs['selection'] = self.pool.get(relation).name_search(cr, user, '', context=context)
# non-required selection widgets get an explicit empty choice
787 if not attrs.get('required',False):
788 attrs['selection'].append((False,''))
789 fields[node.getAttribute('name')] = attrs
791 elif node.nodeType==node.ELEMENT_NODE and node.localName in ('form', 'tree'):
792 result = self.view_header_get(cr, user, False, node.localName, context)
794 node.setAttribute('string', result.decode('utf-8'))
# 'groups' attribute: hide the node unless the user is in one of them
795 if node.nodeType == node.ELEMENT_NODE and node.hasAttribute('groups'):
796 if node.getAttribute('groups'):
797 groups = node.getAttribute('groups').split(',')
799 access_pool = self.pool.get('ir.model.access')
801 readonly = readonly or access_pool.check_groups(cr, user, group)
803 node.setAttribute('invisible', '1')
804 node.removeAttribute('groups')
# translate user-visible attributes when a language is in context
806 if node.nodeType == node.ELEMENT_NODE:
808 if ('lang' in context) and not result:
809 if node.hasAttribute('string') and node.getAttribute('string'):
810 trans = tools.translate(cr, self._name, 'view', context['lang'], node.getAttribute('string').encode('utf8'))
812 node.setAttribute('string', trans.decode('utf8'))
813 if node.hasAttribute('sum') and node.getAttribute('sum'):
814 trans = tools.translate(cr, self._name, 'view', context['lang'], node.getAttribute('sum').encode('utf8'))
816 node.setAttribute('sum', trans.decode('utf8'))
819 for f in node.childNodes:
820 fields.update(self.__view_look_dom(cr, user, f, context))
# __view_look_dom_arch: post-process a whole view DOM — apply role checks to
# workflow buttons, serialise the arch back to XML, and return it together
# with the full field definitions used by the view.
# NOTE(review): elided listing; code kept verbatim.
823 def __view_look_dom_arch(self, cr, user, node, context=None):
826 fields_def = self.__view_look_dom(cr, user, node, context=context)
828 buttons = xpath.Evaluate('//button', node)
830 for button in buttons:
831 if button.getAttribute('type') == 'object':
# workflow buttons: disabled unless the user holds a required role
836 if user != 1: # admin user has all roles
837 serv = netsvc.LocalService('object_proxy')
838 user_roles = serv.execute_cr(cr, user, 'res.users', 'read', [user], ['roles_id'])[0]['roles_id']
# WARNING(review): signal name interpolated into SQL with %
839 cr.execute("select role_id from wkf_transition where signal='%s'" % button.getAttribute('name'))
840 roles = cr.fetchall()
843 ok = ok and serv.execute_cr(cr, user, 'res.roles', 'check', user_roles, role[0])
846 button.setAttribute('readonly', '1')
848 button.setAttribute('readonly', '0')
850 arch = node.toxml(encoding="utf-8").replace('\t', '')
851 fields = self.fields_get(cr, user, fields_def.keys(), context)
# view-level attributes (widget, sub-views) override the bare definitions
852 for field in fields_def:
853 fields[field].update(fields_def[field])
856 def __get_default_calendar_view(self):
857 """Generate a default calendar view (For internal use only).
# builds the arch incrementally, picking conventional column names
# (user_id/partner_id for colour, date_stop/date_end/... for the end date)
860 arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
861 '<calendar string="%s" date_start="%s"') % (self._description, self._date_name)
863 if 'user_id' in self._columns:
864 arch += ' color="user_id"'
866 elif 'partner_id' in self._columns:
867 arch += ' color="partner_id"'
869 if 'date_stop' in self._columns:
870 arch += ' date_stop="date_stop"'
872 elif 'date_end' in self._columns:
873 arch += ' date_stop="date_end"'
875 elif 'date_delay' in self._columns:
876 arch += ' date_delay="date_delay"'
878 elif 'planned_hours' in self._columns:
879 arch += ' date_delay="planned_hours"'
# the record's display field is the calendar entry label
882 ' <field name="%s"/>\n'
883 '</calendar>') % (self._rec_name)
# fields_view_get: resolve the view to display (explicit view_id, best match
# from ir_ui_view, or a generated default), apply all inheriting views
# recursively, post-process the DOM and optionally attach toolbar actions.
# NOTE(review): heavily elided listing; code kept verbatim.
888 # if view_id, view_type is not required
890 def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False):
# _inherit_apply: merge one inheriting view (xpath/position specs) into the
# source arch, returning the combined XML string
893 def _inherit_apply(src, inherit):
894 def _find(node, node2):
895 if node2.nodeType == node2.ELEMENT_NODE and node2.localName == 'xpath':
896 res = xpath.Evaluate(node2.getAttribute('expr'), node)
897 return res and res[0]
# non-xpath spec: match on tag name plus every attribute except
# 'position'
899 if node.nodeType == node.ELEMENT_NODE and node.localName == node2.localName:
901 for attr in node2.attributes.keys():
902 if attr == 'position':
904 if node.hasAttribute(attr):
905 if node.getAttribute(attr)==node2.getAttribute(attr):
910 for child in node.childNodes:
911 res = _find(child, node2)
916 doc_src = dom.minidom.parseString(src)
917 doc_dest = dom.minidom.parseString(inherit)
918 toparse = doc_dest.childNodes
920 node2 = toparse.pop(0)
921 if not node2.nodeType == node2.ELEMENT_NODE:
923 if node2.localName == 'data':
924 toparse += node2.childNodes
926 node = _find(doc_src, node2)
# position attribute drives how the matched node is modified:
# replace / inside-append / after / before
929 if node2.hasAttribute('position'):
930 pos = node2.getAttribute('position')
932 parent = node.parentNode
933 for child in node2.childNodes:
934 if child.nodeType == child.ELEMENT_NODE:
935 parent.insertBefore(child, node)
936 parent.removeChild(node)
938 sib = node.nextSibling
939 for child in node2.childNodes:
940 if child.nodeType == child.ELEMENT_NODE:
942 node.appendChild(child)
944 node.parentNode.insertBefore(child, sib)
946 node.parentNode.insertBefore(child, node)
948 raise AttributeError(_('Unknown position in inherited view %s !') % pos)
# no match: rebuild the spec tag for the error message
951 ' %s="%s"' % (attr, node2.getAttribute(attr))
952 for attr in node2.attributes.keys()
953 if attr != 'position'
955 tag = "<%s%s>" % (node2.localName, attrs)
956 raise AttributeError(_("Couldn't find tag '%s' in parent view !") % tag)
957 return doc_src.toxml(encoding="utf-8").replace('\t', '')
959 result = {'type': view_type, 'model': self._name}
966 where = (model and (" and model='%s'" % (self._name,))) or ''
967 cr.execute('SELECT arch,name,field_parent,id,type,inherit_id FROM ir_ui_view WHERE id=%d'+where, (view_id,))
970 arch,name,field_parent,id,type,inherit_id
# fallback query: best matching view for (model, type) by priority
977 ORDER BY priority''', (self._name, view_type))
978 sql_res = cr.fetchone()
982 view_id = ok or sql_res[3]
985 # if a view was found
987 result['type'] = sql_res[4]
988 result['view_id'] = sql_res[3]
989 result['arch'] = sql_res[0]
991 def _inherit_apply_rec(result, inherit_id):
992 # get all views which inherit from (ie modify) this view
993 cr.execute('select arch,id from ir_ui_view where inherit_id=%d and model=%s order by priority', (inherit_id, self._name))
994 sql_inherit = cr.fetchall()
995 for (inherit, id) in sql_inherit:
996 result = _inherit_apply(result, inherit)
# depth-first: a child view's own inheritors apply right after it
997 result = _inherit_apply_rec(result, id)
1000 result['arch'] = _inherit_apply_rec(result['arch'], sql_res[3])
1002 result['name'] = sql_res[1]
1003 result['field_parent'] = sql_res[2] or False
1005 # otherwise, build some kind of default view
1006 if view_type == 'form':
1007 res = self.fields_get(cr, user, context=context)
1008 xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
1009 '''<form string="%s">''' % (self._description,)
1011 if res[x]['type'] not in ('one2many', 'many2many'):
1012 xml += '<field name="%s"/>' % (x,)
1013 if res[x]['type'] == 'text':
1016 elif view_type == 'tree':
1017 xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
1018 '''<tree string="%s"><field name="%s"/></tree>''' \
1019 % (self._description, self._rec_name)
1020 elif view_type == 'calendar':
1021 xml = self.__get_default_calendar_view()
1024 result['arch'] = xml
1025 result['name'] = 'default'
1026 result['field_parent'] = False
1027 result['view_id'] = 0
# final pass: parse the arch and run the DOM post-processing above
1029 doc = dom.minidom.parseString(result['arch'].encode('utf-8'))
1030 xarch, xfields = self.__view_look_dom_arch(cr, user, doc, context=context)
1031 result['arch'] = xarch
1032 result['fields'] = xfields
# toolbar: strip heavy report payloads, then gather print/action/relate
# entries from ir.values
1036 for key in ('report_sxw_content', 'report_rml_content',
1037 'report_sxw', 'report_rml',
1038 'report_sxw_content_data', 'report_rml_content_data'):
1042 ir_values_obj = self.pool.get('ir.values')
1043 resprint = ir_values_obj.get(cr, user, 'action',
1044 'client_print_multi', [(self._name, False)], False,
1046 resaction = ir_values_obj.get(cr, user, 'action',
1047 'client_action_multi', [(self._name, False)], False,
1050 resrelate = ir_values_obj.get(cr, user, 'action',
1051 'client_action_relate', [(self._name, False)], False,
1053 resprint = map(clean, resprint)
1054 resaction = map(clean, resaction)
1055 resaction = filter(lambda x: not x.get('multi', False), resaction)
1056 resprint = filter(lambda x: not x.get('multi', False), resprint)
1057 resrelate = map(lambda x: x[2], resrelate)
1059 for x in resprint+resaction+resrelate:
1060 x['string'] = x['name']
1062 result['toolbar'] = {
1064 'action': resaction,
# public alias for the private (name-mangled) helper
1069 _view_look_dom_arch = __view_look_dom_arch
# search_count: delegate to search(count=True); some implementations return
# a list instead of an int, hence the normalisation below (tail elided).
1071 def search_count(self, cr, user, args, context=None):
1074 res = self.search(cr, user, args, context=context, count=True)
1075 if isinstance(res, list):
def search(self, cr, user, args, offset=0, limit=None, order=None,
        context=None, count=False):
    """Abstract search(); subclasses must override.

    Bug fix: raise a real NotImplementedError instead of raising the
    translated message string itself (invalid on Python >= 2.6).
    """
    raise NotImplementedError(_('The search method is not implemented on this object !'))
def name_get(self, cr, user, ids, context=None):
    """Abstract name_get(); subclasses must override.

    Bug fix: raise a real NotImplementedError instead of a plain string.
    """
    raise NotImplementedError(_('The name_get method is not implemented on this object !'))
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=None):
    """Abstract name_search(); subclasses must override.

    Bug fix: raise a real NotImplementedError instead of a plain string.
    """
    raise NotImplementedError(_('The name_search method is not implemented on this object !'))
def copy(self, cr, uid, id, default=None, context=None):
    """Abstract copy(); subclasses must override.

    Bug fix: raise a real NotImplementedError instead of a plain string.
    """
    raise NotImplementedError(_('The copy method is not implemented on this object !'))
1093 class orm_memory(orm_template):
1094 _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count']
1095 _inherit_fields = {}
1100 def __init__(self, cr):
1101 super(orm_memory, self).__init__(cr)
1105 cr.execute('delete from wkf_instance where res_type=%s', (self._name,))
1107 def vaccum(self, cr, uid):
1109 if self.check_id % self._check_time:
1112 max = time.time() - self._max_hours * 60 * 60
1113 for id in self.datas:
1114 if self.datas[id]['internal.date_access'] < max:
1116 self.unlink(cr, uid, tounlink)
1117 if len(self.datas)>self._max_count:
1118 sorted = map(lambda x: (x[1]['internal.date_access'], x[0]), self.datas.items())
1120 ids = map(lambda x: x[1], sorted[:len(self.datas)-self._max_count])
1121 self.unlink(cr, uid, ids)
    def read(self, cr, user, ids, fields_to_read=None, context=None, load='_classic_read'):
        """Read field values for ids straight from the in-memory store."""
        if not fields_to_read:
            fields_to_read = self._columns.keys()
        # Accept a single id as well as a list of ids.
        if isinstance(ids, (int, long)):
        for f in fields_to_read:
            if id in self.datas:
                r[f] = self.datas[id].get(f, False)
                # With bin_size in context, binary fields report size only.
                if r[f] and isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
            if id in self.datas:
                # Touch the LRU timestamp consumed by vaccum().
                self.datas[id]['internal.date_access'] = time.time()
        # Fields whose `load` attribute is false need a second pass through
        # their field-specific getter.
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
        for f in fields_post:
            res2 = self._columns[f].get_memory(cr, self, ids, f, user, context=context, values=result)
            for record in result:
                record[f] = res2[record['id']]
        # Mirror the input shape: single id in, single record out.
        if isinstance(ids, (int, long)):
    def write(self, cr, user, ids, vals, context=None):
        """Update in-memory records and fire the workflow write trigger."""
            # Classic columns are stored directly in the record dict...
            if self._columns[field]._classic_write:
                vals2[field] = vals[field]
                # ...the rest go through their field-specific setter below.
                upd_todo.append(field)
        self.datas[id_new].update(vals2)
        # Refresh the LRU timestamp consumed by vaccum().
        self.datas[id_new]['internal.date_access'] = time.time()
        for field in upd_todo:
            self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
        # Run validation constraints, then notify the workflow engine.
        self._validate(cr, user, [id_new], context)
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_write(user, self._name, id_new, cr)
        self.vaccum(cr, user)
    def create(self, cr, user, vals, context=None):
        """Create a new in-memory record and fire the workflow create trigger."""
        # Allocate the next sequential in-memory id.
        id_new = self.next_id
        # Collect columns missing from vals...
        for f in self._columns.keys():
        # ...and complete vals with the model defaults for them.
        vals.update(self.default_get(cr, user, default, context))
            # Classic columns are stored directly in the record dict...
            if self._columns[field]._classic_write:
                vals2[field] = vals[field]
                # ...the rest go through their field-specific setter below.
                upd_todo.append(field)
        self.datas[id_new] = vals2
        self.datas[id_new]['internal.date_access'] = time.time()
        for field in upd_todo:
            self._columns[field].set_memory(cr, self, id_new, field, vals[field], user, context)
        # Run validation constraints, then notify the workflow engine.
        self._validate(cr, user, [id_new], context)
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_create(user, self._name, id_new, cr)
        self.vaccum(cr, user)
    def default_get(self, cr, uid, fields_list, context=None):
        """Return default values for fields_list.

        Precedence: the model's own _defaults, then user-defined ir.values
        entries, then ``default_<field>`` keys found in the context.
        """
        # get the default values defined on the model itself
        for f in fields_list:
            if f in self._defaults:
                value[f] = self._defaults[f](self, cr, uid, context)
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type in ('many2one', 'one2one'):
                    obj = self.pool.get(fld_def._obj)
                    # drop defaults pointing to records that no longer exist
                    if not obj.search(cr, uid, [('id', '=', field_value)]):
                if fld_def._type in ('many2many'):
                    obj = self.pool.get(fld_def._obj)
                    # keep only the still-existing target ids
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type in ('one2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            # validate nested many2one/one2one references too
                            if obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                value[field] = field_value
        # context-provided defaults (``default_<field>``) win over everything
        for key in context or {}:
            if key.startswith('default_'):
                value[key[8:]] = context[key]
1252 def search(self, cr, user, args, offset=0, limit=None, order=None,
1253 context=None, count=False):
1254 return self.datas.keys()
    def unlink(self, cr, uid, ids, context=None):
        """Delete in-memory records and their workflow instances."""
            if id in self.datas:
        # NOTE(review): ids are string-joined into the SQL statement;
        # assumes they are integers at this point -- confirm.
        cr.execute('delete from wkf_instance where res_type=%s and res_id in ('+','.join(map(str, ids))+')', (self._name, ))
    def perm_read(self, cr, user, ids, context=None, details=True):
        """Return synthetic audit metadata: in-memory models keep no real
        creation/modification trail."""
            'create_uid': (user, 'Root'),
            'create_date': time.strftime('%Y-%m-%d %H:%M:%S'),
            'write_date': False,
class orm(orm_template):
    # Postgresql-backed model implementation.
    _sql_constraints = []
    # ORM entry-point names; NOTE(review): presumably protected from being
    # shadowed by field names -- confirm against orm_template.
    _protected = ['read','write','create','default_get','perm_read','unlink','fields_get','fields_view_get','search','name_get','distinct_field_get','name_search','copy','import_data','search_count']
    def _parent_store_compute(self, cr):
        """Recompute the nested-set columns (parent_left/parent_right) for
        the whole table with a recursive depth-first traversal."""
        logger = netsvc.Logger()
        logger.notifyChannel('init', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
        def browse_rec(root, pos=0):
            # Select the children of `root`; a null root selects the roots.
                where = self._parent_name+'='+str(root)
                where = self._parent_name+' IS NULL'
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            childs = cr.fetchall()
                # Recurse first so pos2 ends up past all descendants.
                pos2 = browse_rec(id[0], pos2)
            cr.execute('update '+self._table+' set parent_left=%d, parent_right=%d where id=%d', (pos,pos2,root))
    def _auto_init(self, cr, context={}):
        """Create or upgrade the database schema for this model.

        Creates the table when missing, then adds/patches columns,
        NOT NULL constraints, indexes, foreign keys, many2many relation
        tables and declared SQL constraints so the database matches the
        declared _columns.

        NOTE(review): ``context={}`` is a mutable default argument; kept
        as-is because callers rely on the existing signature.
        """
        store_compute = False
        logger = netsvc.Logger()
        # make sure every column has its ir.model.fields row
        self._field_create(cr, context=context)
        if not hasattr(self, "_auto") or self._auto:
            # create the table itself when it does not exist yet
            cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname='%s'" % self._table)
                cr.execute("CREATE TABLE \"%s\" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITH OIDS" % self._table)
            if self._parent_store:
                # nested-set support needs parent_left/parent_right columns
                cr.execute("""SELECT c.relname
                    FROM pg_class c, pg_attribute a
                    WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                    """, (self._table, 'parent_left'))
                    if 'parent_left' not in self._columns:
                        logger.notifyChannel('init', netsvc.LOG_ERROR, 'create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)' % (self._table, ))
                    if 'parent_right' not in self._columns:
                        logger.notifyChannel('init', netsvc.LOG_ERROR, 'create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)' % (self._table, ))
                    if self._columns[self._parent_name].ondelete<>'cascade':
                        logger.notifyChannel('init', netsvc.LOG_ERROR, "the columns %s on object must be set as ondelete='cascasde'" % (self._name, self._parent_name))
                    cr.execute("ALTER TABLE \"%s\" ADD COLUMN \"%s\" INTEGER" % (self._table, 'parent_left'))
                    cr.execute("ALTER TABLE \"%s\" ADD COLUMN \"%s\" INTEGER" % (self._table, 'parent_right'))
                    # freshly added columns must be filled at the end
                    store_compute = True
            if self._log_access:
                # audit-trail columns required on every _log_access model
                    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
                    'create_date': 'TIMESTAMP',
                    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
                    'write_date': 'TIMESTAMP'
                    FROM pg_class c, pg_attribute a
                    WHERE c.relname='%s' AND a.attname='%s' AND c.oid=a.attrelid
                    """ % (self._table, k))
                    cr.execute("ALTER TABLE \"%s\" ADD COLUMN \"%s\" %s" %
                        (self._table, k, logs[k]))

            # iterate on the database columns to drop the NOT NULL constraints
            # of fields which were required but have been removed
                "SELECT a.attname, a.attnotnull "\
                "FROM pg_class c, pg_attribute a "\
                "WHERE c.oid=a.attrelid AND c.relname='%s'" % self._table)
            db_columns = cr.dictfetchall()
            for column in db_columns:
                if column['attname'] not in ('id', 'oid', 'tableoid', 'ctid', 'xmin', 'xmax', 'cmin', 'cmax'):
                    if column['attnotnull'] and column['attname'] not in self._columns:
                        cr.execute("ALTER TABLE \"%s\" ALTER COLUMN \"%s\" DROP NOT NULL" % (self._table, column['attname']))

            # iterate on the "object columns"
            for k in self._columns:
                # reserved/audit columns are managed above, never here
                if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
                    #raise _('Can not define a column %s. Reserved keyword !') % (k,)
                f = self._columns[k]
                if isinstance(f, fields.one2many):
                    # the actual foreign key lives on the *other* table
                    cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,))
                    cr.execute("SELECT count(*) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id))
                    res = cr.fetchone()[0]
                        cr.execute("ALTER TABLE \"%s\" ADD FOREIGN KEY (%s) REFERENCES \"%s\" ON DELETE SET NULL" % (self._obj, f._fields_id, f._table))
                elif isinstance(f, fields.many2many):
                    # create the relation table plus one index per side
                    cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (f._rel,))
                    if not cr.dictfetchall():
                        #FIXME: Remove this try/except
                            ref = self.pool.get(f._obj)._table
                        except AttributeError:
                            # target model not loaded yet: derive its table name
                            ref = f._obj.replace('.', '_')
                        cr.execute("CREATE TABLE \"%s\" (\"%s\" INTEGER NOT NULL REFERENCES \"%s\" ON DELETE CASCADE, \"%s\" INTEGER NOT NULL REFERENCES \"%s\" ON DELETE CASCADE) WITH OIDS"%(f._rel, f._id1, self._table, f._id2, ref))
                        cr.execute("CREATE INDEX \"%s_%s_index\" ON \"%s\" (\"%s\")" % (f._rel, f._id1, f._rel, f._id1))
                        cr.execute("CREATE INDEX \"%s_%s_index\" ON \"%s\" (\"%s\")" % (f._rel, f._id2, f._rel, f._id2))
                    # scalar column: fetch its current database definition
                    cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size FROM pg_class c,pg_attribute a,pg_type t WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid AND a.atttypid=t.oid", (self._table, k))
                    res = cr.dictfetchall()
                        if not isinstance(f, fields.function) or f.store:
                            # add the missing field
                            cr.execute("ALTER TABLE \"%s\" ADD COLUMN \"%s\" %s" % (self._table, k, get_pg_type(f)[1]))
                            if not create and k in self._defaults:
                                # initialise existing rows with the default value
                                default = self._defaults[k](self, cr, 1, {})
                                    cr.execute("UPDATE \"%s\" SET \"%s\"=NULL" % (self._table, k))
                                    cr.execute("UPDATE \"%s\" SET \"%s\"='%s'" % (self._table, k, default))
                            if isinstance(f, fields.function):
                                # stored function field: compute values in batches of 40
                                cr.execute('select id from '+self._table)
                                ids_lst = map(lambda x: x[0], cr.fetchall())
                                    ids_lst = ids_lst[40:]
                                    res = f.get(cr, self, iids, k, 1, {})
                                    for key,val in res.items():
                                        if (val<>False) or (type(val)<>bool):
                                            cr.execute("UPDATE \"%s\" SET \"%s\"='%s' where id=%d"% (self._table, k, val, key))
                                        # cr.execute("UPDATE \"%s\" SET \"%s\"=NULL where id=%d"% (self._table, k, key))

                            # and add constraints if needed
                            if isinstance(f, fields.many2one):
                                #FIXME: Remove this try/except
                                    ref = self.pool.get(f._obj)._table
                                except AttributeError:
                                    ref = f._obj.replace('.', '_')
                                # ir_actions is inherited so foreign key doesn't work on it
                                if ref != 'ir_actions':
                                    cr.execute("ALTER TABLE \"%s\" ADD FOREIGN KEY (\"%s\") REFERENCES \"%s\" ON DELETE %s" % (self._table, k, ref, f.ondelete))
                                cr.execute("CREATE INDEX \"%s_%s_index\" ON \"%s\" (\"%s\")" % (self._table, k, self._table, k))
                                    cr.execute("ALTER TABLE \"%s\" ALTER COLUMN \"%s\" SET NOT NULL" % (self._table, k))
                                    logger.notifyChannel('init', netsvc.LOG_WARNING, 'WARNING: unable to set column %s of table %s not null !\nTry to re-run: openerp-server.py --update=module\nIf it doesn\'t work, update records and execute manually:\nALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % (k, self._table, self._table, k))
                        # the column already exists: reconcile its definition
                        f_pg_type = f_pg_def['typname']
                        f_pg_size = f_pg_def['size']
                        f_pg_notnull = f_pg_def['attnotnull']
                        if isinstance(f, fields.function) and not f.store:
                            logger.notifyChannel('init', netsvc.LOG_WARNING, 'column %s (%s) in table %s was converted to a function !\nYou should remove this column from your database.' % (k, f.string, self._table))
                        f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
                            if f_pg_type != f_obj_type:
                                logger.notifyChannel('init', netsvc.LOG_WARNING, "column '%s' in table '%s' has changed type (DB = %s, def = %s) !" % (k, self._table, f_pg_type, f._type))
                            if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size != f.size:
                                # columns with the name 'type' cannot be changed for an unknown reason?!
                                if f_pg_size > f.size:
                                    logger.notifyChannel('init', netsvc.LOG_WARNING, "column '%s' in table '%s' has changed size (DB = %d, def = %d), DB size will be kept !" % (k, self._table, f_pg_size, f.size))
                                # If actual DB size is < than new
                                # We update varchar size, otherwise, we keep DB size
                                # to avoid truncated string...
                                if f_pg_size < f.size:
                                    cr.execute("ALTER TABLE \"%s\" RENAME COLUMN \"%s\" TO temp_change_size" % (self._table, k))
                                    cr.execute("ALTER TABLE \"%s\" ADD COLUMN \"%s\" VARCHAR(%d)" % (self._table, k, f.size))
                                    cr.execute("UPDATE \"%s\" SET \"%s\"=temp_change_size::VARCHAR(%d)" % (self._table, k, f.size))
                                    cr.execute("ALTER TABLE \"%s\" DROP COLUMN temp_change_size" % (self._table,))
                            if f_pg_type == 'date' and f._type == 'datetime':
                                # widen date -> timestamp through a temporary column
                                cr.execute("ALTER TABLE \"%s\" RENAME COLUMN \"%s\" TO temp_change_type" % (self._table, k))
                                cr.execute("ALTER TABLE \"%s\" ADD COLUMN \"%s\" TIMESTAMP " % (self._table, k))
                                cr.execute("UPDATE \"%s\" SET \"%s\"=temp_change_type::TIMESTAMP" % (self._table, k))
                                cr.execute("ALTER TABLE \"%s\" DROP COLUMN temp_change_type" % (self._table,))
                            # if the field is required and hasn't got a NOT NULL constraint
                            if f.required and f_pg_notnull == 0:
                                # set the field to the default value if any
                                if k in self._defaults:
                                    default = self._defaults[k](self, cr, 1, {})
                                    if not (default is False):
                                        cr.execute("UPDATE \"%s\" SET \"%s\"='%s' WHERE %s is NULL" % (self._table, k, default, k))
                                # add the NOT NULL constraint
                                    cr.execute("ALTER TABLE \"%s\" ALTER COLUMN \"%s\" SET NOT NULL" % (self._table, k))
                                    logger.notifyChannel('init', netsvc.LOG_WARNING, 'unable to set a NOT NULL constraint on column %s of the %s table !\nIf you want to have it, you should update the records and execute manually:\nALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % (k, self._table, self._table, k))
                            elif not f.required and f_pg_notnull == 1:
                                cr.execute("ALTER TABLE \"%s\" ALTER COLUMN \"%s\" DROP NOT NULL" % (self._table, k))
                            # keep the single-column index in sync with f.select
                            cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = '%s_%s_index' and tablename = '%s'" % (self._table, k, self._table))
                            res = cr.dictfetchall()
                            if not res and f.select:
                                cr.execute("CREATE INDEX \"%s_%s_index\" ON \"%s\" (\"%s\")" % (self._table, k, self._table, k))
                            if res and not f.select:
                                cr.execute("DROP INDEX \"%s_%s_index\"" % (self._table, k))
                            if isinstance(f, fields.many2one):
                                # verify the ON DELETE rule of the existing foreign key
                                ref = self.pool.get(f._obj)._table
                                if ref != 'ir_actions':
                                    cr.execute('SELECT confdeltype, conname FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, ' \
                                            'pg_attribute as att1, pg_attribute as att2 ' \
                                            'WHERE con.conrelid = cl1.oid ' \
                                            'AND cl1.relname = %s ' \
                                            'AND con.confrelid = cl2.oid ' \
                                            'AND cl2.relname = %s ' \
                                            'AND array_lower(con.conkey, 1) = 1 ' \
                                            'AND con.conkey[1] = att1.attnum ' \
                                            'AND att1.attrelid = cl1.oid ' \
                                            'AND att1.attname = %s ' \
                                            'AND array_lower(con.confkey, 1) = 1 ' \
                                            'AND con.confkey[1] = att2.attnum ' \
                                            'AND att2.attrelid = cl2.oid ' \
                                            'AND att2.attname = %s ' \
                                            'AND con.contype = \'f\'', (self._table, ref, k, 'id'))
                                    res = cr.dictfetchall()
                                        if res[0]['confdeltype'] != confdeltype.get(f.ondelete.upper(), 'a'):
                                            # recreate the FK with the declared ondelete rule
                                            cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res[0]['conname'] + '"')
                                            cr.execute('ALTER TABLE "' + self._table + '" ADD FOREIGN KEY ("' + k + '") REFERENCES "' + ref + '" ON DELETE ' + f.ondelete)
            # _auto disabled: only check whether the relation already exists
            cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname='%s'" % self._table)
            create = not bool(cr.fetchone())

        # install the declared SQL constraints
        for (key, con, _) in self._sql_constraints:
            cr.execute("SELECT conname FROM pg_constraint where conname='%s_%s'" % (self._table, key))
            if not cr.dictfetchall():
                    cr.execute('alter table \"%s\" add constraint \"%s_%s\" %s' % (self._table, self._table, key, con,))
                    logger.notifyChannel('init', netsvc.LOG_WARNING, 'unable to add \'%s\' constraint on table %s !\n If you want to have it, you should update the records and execute manually:\nALTER table %s ADD CONSTRAINT %s_%s %s' % (con, self._table, self._table, self._table, key, con,))

        # execute raw SQL declared on the model, statement by statement
        if hasattr(self, "_sql"):
            for line in self._sql.split(';'):
                line2 = line.replace('\n', '').strip()
            # fill parent_left/parent_right added earlier in this run
            self._parent_store_compute(cr)
    def __init__(self, cr):
        """Register the model: stored-function-field triggers, SQL
        constraint messages, manual (ir.model.fields) columns and the
        inherited-field cache."""
        super(orm, self).__init__(cr)
        # per-model copy so the class-level _columns dict stays pristine
        self._columns = self._columns.copy()
        # stored function fields feed the pool-wide _store_function registry
        f = filter(lambda a: isinstance(self._columns[a], fields.function) and self._columns[a].store, self._columns)
        for store_field in f:
            if not self._columns[store_field].store == True:
                # store given as a dict of per-model trigger definitions
                dict_store = self._columns[store_field].store
                key = dict_store.keys()
                tuple_store = self._name, store_field, self._columns[store_field]._fnct.__name__, tuple(dict_store[i][0]), dict_store[i][1], i
                list_data.append(tuple_store)
                #tuple_store=self._name,store_field,self._columns[store_field]._fnct.__name__,tuple(dict_store[key[0]][0]),dict_store[key[0]][1]
            # register the trigger on the pool, de-duplicating per model
            if l[5] in self.pool._store_function.keys():
                self.pool._store_function[l[5]].append(l)
                temp_list = list(set(self.pool._store_function[l[5]]))
                self.pool._store_function[l[5]] = temp_list
                list_store.append(l)
                self.pool._store_function[l[5]] = list_store
        # map constraint name -> user message for integrity-error reporting
        for (key, _, msg) in self._sql_constraints:
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields
        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
            cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
            for field in cr.dictfetchall():
                # never overwrite a column declared in code
                if field['name'] in self._columns:
                    'string': field['field_description'],
                    'required': bool(field['required']),
                    'readonly': bool(field['readonly']),
                    'domain': field['domain'] or None,
                    'size': field['size'],
                    'ondelete': field['on_delete'],
                    'translate': (field['translate']),
                    #'select': int(field['select_level'])
                if field['ttype'] == 'selection':
                    # NOTE(review): eval() of the stored selection string;
                    # assumed trusted since it comes from ir_model_fields --
                    # confirm.
                    self._columns[field['name']] = getattr(fields, field['ttype'])(eval(field['selection']), **attrs)
                elif field['ttype'] == 'many2one':
                    self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], **attrs)
                elif field['ttype'] == 'one2many':
                    self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], field['relation_field'], **attrs)
                elif field['ttype'] == 'many2many':
                    # build a randomised relation-table name for the m2m
                    _rel1 = field['relation'].replace('.', '_')
                    _rel2 = field['model'].replace('.', '_')
                    _rel_name = 'x_%s_%s_%s_rel' %(_rel1, _rel2, random.randint(0, 10000))
                    self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                    self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
        self._inherits_reload()
        if not self._sequence:
            self._sequence = self._table+'_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        # reset per-field caches for this registry
        for f in self._columns:
            self._columns[f].restart()
    def default_get(self, cr, uid, fields_list, context=None):
        """Return default values for fields_list.

        Precedence: defaults of _inherits parents, the model's own
        _defaults / property fields, user-defined ir.values entries, and
        finally ``default_<field>`` keys found in the context.
        """
        # get the default values for the inherited fields
        for t in self._inherits.keys():
            value.update(self.pool.get(t).default_get(cr, uid, fields_list,
        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                value[f] = self._defaults[f](self, cr, uid, context)
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            if isinstance(fld_def, fields.property):
                # property fields take their default from ir.property
                property_obj = self.pool.get('ir.property')
                definition_id = fld_def._field_get(cr, uid, self._name, f)
                nid = property_obj.search(cr, uid, [('fields_id', '=',
                    definition_id), ('res_id', '=', False)])
                prop_value = property_obj.browse(cr, uid, nid[0],
                        context=context).value
                value[f] = (prop_value and int(prop_value.split(',')[1])) \
        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type in ('many2one', 'one2one'):
                    obj = self.pool.get(fld_def._obj)
                    # drop defaults pointing to records that no longer exist
                    if not obj.search(cr, uid, [('id', '=', field_value)]):
                if fld_def._type in ('many2many'):
                    obj = self.pool.get(fld_def._obj)
                    # keep only the still-existing target ids
                    for i in range(len(field_value)):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type in ('one2many'):
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value)):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            # validate nested many2one/one2one references too
                            if obj._columns[field2]._type in ('many2one', 'one2one'):
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                value[field] = field_value
        # context-provided defaults (``default_<field>``) win over everything
        for key in context or {}:
            if key.startswith('default_'):
                value[key[8:]] = context[key]
    # Update the objects that use this one, refreshing their _inherits fields
1694 def _inherits_reload_src(self):
1695 for obj in self.pool.obj_pool.values():
1696 if self._name in obj._inherits:
1697 obj._inherits_reload()
    def _inherits_reload(self):
        """Rebuild _inherit_fields from the parents declared in _inherits,
        then propagate the change to models inheriting from this one."""
        for table in self._inherits:
            # fields the parent itself already inherits
            res.update(self.pool.get(table)._inherit_fields)
            # the parent's own columns, keyed by (parent, link field, column)
            for col in self.pool.get(table)._columns.keys():
                res[col] = (table, self._inherits[table], self.pool.get(table)._columns[col])
            for col in self.pool.get(table)._inherit_fields.keys():
                res[col] = (table, self._inherits[table], self.pool.get(table)._inherit_fields[col][2])
        self._inherit_fields = res
        self._inherits_reload_src()
1710 def fields_get(self, cr, user, fields=None, context=None):
1711 read_access = self.pool.get('ir.model.access').check(cr, user, self._name, 'write', raise_exception=False)
1712 return super(orm, self).fields_get(cr, user, fields, context, read_access)
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """Read records after a read-access check; accepts a single id or
        a list and mirrors that shape in the result."""
        self.pool.get('ir.model.access').check(cr, user, self._name, 'read')
            # default to every own and inherited field
            fields = self._columns.keys() + self._inherit_fields.keys()
        if isinstance(ids, (int, long)):
        result = self._read_flat(cr, user, select, fields, context, load)
            for key, v in r.items():
        if isinstance(ids, (int, long)):
1732 def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
1738 if fields_to_read == None:
1739 fields_to_read = self._columns.keys()
1741 # construct a clause for the rules :
1742 d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)
1744 # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
1745 fields_pre = filter(lambda x: x in self._columns and getattr(self._columns[x], '_classic_write'), fields_to_read) + self._inherits.values()
1749 def convert_field(f):
1750 if f in ('create_date', 'write_date'):
1751 return "date_trunc('second', %s) as %s" % (f, f)
1752 if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
1753 return "length(%s) as %s" % (f,f)
1754 return '"%s"' % (f,)
1755 #fields_pre2 = map(lambda x: (x in ('create_date', 'write_date')) and ('date_trunc(\'second\', '+x+') as '+x) or '"'+x+'"', fields_pre)
1756 fields_pre2 = map(convert_field, fields_pre)
1757 for i in range(0, len(ids), cr.IN_MAX):
1758 sub_ids = ids[i:i+cr.IN_MAX]
1760 cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) AND %s ORDER BY %s' % \
1761 (','.join(fields_pre2 + ['id']), self._table,
1762 ','.join([str(x) for x in sub_ids]), d1,
1764 if not cr.rowcount == len({}.fromkeys(sub_ids)):
1765 raise except_orm(_('AccessError'),
1766 _('You try to bypass an access rule (Document type: %s).') % self._description)
1768 cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) ORDER BY %s' % \
1769 (','.join(fields_pre2 + ['id']), self._table,
1770 ','.join([str(x) for x in sub_ids]),
1772 res.extend(cr.dictfetchall())
1774 res = map(lambda x: {'id': x}, ids)
1776 for f in fields_pre:
1777 if self._columns[f].translate:
1778 ids = map(lambda x: x['id'], res)
1779 res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
1781 r[f] = res_trans.get(r['id'], False) or r[f]
1783 for table in self._inherits:
1784 col = self._inherits[table]
1785 cols = intersect(self._inherit_fields.keys(), fields_to_read)
1788 res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
1796 record.update(res3[record[col]])
1797 if col not in fields_to_read:
1800 # all fields which need to be post-processed by a simple function (symbol_get)
1801 fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
1803 # maybe it would be faster to iterate on the fields then on res, so that we wouldn't need
1804 # to get the _symbol_get in each occurence
1806 for f in fields_post:
1807 r[f] = self.columns[f]._symbol_get(r[f])
1808 ids = map(lambda x: x['id'], res)
1810 # all non inherited fields for which the attribute whose name is in load is False
1811 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
1813 # Compute POST fields
1815 for f in fields_post:
1816 todo.setdefault(self._columns[f]._multi, [])
1817 todo[self._columns[f]._multi].append(f)
1818 for key,val in todo.items():
1820 res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
1823 record[pos] = res2[record['id']][pos]
1826 res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
1828 record[f] = res2[record['id']]
1830 #for f in fields_post:
1831 # # get the value of that field for all records/ids
1832 # res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
1833 # for record in res:
1834 # record[f] = res2[record['id']]
        # Group-restricted fields: strip values the user may not set.
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            # NOTE(review): group/module names are interpolated into the SQL
            # string without escaping; assumed to come from trusted field
            # definitions -- confirm.
            cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                    (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
            elif readonly[0][0] == 0:
        # replace unauthorized values by a type-matched placeholder
        if type(vals[field]) == type([]):
        elif type(vals[field]) == type(0.0):
        elif type(vals[field]) == type(''):
            vals[field] = '=No Permission='
    def perm_read(self, cr, user, ids, context=None, details=True):
        """Return creation/modification metadata for ids; with `details`
        set, uids are expanded through res.users name_get."""
        if self._log_access:
            fields = ', u.create_uid, u.create_date, u.write_uid, u.write_date'
        if isinstance(ids, (int, long)):
        ids_str = string.join(map(lambda x: str(x), ids), ',')
        cr.execute('select u.id'+fields+' from "'+self._table+'" u where u.id in ('+ids_str+')')
        res = cr.dictfetchall()
                # normalise SQL NULLs to False
                r[key] = r[key] or False
                if key in ('write_uid', 'create_uid', 'uid') and details:
                        r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
        # mirror the input shape: single id in, single record out
        if isinstance(ids, (int, long)):
    def unlink(self, cr, uid, ids, context=None):
        """Delete records: collect stored-function recomputations, run the
        read_delta concurrency check, enforce access and record rules,
        issue the DELETEs, then recompute dependent stored fields."""
        if isinstance(ids, (int, long)):
        # remember which stored function fields must be recomputed once
        # the records are gone
        if self._name in self.pool._store_function.keys():
            list_store = self.pool._store_function[self._name]
            for tuple_fn in list_store:
                    id_change.append(self._store_get_ids(cr, uid, id, tuple_fn, context)[0])
                fn_data = id_change, tuple_fn
                fn_list.append(fn_data)
        # optimistic concurrency check driven by context['read_delta']
        delta = context.get('read_delta', False)
        if delta and self._log_access:
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids = ids[i:i+cr.IN_MAX]
                cr.execute("select (now() - min(write_date)) <= '%s'::interval " \
                        "from \"%s\" where id in (%s)" %
                        (delta, self._table, ",".join(map(str, sub_ids))))
                    raise except_orm(_('ConcurrencyException'),
                            _('This record was modified in the meanwhile'))

        self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink')

        # drop the workflow instances of each deleted record
        wf_service = netsvc.LocalService("workflow")
            wf_service.trg_delete(uid, self._name, id, cr)

        #cr.execute('select * from '+self._table+' where id in ('+str_d+')', ids)
        #res = cr.dictfetchall()
        #for key in self._inherits:
        #    ids2 = [x[self._inherits[key]] for x in res]
        #    self.pool.get(key).unlink(cr, uid, ids2)

        # record rules: restrict the DELETE with the rule domain
        d1, d2 = self.pool.get('ir.rule').domain_get(cr, uid, self._name)
        for i in range(0, len(ids), cr.IN_MAX):
            sub_ids = ids[i:i+cr.IN_MAX]
            str_d = string.join(('%d',)*len(sub_ids), ',')
                cr.execute('SELECT id FROM "'+self._table+'" ' \
                        'WHERE id IN ('+str_d+')'+d1, sub_ids+d2)
                if not cr.rowcount == len({}.fromkeys(ids)):
                    raise except_orm(_('AccessError'),
                            _('You try to bypass an access rule (Document type: %s).') % \
                cr.execute('delete from "'+self._table+'" ' \
                        'where id in ('+str_d+')'+d1, sub_ids+d2)
                cr.execute('delete from "'+self._table+'" ' \
                        'where id in ('+str_d+')', sub_ids)
        # recompute the stored function fields collected above
        for ids, tuple_fn in fn_list:
            self._store_set_values(cr, uid, ids, tuple_fn, id_change, context)
1970 def write(self, cr, user, ids, vals, context=None):
1972 for field in vals.copy():
1974 if field in self._columns:
1975 fobj = self._columns[field]
1977 fobj = self._inherit_fields[field][2]
1984 for group in groups:
1985 module = group.split(".")[0]
1986 grp = group.split(".")[1]
1987 cr.execute("select count(*) from res_groups_users_rel where gid in (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
1988 (grp, module, 'res.groups', user))
1989 readonly = cr.fetchall()
1990 if readonly[0][0] >= 1:
1993 elif readonly[0][0] == 0:
2005 if isinstance(ids, (int, long)):
2007 delta = context.get('read_delta', False)
2008 if delta and self._log_access:
2009 for i in range(0, len(ids), cr.IN_MAX):
2010 sub_ids = ids[i:i+cr.IN_MAX]
2011 cr.execute("select (now() - min(write_date)) <= '%s'::interval " \
2012 "from %s where id in (%s)" %
2013 (delta, self._table, ",".join(map(str, sub_ids))))
2017 if field in self._columns and self._columns[field]._classic_write:
2018 raise except_orm(_('ConcurrencyException'),
2019 _('This record was modified in the meanwhile'))
2021 self.pool.get('ir.model.access').check(cr, user, self._name, 'write')
2023 #for v in self._inherits.values():
2024 # assert v not in vals, (v, vals)
2030 totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
2032 if field in self._columns:
2033 if self._columns[field]._classic_write:
2034 if (not totranslate) or not self._columns[field].translate:
2035 upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
2036 upd1.append(self._columns[field]._symbol_set[1](vals[field]))
2037 direct.append(field)
2039 upd_todo.append(field)
2041 updend.append(field)
2042 if field in self._columns \
2043 and hasattr(self._columns[field], 'selection') \
2045 if self._columns[field]._type == 'reference':
2046 val = vals[field].split(',')[0]
2049 if isinstance(self._columns[field].selection, (tuple, list)):
2050 if val not in dict(self._columns[field].selection):
2051 raise except_orm(_('ValidateError'),
2052 _('The value "%s" for the field "%s" is not in the selection') \
2053 % (vals[field], field))
2055 if val not in dict(self._columns[field].selection(
2056 self, cr, user, context=context)):
2057 raise except_orm(_('ValidateError'),
2058 _('The value "%s" for the field "%s" is not in the selection') \
2059 % (vals[field], field))
2061 if self._log_access:
2062 upd0.append('write_uid=%d')
2063 upd0.append('write_date=now()')
2068 d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)
2072 for i in range(0, len(ids), cr.IN_MAX):
2073 sub_ids = ids[i:i+cr.IN_MAX]
2074 ids_str = string.join(map(str, sub_ids), ',')
2076 cr.execute('SELECT id FROM "'+self._table+'" ' \
2077 'WHERE id IN ('+ids_str+')'+d1, d2)
2078 if not cr.rowcount == len({}.fromkeys(sub_ids)):
2079 raise except_orm(_('AccessError'),
2080 _('You try to bypass an access rule (Document type: %s).') % \
2083 cr.execute('SELECT id FROM "'+self._table+'" WHERE id IN ('+ids_str+')')
2084 if not cr.rowcount == len({}.fromkeys(sub_ids)):
2085 raise except_orm(_('AccessError'),
2086 _('You try to write on an record that doesn\'t exist ' \
2087 '(Document type: %s).') % self._description)
2089 cr.execute('update "'+self._table+'" set '+string.join(upd0, ',')+' ' \
2090 'where id in ('+ids_str+')'+d1, upd1+ d2)
2092 cr.execute('update "'+self._table+'" set '+string.join(upd0, ',')+' ' \
2093 'where id in ('+ids_str+')', upd1)
2097 if self._columns[f].translate:
2098 self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f])
2100 # call the 'set' method of fields which are not classic_write
2101 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
2102 for field in upd_todo:
2104 self._columns[field].set(cr, self, id, field, vals[field], user, context=context)
2106 for table in self._inherits:
2107 col = self._inherits[table]
2109 for i in range(0, len(ids), cr.IN_MAX):
2110 sub_ids = ids[i:i+cr.IN_MAX]
2111 ids_str = string.join(map(str, sub_ids), ',')
2112 cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
2113 'where id in ('+ids_str+')', upd1)
2114 nids.extend([x[0] for x in cr.fetchall()])
2118 if self._inherit_fields[val][0] == table:
2120 self.pool.get(table).write(cr, user, nids, v, context)
2122 self._validate(cr, user, ids, context)
2123 # TODO: use _order to set dest at the right position and not first node of parent
2124 if self._parent_store and (self._parent_name in vals):
2126 self.pool._init_parent[self._name]=True
2128 cr.execute('select parent_left,parent_right from '+self._table+' where id=%d', (vals[self._parent_name],))
2133 cr.execute('select max(parent_right),max(parent_right)+1 from '+self._table)
2134 pleft,pright = cr.fetchone()
2135 cr.execute('select parent_left,parent_right,id from '+self._table+' where id in ('+','.join(map(lambda x:'%d',ids))+')', ids)
2137 for cleft,cright,cid in cr.fetchall():
2139 treeshift = pleft - cleft + 1
2141 rightbound = cleft-1
2142 cwidth = cright-cleft+1
2146 treeshift = pleft - cright
2147 leftbound = cright + 1
2149 cwidth = cleft-cright-1
2152 cr.execute('UPDATE '+self._table+'''
2155 WHEN parent_left BETWEEN %d AND %d THEN parent_left + %d
2156 WHEN parent_left BETWEEN %d AND %d THEN parent_left + %d
2160 WHEN parent_right BETWEEN %d AND %d THEN parent_right + %d
2161 WHEN parent_right BETWEEN %d AND %d THEN parent_right + %d
2165 parent_left<%d OR parent_right>%d;
2166 ''', (leftbound,rightbound,cwidth,cleft,cright,treeshift,leftbound,rightbound,
2167 cwidth,cleft,cright,treeshift,leftrange,rightrange))
2169 if 'read_delta' in context:
2170 del context['read_delta']
2172 wf_service = netsvc.LocalService("workflow")
2174 wf_service.trg_write(user, self._name, id, cr)
2175 self._update_function_stored(cr, user, ids, context=context)
2177 if self._name in self.pool._store_function.keys():
2178 list_store = self.pool._store_function[self._name]
2179 for tuple_fn in list_store:
2183 for field in tuple_fn[3]:
2184 if field in vals.keys():
2188 id_change = self._store_get_ids(cr, user, ids[0], tuple_fn, context)
2189 self._store_set_values(cr, user, ids[0], tuple_fn, id_change, context)
2194 # TODO: Should set perm to user.xxx
2196 def create(self, cr, user, vals, context=None):
# NOTE(review): this chunk is a fragmentary extract -- interleaved source
# lines are missing, so the comments below cover only the visible statements.
2197 """ create(cr, user, vals, context) -> int
2198 cr = database cursor
2200 vals = dictionary of the form {'field_name':field_value, ...}
# Access control: the user must hold 'create' permission on this model.
2204 self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
# Collect the _inherits parent tables so their fields are not defaulted here.
2209 for (t, c) in self._inherits.items():
2211 avoid_table.append(t)
2212 for f in self._columns.keys(): # + self._inherit_fields.keys():
2215 for f in self._inherit_fields.keys():
2216 if (not f in vals) and (not self._inherit_fields[f][0] in avoid_table):
# Fill in default values for the fields absent from `vals`.
2220 vals.update(self.default_get(cr, user, default, context))
2223 for v in self._inherits:
2224 if self._inherits[v] not in vals:
# upd0/upd1 accumulate column names / value placeholders for the INSERT;
# upd2 holds the corresponding bound parameter values.
2227 (upd0, upd1, upd2) = ('', '', [])
# Route values belonging to inherited (parent) models into `tocreate`.
2230 for v in vals.keys():
2231 if v in self._inherit_fields:
2232 (table, col, col_detail) = self._inherit_fields[v]
2233 tocreate[table][v] = vals[v]
# Reserve the new record id from the model's PostgreSQL sequence.
2236 cr.execute("SELECT nextval('"+self._sequence+"')")
2237 id_new = cr.fetchone()[0]
# Create the parent records first and link them through the _inherits column.
2238 for table in tocreate:
2239 id = self.pool.get(table).create(cr, user, tocreate[table])
2240 upd0 += ','+self._inherits[table]
# Classic (stored) columns go straight into the INSERT statement...
2245 if self._columns[field]._classic_write:
2246 upd0 = upd0 + ',"' + field + '"'
2247 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
2248 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
# ...while non classic_write fields are deferred to their set() method below.
2250 upd_todo.append(field)
# Selection fields: the supplied value must belong to the declared
# selection, whether it is a static list/tuple or a callable.
2251 if field in self._columns \
2252 and hasattr(self._columns[field], 'selection') \
2254 if self._columns[field]._type == 'reference':
# For reference fields only the model part of "model,id" is checked.
2255 val = vals[field].split(',')[0]
2258 if isinstance(self._columns[field].selection, (tuple, list)):
2259 if val not in dict(self._columns[field].selection):
2260 raise except_orm(_('ValidateError'),
2261 _('The value "%s" for the field "%s" is not in the selection') \
2262 % (vals[field], field))
2264 if val not in dict(self._columns[field].selection(
2265 self, cr, user, context=context)):
2266 raise except_orm(_('ValidateError'),
2267 _('The value "%s" for the field "%s" is not in the selection') \
2268 % (vals[field], field))
# Audit columns maintained automatically when _log_access is enabled.
2269 if self._log_access:
2270 upd0 += ',create_uid,create_date'
2273 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
# Apply the deferred (non classic_write) fields in priority order.
2274 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
2275 for field in upd_todo:
2276 self._columns[field].set(cr, self, id_new, field, vals[field], user, context)
# Run the model-level Python constraints on the new record.
2278 self._validate(cr, user, [id_new], context)
# Maintain the nested-set tree (parent_left/parent_right) when enabled.
2280 if self._parent_store:
2282 self.pool._init_parent[self._name]=True
2284 parent = vals.get(self._parent_name, False)
2286 cr.execute('select parent_left from '+self._table+' where id=%d', (parent,))
2287 pleft = cr.fetchone()[0]
2289 cr.execute('select max(parent_right) from '+self._table)
2290 pleft = cr.fetchone()[0] or 0
# Shift existing intervals right by 2, then insert the new leaf at pleft+1.
2291 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%d', (pleft,))
2292 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%d', (pleft,))
2293 cr.execute('update '+self._table+' set parent_left=%d,parent_right=%d where id=%d', (pleft+1,pleft+2,id_new))
# Notify the workflow engine, then refresh stored function fields.
2295 wf_service = netsvc.LocalService("workflow")
2296 wf_service.trg_create(user, self._name, id_new, cr)
2297 self._update_function_stored(cr, user, [id_new], context=context)
# Recompute stored function fields of other models that depend on this one.
2298 if self._name in self.pool._store_function.keys():
2299 list_store = self.pool._store_function[self._name]
2300 for tuple_fn in list_store:
2301 id_change = self._store_get_ids(cr, user, id_new, tuple_fn, context)
2302 self._store_set_values(cr, user, id_new, tuple_fn, id_change, context)
2306 def _store_get_ids(self, cr, uid, ids, tuple_fn, context):
# Resolve which records need their stored function field recomputed by
# invoking the callable stored in tuple_fn[4] on the model named tuple_fn[0].
# NOTE(review): the return statement is missing from this extract;
# presumably it returns `parent_id` -- confirm against the full source.
2307 parent_id = getattr(self.pool.get(tuple_fn[0]), tuple_fn[4].func_name)(cr, uid, [ids])
2310 def _store_set_values(self, cr, uid, ids, tuple_fn, parent_id, context):
# Recompute a stored function field: tuple_fn[2] names the compute method
# on the target model; its result dict {id: value} is written back one
# record at a time.
2314 vals_tot = getattr(self.pool.get(table), tuple_fn[2])(cr, uid, parent_id, name, args, context)
2316 for id in vals_tot.keys():
2317 write_dict[name] = vals_tot[id]
2318 self.pool.get(table).write(cr, uid, [id], write_dict)
# NOTE(review): `table`, `name`, `args` and `write_dict` are bound on lines
# missing from this extract.
2321 def _update_function_stored(self, cr, user, ids, context=None):
# Recompute function fields declared with store=True and persist their
# values directly with an UPDATE on this model's table.
2324 f = filter(lambda a: isinstance(self._columns[a], fields.function) \
2325 and self._columns[a].store, self._columns)
2327 result = self.read(cr, user, ids, fields=f, context=context)
# many2one/one2one values read back as (id, name) tuples; keep the id only.
2335 if self._columns[field]._type in ('many2one', 'one2one'):
2337 value = res[field][0]
2340 upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
2341 upd1.append(self._columns[field]._symbol_set[1](value))
2342 upd1.append(res['id'])
2343 cr.execute('update "' + self._table + '" set ' + \
2344 string.join(upd0, ',') + ' where id = %d', upd1)
def perm_write(self, cr, user, ids, fields, context=None):
    """Removed API stub kept so that stale callers fail loudly.

    :raises NotImplementedError: always -- this method no longer exists.
    """
    # Bug fix: the original did `raise _('...')`, i.e. raised a bare string.
    # String exceptions are invalid (TypeError in modern Python, deprecated
    # even in Python 2); wrap the same translated message in a real
    # exception type instead.
    raise NotImplementedError(_('This method does not exist anymore'))
2353 # TODO: improve handling of NULL values
2354 def _where_calc(self, cr, user, args, active_test=True, context=None):
# Translate a domain expression `args` into SQL fragments; returns the
# tuple (where-clause parts, bound parameters, tables involved).
2358 # if the object has a field named 'active', filter out all inactive
2359 # records unless they were explicitly asked for
2360 if 'active' in self._columns and (active_test and context.get('active_test', True)):
2362 args.insert(0, ('active', '=', 1))
2364 args = [('active', '=', 1)]
# Parse the domain through the expression engine and render it to SQL.
2368 e = expression.expression(args)
2369 e.parse(cr, user, self, context)
2370 tables = e.get_tables()
2371 qu1, qu2 = e.to_sql()
2372 qu1 = qu1 and [qu1] or []
# Empty domain: no WHERE clause, query only this model's own table.
2374 qu1, qu2, tables = [], [], ['"%s"' % self._table]
2376 return (qu1, qu2, tables)
2378 def _check_qorder(self, word):
# Guard against SQL injection through the ORDER BY clause: accept only
# comma-separated identifiers, each optionally followed by desc/asc
# (see regex_order at module level).
2379 if not regex_order.match(word):
2380 raise except_orm(_('AccessError'), _('Bad query.'))
2383 def search(self, cr, user, args, offset=0, limit=None, order=None,
2384 context=None, count=False):
# Return the ids of the records matching domain `args` (or their count
# when count=True), applying ir.rule record rules, ordering, limit and
# offset.
2387 # compute the where, order by, limit and offset clauses
2388 (qu1, qu2, tables) = self._where_calc(cr, user, args, context=context)
2391 qu1 = ' where '+string.join(qu1, ' and ')
# `order` is caller-supplied text: validate before interpolating into SQL.
2396 self._check_qorder(order)
2397 order_by = order or self._order
2399 limit_str = limit and ' limit %d' % limit or ''
2400 offset_str = offset and ' offset %d' % offset or ''
2403 # construct a clause for the rules :
2404 d1, d2 = self.pool.get('ir.rule').domain_get(cr, user, self._name)
# Append the record-rule clause to the WHERE clause (or start one).
2406 qu1 = qu1 and qu1+' and '+d1 or ' where '+d1
2410 cr.execute('select count(%s.id) from ' % self._table +
2411 ','.join(tables) +qu1 + limit_str + offset_str, qu2)
2414 # execute the "main" query to fetch the ids we were searching for
2415 cr.execute('select %s.id from ' % self._table + ','.join(tables) +qu1+' order by '+order_by+limit_str+offset_str, qu2)
2417 return [x[0] for x in res]
2419 # returns the different values ever entered for one field
2420 # this is used, for example, in the client when the user hits enter on
2422 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
# Delegate to the owning parent model when the field is inherited;
# otherwise let the column object perform the search itself.
2425 if field in self._inherit_fields:
2426 return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
2428 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
2430 def name_get(self, cr, user, ids, context=None):
# Return [(id, display_name)] pairs; the display name is the stringified
# value of the model's _rec_name column.
2435 if isinstance(ids, (int, long)):
2437 return [(r['id'], str(r[self._rec_name])) for r in self.read(cr, user, ids,
2438 [self._rec_name], context, load='_classic_write')]
2440 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=None):
# Search records whose _rec_name matches `name` under `operator`
# (default ilike), then return the name_get() of the matches.
2447 args += [(self._rec_name, operator, name)]
2448 ids = self.search(cr, user, args, limit=limit, context=context)
2449 res = self.name_get(cr, user, ids, context)
2452 def copy(self, cr, uid, id, default=None, context=None):
# Duplicate record `id`, overriding field values with `default`, and
# return the new record's id. Translations attached to the original
# record are duplicated onto the copy as well.
2457 if 'state' not in default:
2458 if 'state' in self._defaults:
2459 default['state'] = self._defaults['state'](self, cr, uid, context)
2460 data = self.read(cr, uid, [id], context=context)[0]
2461 fields = self.fields_get(cr, uid)
2463 ftype = fields[f]['type']
# Audit columns are regenerated on create, never copied over.
2465 if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
2469 data[f] = default[f]
2470 elif ftype == 'function':
2472 elif ftype == 'many2one':
# read() returns many2one as (id, name); keep only the id for create().
2474 data[f] = data[f] and data[f][0]
2477 elif ftype in ('one2many', 'one2one'):
2479 rel = self.pool.get(fields[f]['relation'])
2480 for rel_id in data[f]:
2481 # the lines are first duplicated using the wrong (old)
2482 # parent but then are reassigned to the correct one thanks
# (4, id) command links the freshly copied child record.
2484 res.append((4, rel.copy(cr, uid, rel_id, context=context)))
2486 elif ftype == 'many2many':
# (6, 0, ids) command replaces the relation with the same set of ids.
2487 data[f] = [(6, 0, data[f])]
2489 trans_obj = self.pool.get('ir.translation')
# Collect translations of translatable (own or inherited) fields so they
# can be re-attached to the copy once it exists.
2494 if f in self._columns and self._columns[f].translate:
2495 trans_name=self._name+","+f
2496 elif f in self._inherit_fields and self._inherit_fields[f][2].translate:
2497 trans_name=self._inherit_fields[f][0]+","+f
2502 trans_ids = trans_obj.search(cr, uid, [
2503 ('name', '=', trans_name),
2504 ('res_id','=',data['id'])
2507 trans_data.extend(trans_obj.read(cr,uid,trans_ids,context=context))
# Drop the _inherits link columns: create() builds fresh parent records.
2511 for v in self._inherits:
2512 del data[self._inherits[v]]
2514 new_id=self.create(cr, uid, data)
# Re-point the collected translations at the new record.
2516 for record in trans_data:
2518 record['res_id']=new_id
2519 trans_obj.create(cr,uid,record)
2523 def read_string(self, cr, uid, id, langs, fields=None, context=None):
# Return, per language code in `langs`, the translated labels of this
# model's fields (falling back to the column's default string), merging
# in the labels coming from _inherits parent models.
2528 self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
# No explicit field list: consider every own and inherited field.
2530 fields = self._columns.keys() + self._inherit_fields.keys()
2532 res[lang] = {'code': lang}
2534 if f in self._columns:
2535 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
2537 res[lang][f] = res_trans
# No translation found: fall back to the untranslated column label.
2539 res[lang][f] = self._columns[f].string
2540 for table in self._inherits:
2541 cols = intersect(self._inherit_fields.keys(), fields)
2542 res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
2545 res[lang] = {'code': lang}
2546 for f in res2[lang]:
2547 res[lang][f] = res2[lang][f]
2550 def write_string(self, cr, uid, id, langs, vals, context=None):
# Store translated labels for this model's fields for each language in
# `langs`, then propagate the update to _inherits parent models.
2553 self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
2556 if field in self._columns:
# res_id [0] marks a field-label translation (not record-bound).
2557 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field])
2558 for table in self._inherits:
2559 cols = intersect(self._inherit_fields.keys(), vals)
2561 self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
2564 def check_recursion(self, cr, uid, ids, parent=None):
# Walk up the `parent` (default: _parent_name) chain level by level to
# verify that none of `ids` is its own ancestor (i.e. no cycles).
2566 parent = self._parent_name
2568 while len(ids_parent):
# Chunk the id list to respect the cursor's IN-clause size limit.
# NOTE(review): `range` iterates over len(ids) but the slice is taken
# from ids_parent -- looks like a bug; should presumably be
# len(ids_parent). Confirm against the full source before fixing.
2570 for i in range(0, len(ids), cr.IN_MAX):
2571 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
2572 cr.execute('SELECT distinct "'+parent+'"'+
2573 ' FROM "'+self._table+'" ' \
2574 'WHERE id in ('+','.join(map(str, sub_ids_parent))+')')
# Keep only non-NULL parents for the next level of the walk.
2575 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
2576 ids_parent = ids_parent2
2577 for i in ids_parent:
# NOTE(review): the loop body and return statement are missing from this
# extract; presumably it returns False when i is in ids -- confirm.
2583 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: