return False
# if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
- if col._classic_write:
+ if col._prefetch:
# gen the list of "local" (ie not inherited) fields which are classic or many2one
ffields = filter(lambda x: x[1]._classic_write, self._table._columns.items())
# gen the list of inherited fields
t = eval('fields.'+(f._type))
f_type = (type_dict[t], type_dict[t])
elif isinstance(f, fields.function) and f._type == 'float':
- f_type = ('float8', 'DOUBLE PRECISION')
+ if f.digits:
+ f_type = ('numeric', 'NUMERIC(%d,%d)' % (f.digits[0], f.digits[1]))
+ else:
+ f_type = ('float8', 'DOUBLE PRECISION')
elif isinstance(f, fields.function) and f._type == 'selection':
f_type = ('text', 'text')
elif isinstance(f, fields.function) and f._type == 'char':
for rr in r :
if isinstance(rr.name, browse_record):
rr = rr.name
- dt+=rr.name+','
+ dt += rr.name or '' + ','
data[fpos] = dt[:-1]
break
lines += lines2[1:]
warning = ''
warning_fields = []
for field in fields_export:
- if imp_comp and len(field)>1:
+ if imp_comp and len(field)>1:
warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
elif len (field) <=1:
if imp_comp and cols.get(field and field[0],False):
fields = map(lambda x: x.split('/'), fields)
logger = netsvc.Logger()
ir_model_data_obj = self.pool.get('ir.model.data')
+
+        def _check_db_id(self, model_name, db_id):
+            # Validate that a raw database id coming from the CSV import
+            # refers to an existing record of `model_name`; raises if not.
+            # NOTE(review): `cr` and `uid` are not parameters — this helper
+            # appears to be nested inside the import routine and to capture
+            # them from the enclosing scope (closure) — TODO confirm.
+            obj_model = self.pool.get(model_name)
+            ids = obj_model.search(cr, uid, [('id','=',int(db_id))])
+            if not len(ids):
+                # Runtime error string kept byte-identical (it is program
+                # behavior, surfaced in the import warnings list).
+                raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, db_id))
+            return True
+
def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0):
line = datas[position]
row = {}
translate = {}
todo = []
- warning = ''
+ warning = []
data_id = False
data_res_id = False
is_xml_id = False
raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
if not line[i]:
continue
- field = fields[i]
+ field = fields[i]
+ if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':db_id'):
+ # Database ID
+ res = False
+ if line[i]:
+ field_name = field[0].split(':')[0]
+ model_rel = fields_def[field_name]['relation']
+
+ if fields_def[field[len(prefix)][:-6]]['type']=='many2many':
+ res_id = []
+ for db_id in line[i].split(config.get('csv_internal_sep')):
+ try:
+ _check_db_id(self, model_rel, db_id)
+ res_id.append(db_id)
+ except Exception,e:
+ warning += [tools.exception_to_unicode(e)]
+ logger.notifyChannel("import", netsvc.LOG_ERROR,
+ tools.exception_to_unicode(e))
+ if len(res_id):
+ res = [(6, 0, res_id)]
+ else:
+ try:
+ _check_db_id(self, model_rel, line[i])
+ res = line[i]
+ except Exception,e:
+ warning += [tools.exception_to_unicode(e)]
+ logger.notifyChannel("import", netsvc.LOG_ERROR,
+ tools.exception_to_unicode(e))
+ row[field_name] = res or False
+ continue
+
if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':id'):
res_id = False
if line[i]:
ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id})
db_id = is_db_id
if is_db_id and int(db_id) != int(is_db_id):
- warning += ("Id is not the same than existing one: " + str(is_db_id) + " !\n")
+ warning += [_("Id is not the same than existing one: %s")%(is_db_id)]
logger.notifyChannel("import", netsvc.LOG_ERROR,
- "Id is not the same than existing one: " + str(is_db_id) + ' !\n')
+ _("Id is not the same than existing one: %s")%(is_db_id))
continue
if field[len(prefix)] == "db_id":
# Database ID
- try:
- line[i]= int(line[i])
- except Exception, e:
- warning += (str(e) + "!\n")
- logger.notifyChannel("import", netsvc.LOG_ERROR,
- str(e) + '!\n')
- continue
- is_db_id = line[i]
- obj_model = self.pool.get(model_name)
- ids = obj_model.search(cr, uid, [('id','=',line[i])])
- if not len(ids):
- warning += ("Database ID doesn't exist: " + model_name + ": " + str(line[i]) + " !\n")
+ try:
+ _check_db_id(self, model_name, line[i])
+ data_res_id = is_db_id = int(line[i])
+ except Exception,e:
+ warning += [tools.exception_to_unicode(e)]
logger.notifyChannel("import", netsvc.LOG_ERROR,
- "Database ID doesn't exist: " + model_name + ": " + str(line[i]) + ' !\n')
+ tools.exception_to_unicode(e))
continue
- else:
- data_res_id = ids[0]
data_ids = ir_model_data_obj.search(cr, uid, [('model','=',model_name),('res_id','=',line[i])])
if len(data_ids):
d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0]
if is_xml_id and not data_id:
data_id = is_xml_id
if is_xml_id and is_xml_id!=data_id:
- warning += ("Id is not the same than existing one: " + str(line[i]) + " !\n")
+ warning += [_("Id is not the same than existing one: %s")%(line[i])]
logger.notifyChannel("import", netsvc.LOG_ERROR,
- "Id is not the same than existing one: " + str(line[i]) + ' !\n')
+ _("Id is not the same than existing one: %s")%(line[i]))
continue
if fields_def[field[len(prefix)]]['type'] == 'integer':
break
if line[i] and not res:
logger.notifyChannel("import", netsvc.LOG_WARNING,
- "key '%s' not found in selection field '%s'" % \
+ _("key '%s' not found in selection field '%s'") % \
(line[i], field[len(prefix)]))
- warning += "Key/value '"+ str(line[i]) +"' not found in selection field '"+str(field[len(prefix)])+"'"
+ warning += [_("Key/value '%s' not found in selection field '%s'")%(line[i],field[len(prefix)])]
elif fields_def[field[len(prefix)]]['type']=='many2one':
res = False
line[i], [], operator='=', context=context)
res = (res2 and res2[0][0]) or False
if not res:
- warning += ('Relation not found: ' + line[i] + \
- ' on ' + relation + ' !\n')
+ warning += [_("Relation not found: %s on '%s'")%(line[i],relation)]
logger.notifyChannel("import", netsvc.LOG_WARNING,
- 'Relation not found: ' + line[i] + \
- ' on ' + relation + ' !\n')
+ _("Relation not found: %s on '%s'")%(line[i],relation))
elif fields_def[field[len(prefix)]]['type']=='many2many':
res = []
if line[i]:
uid, word, [], operator='=', context=context)
res3 = (res2 and res2[0][0]) or False
if not res3:
- warning += ('Relation not found: ' + \
- line[i] + ' on '+relation + ' !\n')
+ warning += [_("Relation not found: %s on '%s'")%(line[i],relation)]
logger.notifyChannel("import",
netsvc.LOG_WARNING,
- 'Relation not found: ' + line[i] + \
- ' on '+relation + ' !\n')
+ _("Relation not found: %s on '%s'")%(line[i],relation))
else:
res.append(res3)
if len(res):
#try:
(res, other, warning, translate, data_id, res_id) = \
process_liness(self, datas, [], current_module, self._name, fields_def)
- if warning:
+ if len(warning):
cr.rollback()
- return (-1, res, 'Line ' + str(counter) +' : ' + warning, '')
+ return (-1, res, 'Line ' + str(counter) +' : ' + '!\n'.join(warning), '')
try:
id = ir_model_data_obj._update(cr, uid, self._name,
except Exception, e:
import psycopg2
if isinstance(e,psycopg2.IntegrityError):
- msg= 'Insertion Failed!'
+ msg= _('Insertion Failed!')
for key in self.pool._sql_error.keys():
if key in e[0]:
msg = self.pool._sql_error[key]
attrs = {'views': views}
if node.hasAttribute('widget') and node.getAttribute('widget')=='selection':
# We can not use the 'string' domain has it is defined according to the record !
- dom = None
+ dom = []
if column._domain and not isinstance(column._domain, (str, unicode)):
dom = column._domain
+
attrs['selection'] = self.pool.get(relation).name_search(cr, user, '', dom, context=context)
if (node.hasAttribute('required') and not int(node.getAttribute('required'))) or not column.required:
attrs['selection'].append((False,''))
if d1:
cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) AND %s ORDER BY %s' % \
(','.join(fields_pre2 + ['id']), self._table,
- ','.join([str(x) for x in sub_ids]), d1,
- self._order), d2)
+ ','.join(['%s' for x in sub_ids]), d1,
+ self._order),sub_ids + d2)
if not cr.rowcount == len({}.fromkeys(sub_ids)):
raise except_orm(_('AccessError'),
_('You try to bypass an access rule (Document type: %s).') % self._description)
else:
cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) ORDER BY %s' % \
(','.join(fields_pre2 + ['id']), self._table,
- ','.join([str(x) for x in sub_ids]),
- self._order))
+ ','.join(['%s' for x in sub_ids]),
+ self._order), sub_ids)
res.extend(cr.dictfetchall())
else:
res = map(lambda x: {'id': x}, ids)
if not edit:
vals.pop(field)
+
if not context:
context = {}
if not ids:
self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
+
upd0 = []
upd1 = []
upd_todo = []
src_trans = self.pool.get(self._name).read(cr,user,ids,[f])
self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans[0][f])
+
# call the 'set' method of fields which are not classic_write
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
if c[0].startswith('default_'):
del rel_context[c[0]]
+ result = []
for field in upd_todo:
for id in ids:
- self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context)
+ result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
for table in self._inherits:
col = self._inherits[table]
cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance,pleft-position+distance, pleft+distance, pright+distance))
- result = self._store_get_values(cr, user, ids, vals.keys(), context)
+ result += self._store_get_values(cr, user, ids, vals.keys(), context)
for order, object, ids, fields in result:
self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
if c[0].startswith('default_'):
del rel_context[c[0]]
+ result = []
for field in upd_todo:
- self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context)
+ result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
self._validate(cr, user, [id_new], context)
- result = self._store_get_values(cr, user, [id_new], vals.keys(), context)
- for order, object, ids, fields2 in result:
- self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
+ if not context.get('no_store_function', False):
+ result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
+ result.sort()
+ done = []
+ for order, object, ids, fields2 in result:
+ if not (object, ids, fields2) in done:
+ self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
+ done.append((object, ids, fields2))
wf_service = netsvc.LocalService("workflow")
wf_service.trg_create(user, self._name, id_new, cr)
result = {}
fncts = self.pool._store_function.get(self._name, [])
for fnct in range(len(fncts)):
+ if fncts[fnct][3]:
+ ok = False
+ for f in (fields or []):
+ if f in fncts[fnct][3]:
+ ok = True
+ break
+ if not ok:
+ continue
+
result.setdefault(fncts[fnct][0], {})
ids2 = fncts[fnct][2](self,cr, uid, ids, context)
for id in filter(None, ids2):