import tools
import sys
+
try:
from xml import dom, xpath
except ImportError:
sys.stderr.write("ERROR: Import xpath module\n")
sys.stderr.write("ERROR: Try to install the old python-xml package\n")
- sys.exit(2)
+ sys.stderr.write('On Ubuntu Jaunty, try this: sudo cp /usr/lib/python2.6/dist-packages/oldxml/_xmlplus/utils/boolean.so /usr/lib/python2.5/site-packages/oldxml/_xmlplus/utils\n')
+ raise
from tools.config import config
self.value = value
self.args = (name, value)
+class BrowseRecordError(Exception):
+ pass
# Readonly python database object browser
class browse_null(object):
'''
if not context:
context = {}
- assert id and isinstance(id, (int, long,)), _('Wrong ID for the browse record, got %r, expected an integer.') % (id,)
self._list_class = list_class or browse_record_list
self._cr = cr
self._uid = uid
cache.setdefault(table._name, {})
self._data = cache[table._name]
+ if not (id and isinstance(id, (int, long,))):
+ raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
+# if not table.exists(cr, uid, id, context):
+#         raise BrowseRecordError(_('Object %s does not exist') % (self,))
+
if id not in self._data:
self._data[id] = {'id': id}
return False
# if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
- if col._classic_write:
+ if col._prefetch:
# gen the list of "local" (ie not inherited) fields which are classic or many2one
ffields = filter(lambda x: x[1]._classic_write, self._table._columns.items())
# gen the list of inherited fields
fffields = map(lambda x: x[0], ffields)
datas = self._table.read(self._cr, self._uid, ids, fffields, context=self._context, load="_classic_write")
if self._fields_process:
+ lang = self._context.get('lang', 'en_US') or 'en_US'
+ lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid,[('code','=',lang)])
+ if not lang_obj_ids:
+ raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
+ lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid,lang_obj_ids[0])
for n, f in ffields:
if f._type in self._fields_process:
for d in datas:
d[n] = self._fields_process[f._type](d[n])
- if d[n]:
- d[n].set_value(self._cr, self._uid, d[n], self, f)
+ if (d[n] is not None) and (d[n] is not False):
+ d[n].set_value(self._cr, self._uid, d[n], self, f, lang_obj)
# create browse records for 'remote' objects
t = eval('fields.'+(f._type))
f_type = (type_dict[t], type_dict[t])
elif isinstance(f, fields.function) and f._type == 'float':
- f_type = ('float8', 'DOUBLE PRECISION')
+ if f.digits:
+ f_type = ('numeric', 'NUMERIC(%d,%d)' % (f.digits[0], f.digits[1]))
+ else:
+ f_type = ('float8', 'DOUBLE PRECISION')
elif isinstance(f, fields.function) and f._type == 'selection':
f_type = ('text', 'text')
elif isinstance(f, fields.function) and f._type == 'char':
vals['select_level']
))
if 'module' in context:
+ name1 = 'field_' + self._table + '_' + k
+ cr.execute("select name from ir_model_data where name=%s", (name1,))
+ if cr.fetchone():
+ name1 = name1 + "_" + str(id)
cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
- (('field_'+self._table+'_'+k)[:64], context['module'], 'ir.model.fields', id)
+ (name1, context['module'], 'ir.model.fields', id)
)
else:
for key, val in vals.items():
return browse_null()
def __export_row(self, cr, uid, row, fields, context=None):
+
+ def check_type(field_type):
+ if field_type == 'float':
+ return 0.0
+ elif field_type == 'integer':
+ return 0
+ elif field_type == 'boolean':
+ return False
+ return ''
+
lines = []
data = map(lambda x: '', range(len(fields)))
done = []
for fpos in range(len(fields)):
- f = fields[fpos]
+ f = fields[fpos]
if f:
r = row
i = 0
while i < len(f):
- r = r[f[i]]
+ if f[i] == 'db_id':
+ r = r['id']
+ elif f[i] == 'id':
+ model_data = self.pool.get('ir.model.data')
+ data_ids = model_data.search(cr, uid, [('model','=',r._table_name),('res_id','=',r['id'])])
+ if len(data_ids):
+ d = model_data.read(cr, uid, data_ids, ['name','module'])[0]
+ if d['module']:
+ r = '%s.%s'%(d['module'],d['name'])
+ else:
+ r = d['name']
+ else:
+ break
+ else:
+ r = r[f[i]]
if not r:
+ if f[i] in self._columns:
+ r = check_type(self._columns[f[i]]._type)
+ elif f[i] in self._inherit_fields:
+ r = check_type(self._inherit_fields[f[i]][2]._type)
+ data[fpos] = r
break
if isinstance(r, (browse_record_list, list)):
first = True
or [], fields)
if fields2 in done:
break
- done.append(fields2)
+ done.append(fields2)
for row2 in r:
lines2 = self.__export_row(cr, uid, row2, fields2,
- context)
+ context)
if first:
for fpos2 in range(len(fields)):
if lines2 and lines2[0][fpos2]:
data[fpos2] = lines2[0][fpos2]
+ if not data[fpos]:
+ dt = ''
+ for rr in r :
+ if isinstance(rr.name, browse_record):
+ rr = rr.name
+ dt += rr.name or '' + ','
+ data[fpos] = dt[:-1]
+ break
lines += lines2[1:]
first = False
else:
- lines += lines2
+ lines += lines2
break
i += 1
if i == len(f):
+ if isinstance(r, browse_record):
+ r = r.name
data[fpos] = tools.ustr(r or '')
return [data] + lines
- def export_data(self, cr, uid, ids, fields, context=None):
+ def export_data(self, cr, uid, ids, fields_to_export, context=None):
if not context:
context = {}
- fields = map(lambda x: x.split('/'), fields)
+ imp_comp = context.get('import_comp',False)
+ cols = self._columns.copy()
+ for f in self._inherit_fields:
+ cols.update({f: self._inherit_fields[f][2]})
+ fields_to_export = map(lambda x: x.split('/'), fields_to_export)
+ fields_export = fields_to_export+[]
+ warning = ''
+ warning_fields = []
+ for field in fields_export:
+ if imp_comp and len(field)>1:
+ warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
+ elif len (field) <=1:
+ if imp_comp and cols.get(field and field[0],False):
+ if ((isinstance(cols[field[0]], fields.function) and not cols[field[0]].store) \
+ or isinstance(cols[field[0]], fields.related)\
+ or isinstance(cols[field[0]], fields.one2many)):
+ warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
datas = []
+ if imp_comp and len(warning_fields):
+ warning = 'Following columns cannot be exported since you select to be import compatible.\n%s' %('\n'.join(warning_fields))
+ cr.rollback()
+ return {'warning' : warning}
for row in self.browse(cr, uid, ids, context):
- datas += self.__export_row(cr, uid, row, fields, context)
- return datas
+ datas += self.__export_row(cr, uid, row, fields_to_export, context)
+ return {'datas':datas}
def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
if not context:
context = {}
fields = map(lambda x: x.split('/'), fields)
logger = netsvc.Logger()
-
- def process_liness(self, datas, prefix, fields_def, position=0):
+ ir_model_data_obj = self.pool.get('ir.model.data')
+
+ def _check_db_id(self, model_name, db_id):
+ obj_model = self.pool.get(model_name)
+ ids = obj_model.search(cr, uid, [('id','=',int(db_id))])
+ if not len(ids):
+ raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, db_id))
+ return True
+
+ def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0):
line = datas[position]
row = {}
translate = {}
todo = []
- warning = ''
+ warning = []
data_id = False
+ data_res_id = False
+ is_xml_id = False
+ is_db_id = False
+ ir_model_data_obj = self.pool.get('ir.model.data')
#
# Import normal fields
#
for i in range(len(fields)):
if i >= len(line):
raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
- field = fields[i]
- if field == ["id"]:
- data_id = line[i]
+ if not line[i]:
continue
+ field = fields[i]
+ if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':db_id'):
+ # Database ID
+ res = False
+ if line[i]:
+ field_name = field[0].split(':')[0]
+ model_rel = fields_def[field_name]['relation']
+
+ if fields_def[field[len(prefix)][:-6]]['type']=='many2many':
+ res_id = []
+ for db_id in line[i].split(config.get('csv_internal_sep')):
+ try:
+ _check_db_id(self, model_rel, db_id)
+ res_id.append(db_id)
+ except Exception,e:
+ warning += [tools.exception_to_unicode(e)]
+ logger.notifyChannel("import", netsvc.LOG_ERROR,
+ tools.exception_to_unicode(e))
+ if len(res_id):
+ res = [(6, 0, res_id)]
+ else:
+ try:
+ _check_db_id(self, model_rel, line[i])
+ res = line[i]
+ except Exception,e:
+ warning += [tools.exception_to_unicode(e)]
+ logger.notifyChannel("import", netsvc.LOG_ERROR,
+ tools.exception_to_unicode(e))
+ row[field_name] = res or False
+ continue
+
if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':id'):
res_id = False
if line[i]:
if '.' in word:
module, xml_id = word.rsplit('.', 1)
else:
- module, xml_id = current_module, word
- ir_model_data_obj = self.pool.get('ir.model.data')
+ module, xml_id = current_module, word
id = ir_model_data_obj._get_id(cr, uid, module,
xml_id)
res_id2 = ir_model_data_obj.read(cr, uid, [id],
if '.' in line[i]:
module, xml_id = line[i].rsplit('.', 1)
else:
- module, xml_id = current_module, line[i]
- ir_model_data_obj = self.pool.get('ir.model.data')
+ module, xml_id = current_module, line[i]
id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
res_id = ir_model_data_obj.read(cr, uid, [id],
['res_id'])[0]['res_id']
continue
if (len(field) == len(prefix)+1) and \
(prefix == field[0:len(prefix)]):
+ if field[len(prefix)] == "id":
+ # XML ID
+ db_id = False
+ is_xml_id = data_id = line[i]
+ d = data_id.split('.')
+ module = len(d)>1 and d[0] or ''
+ name = len(d)>1 and d[1] or d[0]
+ data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('name','=',name)])
+ if len(data_ids):
+ d = ir_model_data_obj.read(cr, uid, data_ids, ['res_id'])[0]
+ db_id = d['res_id']
+ if is_db_id and not db_id:
+ data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('res_id','=',is_db_id)])
+ if not len(data_ids):
+ ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id})
+ db_id = is_db_id
+ if is_db_id and int(db_id) != int(is_db_id):
+ warning += [_("Id is not the same than existing one: %s")%(is_db_id)]
+ logger.notifyChannel("import", netsvc.LOG_ERROR,
+ _("Id is not the same than existing one: %s")%(is_db_id))
+ continue
+
+ if field[len(prefix)] == "db_id":
+ # Database ID
+ try:
+ _check_db_id(self, model_name, line[i])
+ data_res_id = is_db_id = int(line[i])
+ except Exception,e:
+ warning += [tools.exception_to_unicode(e)]
+ logger.notifyChannel("import", netsvc.LOG_ERROR,
+ tools.exception_to_unicode(e))
+ continue
+ data_ids = ir_model_data_obj.search(cr, uid, [('model','=',model_name),('res_id','=',line[i])])
+ if len(data_ids):
+ d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0]
+ data_id = d['name']
+ if d['module']:
+ data_id = '%s.%s'%(d['module'],d['name'])
+ else:
+ data_id = d['name']
+ if is_xml_id and not data_id:
+ data_id = is_xml_id
+ if is_xml_id and is_xml_id!=data_id:
+ warning += [_("Id is not the same than existing one: %s")%(line[i])]
+ logger.notifyChannel("import", netsvc.LOG_ERROR,
+ _("Id is not the same than existing one: %s")%(line[i]))
+
+ continue
if fields_def[field[len(prefix)]]['type'] == 'integer':
res = line[i] and int(line[i])
elif fields_def[field[len(prefix)]]['type'] == 'boolean':
- res = line[i] and eval(line[i])
+ res = line[i].lower() not in ('0', 'false', 'off')
elif fields_def[field[len(prefix)]]['type'] == 'float':
res = line[i] and float(line[i])
elif fields_def[field[len(prefix)]]['type'] == 'selection':
sel = fields_def[field[len(prefix)]]['selection'](self,
cr, uid, context)
for key, val in sel:
- if str(key) == line[i]:
+                        if line[i] in [tools.ustr(key),tools.ustr(val)]: # Accepting key or value for selection field
res = key
+ break
if line[i] and not res:
logger.notifyChannel("import", netsvc.LOG_WARNING,
- "key '%s' not found in selection field '%s'" % \
+ _("key '%s' not found in selection field '%s'") % \
(line[i], field[len(prefix)]))
+
+ warning += [_("Key/value '%s' not found in selection field '%s'")%(line[i],field[len(prefix)])]
+
elif fields_def[field[len(prefix)]]['type']=='many2one':
res = False
if line[i]:
relation = fields_def[field[len(prefix)]]['relation']
res2 = self.pool.get(relation).name_search(cr, uid,
- line[i], [], operator='=')
+ line[i], [], operator='=', context=context)
res = (res2 and res2[0][0]) or False
if not res:
- warning += ('Relation not found: ' + line[i] + \
- ' on ' + relation + ' !\n')
+ warning += [_("Relation not found: %s on '%s'")%(line[i],relation)]
logger.notifyChannel("import", netsvc.LOG_WARNING,
- 'Relation not found: ' + line[i] + \
- ' on ' + relation + ' !\n')
+ _("Relation not found: %s on '%s'")%(line[i],relation))
elif fields_def[field[len(prefix)]]['type']=='many2many':
res = []
if line[i]:
relation = fields_def[field[len(prefix)]]['relation']
for word in line[i].split(config.get('csv_internal_sep')):
res2 = self.pool.get(relation).name_search(cr,
- uid, word, [], operator='=')
+ uid, word, [], operator='=', context=context)
res3 = (res2 and res2[0][0]) or False
if not res3:
- warning += ('Relation not found: ' + \
- line[i] + ' on '+relation + ' !\n')
+ warning += [_("Relation not found: %s on '%s'")%(line[i],relation)]
logger.notifyChannel("import",
netsvc.LOG_WARNING,
- 'Relation not found: ' + line[i] + \
- ' on '+relation + ' !\n')
+ _("Relation not found: %s on '%s'")%(line[i],relation))
else:
res.append(res3)
if len(res):
if field[0] not in todo:
todo.append(field[len(prefix)])
#
- # Import one2many fields
+ # Import one2many, many2many fields
#
nbrmax = 1
for field in todo:
- newfd = self.pool.get(fields_def[field]['relation']).fields_get(
+ relation_obj = self.pool.get(fields_def[field]['relation'])
+ newfd = relation_obj.fields_get(
cr, uid, context=context)
- res = process_liness(self, datas, prefix + [field], newfd, position)
- (newrow, max2, w2, translate2, data_id2) = res
+ res = process_liness(self, datas, prefix + [field], current_module, relation_obj._name, newfd, position)
+ (newrow, max2, w2, translate2, data_id2, data_res_id2) = res
nbrmax = max(nbrmax, max2)
- warning = warning + w2
- reduce(lambda x, y: x and y, newrow)
+ warning = warning + w2
+ reduce(lambda x, y: x and y, newrow)
row[field] = (reduce(lambda x, y: x or y, newrow.values()) and \
- [(0, 0, newrow)]) or []
+ [(0, 0, newrow)]) or []
i = max2
while (position+i)<len(datas):
ok = True
if not ok:
break
- (newrow, max2, w2, translate2, data_id2) = process_liness(
- self, datas, prefix+[field], newfd, position+i)
+ (newrow, max2, w2, translate2, data_id2, data_res_id2) = process_liness(
+ self, datas, prefix+[field], current_module, relation_obj._name, newfd, position+i)
warning = warning+w2
if reduce(lambda x, y: x or y, newrow.values()):
- row[field].append((0, 0, newrow))
+ row[field].append((0, 0, newrow))
i += max2
nbrmax = max(nbrmax, i)
for i in range(max(nbrmax, 1)):
#if datas:
datas.pop(0)
- result = (row, nbrmax, warning, translate, data_id)
+ result = (row, nbrmax, warning, translate, data_id, data_res_id)
return result
fields_def = self.fields_get(cr, uid, context=context)
counter += 1
res = {}
#try:
- (res, other, warning, translate, data_id) = \
- process_liness(self, datas, [], fields_def)
- if warning:
+ (res, other, warning, translate, data_id, res_id) = \
+ process_liness(self, datas, [], current_module, self._name, fields_def)
+ if len(warning):
cr.rollback()
- return (-1, res, warning, '')
- id = self.pool.get('ir.model.data')._update(cr, uid, self._name,
- current_module, res, xml_id=data_id, mode=mode,
- noupdate=noupdate)
+ return (-1, res, 'Line ' + str(counter) +' : ' + '!\n'.join(warning), '')
+
+ try:
+ id = ir_model_data_obj._update(cr, uid, self._name,
+ current_module, res, xml_id=data_id, mode=mode,
+ noupdate=noupdate, res_id=res_id)
+ except Exception, e:
+ import psycopg2
+ if isinstance(e,psycopg2.IntegrityError):
+ msg= _('Insertion Failed!')
+ for key in self.pool._sql_error.keys():
+ if key in e[0]:
+ msg = self.pool._sql_error[key]
+ break
+ return (-1, res,'Line ' + str(counter) +' : ' + msg,'' )
+
for lang in translate:
context2 = context.copy()
context2['lang'] = lang
attrs = {}
try:
if node.getAttribute('name') in self._columns:
- relation = self._columns[node.getAttribute('name')]._obj
+ column = self._columns[node.getAttribute('name')]
else:
- relation = self._inherit_fields[node.getAttribute('name')][2]._obj
+ column = self._inherit_fields[node.getAttribute('name')][2]
except:
- relation = False
+ column = False
- if relation:
+ if column:
+ relation = column._obj
childs = False
views = {}
for f in node.childNodes:
}
attrs = {'views': views}
if node.hasAttribute('widget') and node.getAttribute('widget')=='selection':
- # We can not use the domain has it is defined according to the record !
- attrs['selection'] = self.pool.get(relation).name_search(cr, user, '', context=context)
- if not attrs.get('required',False):
+                    # We can not use the 'string' domain as it is defined according to the record !
+ dom = []
+ if column._domain and not isinstance(column._domain, (str, unicode)):
+ dom = column._domain
+
+ attrs['selection'] = self.pool.get(relation).name_search(cr, user, '', dom, context=context)
+ if (node.hasAttribute('required') and not int(node.getAttribute('required'))) or not column.required:
attrs['selection'].append((False,''))
fields[node.getAttribute('name')] = attrs
rolesobj = self.pool.get('res.roles')
usersobj = self.pool.get('res.users')
- buttons = xpath.Evaluate("//button[@type != 'object']", node)
+ buttons = (n for n in node.getElementsByTagName('button') if n.getAttribute('type') != 'object')
for button in buttons:
ok = True
if user != 1: # admin user has all roles
arch = node.toxml(encoding="utf-8").replace('\t', '')
fields = self.fields_get(cr, user, fields_def.keys(), context)
for field in fields_def:
- if field in fields:
+ if field == 'id':
+                # sometimes, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
+ fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
+ elif field in fields:
fields[field].update(fields_def[field])
else:
cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
def copy(self, cr, uid, id, default=None, context=None):
raise _('The copy method is not implemented on this object !')
+ def exists(self, cr, uid, id, context=None):
+ raise _('The exists method is not implemented on this object !')
+
def read_string(self, cr, uid, id, langs, fields=None, context=None):
res = {}
res2 = {}
- self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
+ self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
if not fields:
fields = self._columns.keys() + self._inherit_fields.keys()
for lang in langs:
return res
def write_string(self, cr, uid, id, langs, vals, context=None):
- self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
+ self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
for lang in langs:
for field in vals:
if field in self._columns:
raise NotImplementedError()
class orm_memory(orm_template):
- _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count']
+ _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
_inherit_fields = {}
_max_count = 200
_max_hours = 1
self._validate(cr, user, [id_new], context)
wf_service = netsvc.LocalService("workflow")
wf_service.trg_write(user, self._name, id_new, cr)
- self.vaccum(cr, user)
return id_new
def create(self, cr, user, vals, context=None):
+ self.vaccum(cr, user)
self.next_id += 1
id_new = self.next_id
default = []
self._validate(cr, user, [id_new], context)
wf_service = netsvc.LocalService("workflow")
wf_service.trg_create(user, self._name, id_new, cr)
- self.vaccum(cr, user)
return id_new
def default_get(self, cr, uid, fields_list, context=None):
def _check_removed_columns(self, cr, log=False):
# nothing to check in memory...
pass
+
+ def exists(self, cr, uid, id, context=None):
+ return id in self.datas
class orm(orm_template):
_sql_constraints = []
_table = None
- _protected = ['read','write','create','default_get','perm_read','unlink','fields_get','fields_view_get','search','name_get','distinct_field_get','name_search','copy','import_data','search_count']
+ _protected = ['read','write','create','default_get','perm_read','unlink','fields_get','fields_view_get','search','name_get','distinct_field_get','name_search','copy','import_data','search_count', 'exists']
def _parent_store_compute(self, cr):
logger = netsvc.Logger()
('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
]
- if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size != f.size:
+ # !!! Avoid reduction of varchar field !!!
+ if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
+ # if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size != f.size:
logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed size" % (k, self._table))
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
import random
_rel1 = field['relation'].replace('.', '_')
_rel2 = field['model'].replace('.', '_')
- _rel_name = 'x_%s_%s_%s_rel' %(_rel1, _rel2, random.randint(0, 10000))
+ _rel_name = 'x_%s_%s_%s_rel' %(_rel1, _rel2, field['name'])
self._columns[field['name']] = getattr(fields, field['ttype'])(field['relation'], _rel_name, 'id1', 'id2', **attrs)
else:
self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
self._inherits_reload_src()
def fields_get(self, cr, user, fields=None, context=None):
- read_access = self.pool.get('ir.model.access').check(cr, user, self._name, 'write', raise_exception=False)
+ ira = self.pool.get('ir.model.access')
+ read_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
+ ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
return super(orm, self).fields_get(cr, user, fields, context, read_access)
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
if not context:
context = {}
- self.pool.get('ir.model.access').check(cr, user, self._name, 'read')
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
if not fields:
fields = self._columns.keys() + self._inherit_fields.keys()
select = ids
if d1:
cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) AND %s ORDER BY %s' % \
(','.join(fields_pre2 + ['id']), self._table,
- ','.join([str(x) for x in sub_ids]), d1,
- self._order), d2)
+ ','.join(['%s' for x in sub_ids]), d1,
+ self._order),sub_ids + d2)
if not cr.rowcount == len({}.fromkeys(sub_ids)):
raise except_orm(_('AccessError'),
_('You try to bypass an access rule (Document type: %s).') % self._description)
else:
cr.execute('SELECT %s FROM \"%s\" WHERE id IN (%s) ORDER BY %s' % \
(','.join(fields_pre2 + ['id']), self._table,
- ','.join([str(x) for x in sub_ids]),
- self._order))
+ ','.join(['%s' for x in sub_ids]),
+ self._order), sub_ids)
res.extend(cr.dictfetchall())
else:
res = map(lambda x: {'id': x}, ids)
self._check_concurrency(cr, ids, context)
- self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink')
+ self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
+
+ properties = self.pool.get('ir.property')
+ domain = [('res_id', '=', False),
+ ('value', 'in', ['%s,%s' % (self._name, i) for i in ids]),
+ ]
+ if properties.search(cr, uid, domain, context=context):
+ raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
wf_service = netsvc.LocalService("workflow")
- for id in ids:
- wf_service.trg_delete(uid, self._name, id, cr)
+ for oid in ids:
+ wf_service.trg_delete(uid, self._name, oid, cr)
#cr.execute('select * from '+self._table+' where id in ('+str_d+')', ids)
#res = cr.dictfetchall()
if d1:
cr.execute('SELECT id FROM "'+self._table+'" ' \
'WHERE id IN ('+str_d+')'+d1, sub_ids+d2)
- if not cr.rowcount == len({}.fromkeys(ids)):
+ if not cr.rowcount == len(sub_ids):
raise except_orm(_('AccessError'),
_('You try to bypass an access rule (Document type: %s).') % \
self._description)
cr.execute('delete from "'+self._table+'" ' \
'where id in ('+str_d+')', sub_ids)
- for order, object, ids, fields in result_store:
+ for order, object, store_ids, fields in result_store:
if object<>self._name:
obj = self.pool.get(object)
- cr.execute('select id from '+obj._table+' where id in ('+','.join(map(str, ids))+')')
- ids = map(lambda x: x[0], cr.fetchall())
- if ids:
- obj._store_set_values(cr, uid, ids, fields, context)
+ cr.execute('select id from '+obj._table+' where id in ('+','.join(map(str, store_ids))+')')
+ rids = map(lambda x: x[0], cr.fetchall())
+ if rids:
+ obj._store_set_values(cr, uid, rids, fields, context)
return True
#
if not edit:
vals.pop(field)
+
if not context:
context = {}
if not ids:
self._check_concurrency(cr, ids, context)
- self.pool.get('ir.model.access').check(cr, user, self._name, 'write')
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
+
upd0 = []
upd1 = []
else:
cr.execute('update "'+self._table+'" set '+string.join(upd0, ',')+' ' \
'where id in ('+ids_str+')', upd1)
-
+
if totranslate:
for f in direct:
if self._columns[f].translate:
- self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f])
+ src_trans = self.pool.get(self._name).read(cr,user,ids,[f])
+ self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans[0][f])
+
# call the 'set' method of fields which are not classic_write
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
+
+        # default element in context must be removed when calling a one2many or many2many
+ rel_context = context.copy()
+ for c in context.items():
+ if c[0].startswith('default_'):
+ del rel_context[c[0]]
+
+ result = []
for field in upd_todo:
for id in ids:
- self._columns[field].set(cr, self, id, field, vals[field], user, context=context)
+ result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
for table in self._inherits:
col = self._inherits[table]
# It's the first node of the parent: position = parent_left+1
if not position:
- if not vals[self._parent_name]:
+ if not vals[self._parent_name]:
position = 1
else:
cr.execute('select parent_left from '+self._table+' where id=%s', (vals[self._parent_name],))
cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance,pleft-position+distance, pleft+distance, pright+distance))
- result = self._store_get_values(cr, user, ids, vals.keys(), context)
+ result += self._store_get_values(cr, user, ids, vals.keys(), context)
for order, object, ids, fields in result:
self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
"""
if not context:
context = {}
- self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)
default = []
for (t, c) in self._inherits.items():
if c in vals:
avoid_table.append(t)
- for f in self._columns.keys(): # + self._inherit_fields.keys():
- if not f in vals:
+ for f in self._columns.keys():
+ if (not f in vals) and (not isinstance(self._columns[f], fields.property)):
default.append(f)
for f in self._inherit_fields.keys():
- if (not f in vals) and (self._inherit_fields[f][0] not in avoid_table):
+ if (not f in vals) and (self._inherit_fields[f][0] not in avoid_table) and (not isinstance(self._inherit_fields[f][2], fields.property)):
default.append(f)
if len(default):
if dv in self._columns and self._columns[dv]._type == 'many2many':
if default_values[dv] and isinstance(default_values[dv][0], (int, long)):
default_values[dv] = [(6, 0, default_values[dv])]
-
vals.update(default_values)
tocreate = {}
upd0 += ','+self._inherits[table]
upd1 += ',%s'
upd2.append(id)
-
+
+        #Start : Set bool fields to be False if they are not touched (to make search more powerful)
+ bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
+
+ for bool_field in bool_fields:
+ if bool_field not in vals:
+ vals[bool_field] = False
+ #End
+
for field in vals:
- if self._columns[field]._classic_write:
- upd0 = upd0 + ',"' + field + '"'
- upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
- upd2.append(self._columns[field]._symbol_set[1](vals[field]))
- else:
- upd_todo.append(field)
+ if field in self._columns:
+ if self._columns[field]._classic_write:
+ upd0 = upd0 + ',"' + field + '"'
+ upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
+ upd2.append(self._columns[field]._symbol_set[1](vals[field]))
+ else:
+ upd_todo.append(field)
if field in self._columns \
and hasattr(self._columns[field], 'selection') \
and vals[field]:
upd2.append(user)
cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
- for field in upd_todo:
- self._columns[field].set(cr, self, id_new, field, vals[field], user, context)
-
- self._validate(cr, user, [id_new], context)
if self._parent_store:
if self.pool._init:
cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1,pleft+2,id_new))
+
+        # default element in context must be removed when calling a one2many or many2many
+ rel_context = context.copy()
+ for c in context.items():
+ if c[0].startswith('default_'):
+ del rel_context[c[0]]
+
+ result = []
+ for field in upd_todo:
+ result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
+ self._validate(cr, user, [id_new], context)
- result = self._store_get_values(cr, user, [id_new], vals.keys(), context)
- for order, object, ids, fields in result:
- self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
+ if not context.get('no_store_function', False):
+ result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
+ result.sort()
+ done = []
+ for order, object, ids, fields2 in result:
+ if not (object, ids, fields2) in done:
+ self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
+ done.append((object, ids, fields2))
wf_service = netsvc.LocalService("workflow")
wf_service.trg_create(user, self._name, id_new, cr)
result = {}
fncts = self.pool._store_function.get(self._name, [])
for fnct in range(len(fncts)):
+ if fncts[fnct][3]:
+ ok = False
+ for f in (fields or []):
+ if f in fncts[fnct][3]:
+ ok = True
+ break
+ if not ok:
+ continue
+
result.setdefault(fncts[fnct][0], {})
ids2 = fncts[fnct][2](self,cr, uid, ids, context)
for id in filter(None, ids2):
if 'state' in self._defaults:
default['state'] = self._defaults['state'](self, cr, uid, context)
data = self.read(cr, uid, [id], context=context)[0]
- fields = self.fields_get(cr, uid)
+ fields = self.fields_get(cr, uid, context=context)
trans_data=[]
for f in fields:
ftype = fields[f]['type']
def copy(self, cr, uid, id, default=None, context=None):
trans_obj = self.pool.get('ir.translation')
data, trans_data = self.copy_data(cr, uid, id, default, context)
- new_id=self.create(cr, uid, data)
+ new_id = self.create(cr, uid, data, context)
for record in trans_data:
del record['id']
- record['res_id']=new_id
- trans_obj.create(cr,uid,record)
+ record['res_id'] = new_id
+ trans_obj.create(cr, uid, record, context)
return new_id
+ def exists(self, cr, uid, id, context=None):
+ cr.execute('SELECT count(1) FROM "%s" where id=%%s' % (self._table,), (id,))
+ return bool(cr.fetchone()[0])
+
def check_recursion(self, cr, uid, ids, parent=None):
if not parent:
parent = self._parent_name