[FIX] translations: fix tests to adapt to new duplication behaviour and remove contex...
[odoo/odoo.git] / openerp / osv / orm.py
index 1457013..0196839 100644 (file)
@@ -383,20 +383,30 @@ class browse_record(object):
                 raise KeyError(error_msg)
 
             # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
-            if col._prefetch:
+            if col._prefetch and not col.groups:
                 # gen the list of "local" (ie not inherited) fields which are classic or many2one
-                fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
+                field_filter = lambda x: x[1]._classic_write and x[1]._prefetch and not x[1].groups
+                fields_to_fetch = filter(field_filter, self._table._columns.items())
                 # gen the list of inherited fields
                 inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                 # complete the field list with the inherited fields which are classic or many2one
-                fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
+                fields_to_fetch += filter(field_filter, inherits)
             # otherwise we fetch only that field
             else:
                 fields_to_fetch = [(name, col)]
+
             ids = filter(lambda id: name not in self._data[id], self._data.keys())
             # read the results
             field_names = map(lambda x: x[0], fields_to_fetch)
-            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
+            try:
+                field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
+            except (openerp.exceptions.AccessError, except_orm):
+                if len(ids) == 1:
+                    raise
+                # prefetching attempt failed, perhaps we're violating ACL restrictions involuntarily
+                _logger.info('Prefetching attempt for fields %s on %s failed for ids %s, re-trying just for id %s', field_names, self._model._name, ids, self._id)
+                ids = [self._id]
+                field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
 
             # TODO: improve this, very slow for reports
             if self._fields_process:
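
A minimal sketch of the fallback pattern introduced above, assuming only that read() raises openerp.exceptions.AccessError when any requested record is restricted (the helper name is hypothetical):

    def read_with_prefetch_fallback(model, cr, uid, prefetch_ids, wanted_id,
                                    field_names, context=None):
        # Try the cheap batched read first; it may include records the
        # current user is not allowed to see.
        try:
            return model.read(cr, uid, prefetch_ids, field_names,
                              context=context, load="_classic_write")
        except openerp.exceptions.AccessError:
            if prefetch_ids == [wanted_id]:
                raise  # nothing to narrow down: the denial is genuine
            # Retry for the one record the caller actually asked for.
            return model.read(cr, uid, [wanted_id], field_names,
                              context=context, load="_classic_write")
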
@@ -454,7 +464,7 @@ class browse_record(object):
                             new_data[field_name] = browse_null()
                     elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                         new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
-                    elif field_column._type in ('reference'):
+                    elif field_column._type == 'reference':
                         if result_line[field_name]:
                             if isinstance(result_line[field_name], browse_record):
                                 new_data[field_name] = result_line[field_name]
@@ -629,7 +639,8 @@ class MetaModel(type):
             self._module = module_name
 
         # Remember which models to instantiate for this module.
-        self.module_to_models.setdefault(self._module, []).append(self)
+        if not self._custom:
+            self.module_to_models.setdefault(self._module, []).append(self)
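
A toy illustration of the new guard, assuming only that custom models (those built from ir.model records rather than Python modules) carry _custom = True: they are kept out of the per-module registry, so module install/upgrade never tries to re-instantiate them.

    class ToyMetaModel(type):
        module_to_models = {}

        def __init__(cls, name, bases, attrs):
            super(ToyMetaModel, cls).__init__(name, bases, attrs)
            if getattr(cls, '_custom', False):
                return  # database-defined model: belongs to no module
            module = getattr(cls, '_module', 'base')
            ToyMetaModel.module_to_models.setdefault(module, []).append(cls)
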
 
 
 # Definition of log access columns, automatically added to models if
@@ -666,6 +677,7 @@ class BaseModel(object):
     _name = None
     _columns = {}
     _constraints = []
+    _custom = False
     _defaults = {}
     _rec_name = None
     _parent_name = 'parent_id'
@@ -683,9 +695,6 @@ class BaseModel(object):
 
     # Transience
     _transient = False # True in a TransientModel
-    _transient_max_count = None
-    _transient_max_hours = None
-    _transient_check_time = 20
 
     # structure:
     #  { 'parent_model': 'm2o_field', ... }
@@ -865,10 +874,6 @@ class BaseModel(object):
                 parent_names = [parent_names]
             else:
                 name = cls._name
-            # for res.parnter.address compatiblity, should be remove in v7
-            if 'res.partner.address' in parent_names:
-                parent_names.pop(parent_names.index('res.partner.address'))
-                parent_names.append('res.partner')
             if not name:
                 raise TypeError('_name is mandatory in case of multiple inheritance')
 
@@ -949,7 +954,8 @@ class BaseModel(object):
         # managed by the metaclass.
         module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
         if cls not in module_model_list:
-            module_model_list.append(cls)
+            if not cls._custom:
+                module_model_list.append(cls)
 
         # Since we don't return an instance here, the __init__
         # method won't be called.
@@ -1010,53 +1016,60 @@ class BaseModel(object):
                     raise except_orm('Error',
                         ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                 self.pool._store_function.setdefault(object, [])
-                self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
-                self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
+                t = (self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length)
+                if t not in self.pool._store_function[object]:
+                    self.pool._store_function[object].append(t)
+                    self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
 
         for (key, _, msg) in self._sql_constraints:
             self.pool._sql_error[self._table+'_'+key] = msg
 
         # Load manual fields
 
-        cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
-        if cr.fetchone():
+        # Check whether the query was already done for all modules, or if we
+        # need to do it ourselves.
+        if self.pool.fields_by_model is not None:
+            manual_fields = self.pool.fields_by_model.get(self._name, [])
+        else:
             cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
-            for field in cr.dictfetchall():
-                if field['name'] in self._columns:
-                    continue
-                attrs = {
-                    'string': field['field_description'],
-                    'required': bool(field['required']),
-                    'readonly': bool(field['readonly']),
-                    'domain': eval(field['domain']) if field['domain'] else None,
-                    'size': field['size'],
-                    'ondelete': field['on_delete'],
-                    'translate': (field['translate']),
-                    'manual': True,
-                    #'select': int(field['select_level'])
-                }
+            manual_fields = cr.dictfetchall()
+        for field in manual_fields:
+            if field['name'] in self._columns:
+                continue
+            attrs = {
+                'string': field['field_description'],
+                'required': bool(field['required']),
+                'readonly': bool(field['readonly']),
+                'domain': eval(field['domain']) if field['domain'] else None,
+                'size': field['size'] or None,
+                'ondelete': field['on_delete'],
+                'translate': (field['translate']),
+                'manual': True,
+                #'select': int(field['select_level'])
+            }
+
+            if field['serialization_field_id']:
+                cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
+                attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
+                if field['ttype'] in ['many2one', 'one2many', 'many2many']:
+                    attrs.update({'relation': field['relation']})
+                self._columns[field['name']] = fields.sparse(**attrs)
+            elif field['ttype'] == 'selection':
+                self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
+            elif field['ttype'] == 'reference':
+                self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
+            elif field['ttype'] == 'many2one':
+                self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
+            elif field['ttype'] == 'one2many':
+                self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
+            elif field['ttype'] == 'many2many':
+                _rel1 = field['relation'].replace('.', '_')
+                _rel2 = field['model'].replace('.', '_')
+                _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
+                self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
+            else:
+                self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
 
-                if field['serialization_field_id']:
-                    cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
-                    attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
-                    if field['ttype'] in ['many2one', 'one2many', 'many2many']:
-                        attrs.update({'relation': field['relation']})
-                    self._columns[field['name']] = fields.sparse(**attrs)
-                elif field['ttype'] == 'selection':
-                    self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
-                elif field['ttype'] == 'reference':
-                    self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
-                elif field['ttype'] == 'many2one':
-                    self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
-                elif field['ttype'] == 'one2many':
-                    self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
-                elif field['ttype'] == 'many2many':
-                    _rel1 = field['relation'].replace('.', '_')
-                    _rel2 = field['model'].replace('.', '_')
-                    _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
-                    self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
-                else:
-                    self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
         self._inherits_check()
         self._inherits_reload()
         if not self._sequence:
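
The guard added near the top of this hunk stops _store_function entries from accumulating when a model is set up more than once. In isolation, with trigger tuples shaped as above (the sort key sits at index 4):

    def register_store_trigger(store_map, target_model, trigger):
        funcs = store_map.setdefault(target_model, [])
        if trigger not in funcs:
            funcs.append(trigger)
            funcs.sort(key=lambda t: t[4])  # keep triggers in priority order
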
@@ -1076,7 +1089,7 @@ class BaseModel(object):
 
         # Validate rec_name
         if self._rec_name is not None:
-            assert self._rec_name in self._columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
+            assert self._rec_name in self._all_columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
         else:
             self._rec_name = 'name'
 
@@ -1361,11 +1374,9 @@ class BaseModel(object):
                      noupdate=noupdate, res_id=id, context=context))
                 cr.execute('RELEASE SAVEPOINT model_load_save')
             except psycopg2.Warning, e:
-                _logger.exception('Failed to import record %s', record)
                 messages.append(dict(info, type='warning', message=str(e)))
                 cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
             except psycopg2.Error, e:
-                _logger.exception('Failed to import record %s', record)
                 messages.append(dict(
                     info, type='error',
                     **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
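
The two _logger.exception() calls are dropped because the savepoint wrapper already turns each failure into a user-visible message; logging the full traceback as well was redundant noise during imports. The per-record pattern, sketched in the surrounding Python 2 style (import_one_record stands in for the real work):

    cr.execute('SAVEPOINT model_load_save')
    try:
        import_one_record(record)
        cr.execute('RELEASE SAVEPOINT model_load_save')
    except psycopg2.Error, e:
        # record the failure for the caller, then undo this record only
        messages.append(dict(info, type='error',
                             **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
        cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
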
@@ -1741,7 +1752,7 @@ class BaseModel(object):
                 views = {}
                 xml = "<form>"
                 for f in node:
-                    if f.tag in ('field'):
+                    if f.tag == 'field':
                         xml += etree.tostring(f, encoding="utf-8")
                 xml += "</form>"
                 new_xml = etree.fromstring(encode(xml))
@@ -2010,7 +2021,7 @@ class BaseModel(object):
         view = etree.Element('calendar', string=self._description)
         etree.SubElement(view, 'field', self._rec_name_fallback(cr, user, context))
 
-        if (self._date_name not in self._columns):
+        if self._date_name not in self._columns:
             date_found = False
             for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                 if dt in self._columns:
@@ -2031,7 +2042,7 @@ class BaseModel(object):
                                 self._columns, 'date_delay'):
                 raise except_orm(
                     _('Invalid Object Architecture!'),
-                    _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % (self._name)))
+                    _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name))
 
         return view
 
@@ -2189,7 +2200,7 @@ class BaseModel(object):
                 are applied
 
             """
-            sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
+            sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name, context=context)
             for (view_arch, view_id) in sql_inherit:
                 source = apply_inheritance_specs(source, view_arch, view_id)
                 source = apply_view_inheritance(cr, user, source, view_id)
@@ -2411,7 +2422,7 @@ class BaseModel(object):
            :rtype: tuple
            :return: the :meth:`~.name_get` pair value for the newly-created record.
         """
-        rec_id = self.create(cr, uid, {self._rec_name: name}, context);
+        rec_id = self.create(cr, uid, {self._rec_name: name}, context)
         return self.name_get(cr, uid, [rec_id], context)[0]
 
     # private implementation of name_search, allows passing a dedicated user for the name_get part to
@@ -2675,7 +2686,7 @@ class BaseModel(object):
         groupby = group_by
         for r in cr.dictfetchall():
             for fld, val in r.items():
-                if val == None: r[fld] = False
+                if val is None: r[fld] = False
             alldata[r['id']] = r
             del r['id']
 
@@ -2844,8 +2855,12 @@ class BaseModel(object):
         """
         Record the creation of a constraint for this model, to make it possible
         to delete it later when the module is uninstalled. Type can be either
-        'f' or 'u' depending on the constraing being a foreign key or not.
+        'f' or 'u' depending on the constraint being a foreign key or not.
         """
+        if not self._module:
+            # no need to save constraints for custom models as they're not part
+            # of any module
+            return
         assert type in ('f', 'u')
         cr.execute("""
             SELECT 1 FROM ir_model_constraint, ir_module_module
@@ -2889,15 +2904,15 @@ class BaseModel(object):
             # usually because they could block deletion due to the FKs.
             # So unless stated otherwise we default them to ondelete=cascade.
             ondelete = ondelete or 'cascade'
-        self._foreign_keys.append((self._table, source_field, dest_model._table, ondelete or 'set null'))
-        _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
-            self._table, source_field, dest_model._table, ondelete)
+        fk_def = (self._table, source_field, dest_model._table, ondelete or 'set null')
+        self._foreign_keys.add(fk_def)
+        _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
 
     # unchecked version: for custom cases, such as m2m relationships
     def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
-        self._foreign_keys.append((source_table, source_field, dest_model._table, ondelete or 'set null'))
-        _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
-            source_table, source_field, dest_model._table, ondelete)
+        fk_def = (source_table, source_field, dest_model._table, ondelete or 'set null')
+        self._foreign_keys.add(fk_def)
+        _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
 
     def _drop_constraint(self, cr, source_table, constraint_name):
         cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
@@ -2928,18 +2943,22 @@ class BaseModel(object):
                 cons, = constraints
                 if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
                     or cons['foreign_table'] != dest_model._table:
+                    # Wrong FK: drop it and recreate
                     _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
                                   source_table, cons['constraint_name'])
                     self._drop_constraint(cr, source_table, cons['constraint_name'])
-                    self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
-                # else it's all good, nothing to do!
+                else:
+                    # it's all good, nothing to do!
+                    return
             else:
                 # Multiple FKs found for the same field, drop them all, and re-create
                 for cons in constraints:
                     _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
                                   source_table, cons['constraint_name'])
                     self._drop_constraint(cr, source_table, cons['constraint_name'])
-                self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
+
+        # (re-)create the FK
+        self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
 
 
 
@@ -2961,7 +2980,7 @@ class BaseModel(object):
           _auto_end).
 
         """
-        self._foreign_keys = []
+        self._foreign_keys = set()
         raise_on_invalid_object_name(self._name)
         if context is None:
             context = {}
@@ -3097,7 +3116,7 @@ class BaseModel(object):
                                     else:
                                         default = self._defaults[k]
 
-                                    if (default is not None):
+                                    if default is not None:
                                         ss = self._columns[k]._symbol_set
                                         query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
                                         cr.execute(query, (ss[1](default),))
@@ -3176,7 +3195,7 @@ class BaseModel(object):
                             # and add constraints if needed
                             if isinstance(f, fields.many2one):
                                 if not self.pool.get(f._obj):
-                                    raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
+                                    raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,))
                                 dest_model = self.pool.get(f._obj)
                                 ref = dest_model._table
                                 # ir_actions is inherited so foreign key doesn't work on it
@@ -3262,8 +3281,8 @@ class BaseModel(object):
         elif not self._columns['parent_right'].select:
             _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
                           self._table)
-        if self._columns[self._parent_name].ondelete != 'cascade':
-            _logger.error("The column %s on object %s must be set as ondelete='cascade'",
+        if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'):
+            _logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'",
                           self._parent_name, self._name)
 
         cr.commit()
@@ -3303,7 +3322,7 @@ class BaseModel(object):
             # TODO the condition could use fields_get_keys().
             if f._fields_id not in other._columns.keys():
                 if f._fields_id not in other._inherit_fields.keys():
-                    raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
+                    raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,))
 
     def _m2m_raise_or_create_relation(self, cr, f):
         m2m_tbl, col1, col2 = f._sql_names(self)
@@ -3311,7 +3330,7 @@ class BaseModel(object):
         cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
         if not cr.dictfetchall():
             if not self.pool.get(f._obj):
-                raise except_orm('Programming Error', ('Many2Many destination model does not exist: `%s`') % (f._obj,))
+                raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,))
             dest_model = self.pool.get(f._obj)
             ref = dest_model._table
             cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2))
@@ -3445,8 +3464,8 @@ class BaseModel(object):
                 _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, self._name)
                 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
                                                              required=True, ondelete="cascade")
-            elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() != "cascade":
-                _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.', field_name, self._name)
+            elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() not in ("cascade", "restrict"):
+                _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, self._name)
                 self._columns[field_name].required = True
                 self._columns[field_name].ondelete = "cascade"
 
@@ -3485,7 +3504,7 @@ class BaseModel(object):
 
         :param cr: database cursor
         :param user: current user id
-        :param fields: list of fields
+        :param allfields: list of fields
         :param context: context arguments, like lang, time zone
         :return: dictionary of field dictionaries, each one describing a field of the business object
         :raise AccessError: * if user has no create/write rights on the requested object
@@ -3550,7 +3569,7 @@ class BaseModel(object):
             if field_name not in self._all_columns:
                 return True
             field = self._all_columns[field_name].column
-            if field.groups:
+            if user != SUPERUSER_ID and field.groups:
                 return self.user_has_groups(cr, user, groups=field.groups, context=context)
             else:
                 return True
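
With the user != SUPERUSER_ID test added, field-level groups now restrict everyone except the superuser. Reduced to a sketch:

    def field_visible(user, field_groups, user_has_groups):
        # the superuser sees every field; others need one of the field's groups
        if user != SUPERUSER_ID and field_groups:
            return user_has_groups(field_groups)
        return True
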
@@ -3614,7 +3633,7 @@ class BaseModel(object):
             context = {}
         if not ids:
             return []
-        if fields_to_read == None:
+        if fields_to_read is None:
             fields_to_read = self._columns.keys()
 
         # Construct a clause for the security rules.
@@ -3658,15 +3677,16 @@ class BaseModel(object):
         else:
             res = map(lambda x: {'id': x}, ids)
 
-        for f in fields_pre:
-            if f == self.CONCURRENCY_CHECK_FIELD:
-                continue
-            if self._columns[f].translate:
-                ids = [x['id'] for x in res]
-                #TODO: optimize out of this loop
-                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
-                for r in res:
-                    r[f] = res_trans.get(r['id'], False) or r[f]
+        if context.get('lang'):
+            for f in fields_pre:
+                if f == self.CONCURRENCY_CHECK_FIELD:
+                    continue
+                if self._columns[f].translate:
+                    ids = [x['id'] for x in res]
+                    #TODO: optimize out of this loop
+                    res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context['lang'], ids)
+                    for r in res:
+                        r[f] = res_trans.get(r['id'], False) or r[f]
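
Gating the loop on context.get('lang') skips translation lookups entirely when no language is requested, instead of always querying with an 'en_US' fallback. The overlay itself, sketched with a hypothetical fetch helper:

    if context.get('lang'):
        # one lookup per translatable field, then overlay onto the raw rows
        trans = fetch_translations(context['lang'], field, ids)
        for row in rows:
            row[field] = trans.get(row['id'], False) or row[field]
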
 
         for table in self._inherits:
             col = self._inherits[table]
@@ -4428,7 +4448,6 @@ class BaseModel(object):
             upd1 += ",%s,(now() at time zone 'UTC'),%s,(now() at time zone 'UTC')"
             upd2.extend((user, user))
         cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
-        self.check_access_rule(cr, user, [id_new], 'create', context=context)
         upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
 
         if self._parent_store and not context.get('defer_parent_store_computation'):
@@ -4481,6 +4500,7 @@ class BaseModel(object):
                 self.name_get(cr, user, [id_new], context=context)[0][1] + \
                 "' " + _("created.")
             self.log(cr, user, id_new, message, True, context=context)
+        self.check_access_rule(cr, user, [id_new], 'create', context=context)
         self._workflow_trigger(cr, user, [id_new], 'trg_create', context=context)
         return id_new
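
check_access_rule() moves from just after the INSERT to the end of create(), so ir.rule domains are evaluated against the finished record (function fields stored, parent tree updated) rather than a half-written row. The resulting shape of create(), with hypothetical helper names:

    id_new = insert_row(vals)                   # the raw SQL INSERT
    write_non_classic_fields(id_new, upd_todo)  # function/o2m/m2m values
    update_parent_store(id_new)
    log_creation_message(id_new)
    self.check_access_rule(cr, user, [id_new], 'create', context=context)
    self._workflow_trigger(cr, user, [id_new], 'trg_create', context=context)
    return id_new
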
 
@@ -4525,9 +4545,13 @@ class BaseModel(object):
                 if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]
 
         mapping = {}
+        fresults = {}
         for function in to_compute:
-            # use admin user for accessing objects having rules defined on store fields
-            target_ids = [id for id in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id]
+            fid = id(function[id_mapping_fnct_])
+            if fid not in fresults:
+                # use admin user for accessing objects having rules defined on store fields
+                fresults[fid] = [id2 for id2 in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id2]
+            target_ids = fresults[fid]
 
             # the compound key must consider the priority and model name
             key = (function[priority_], function[model_name_])
@@ -4548,8 +4572,8 @@ class BaseModel(object):
             functions_ids_maps = {}
             # function_ids_maps =
             #   { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
-            for id, functions in id_map.iteritems():
-                functions_ids_maps.setdefault(tuple(functions), []).append(id)
+            for fid, functions in id_map.iteritems():
+                functions_ids_maps.setdefault(tuple(functions), []).append(fid)
             for functions, ids in functions_ids_maps.iteritems():
                 call_map.setdefault((priority,model),[]).append((priority, model, ids,
                                                                  [f[func_field_to_compute_] for f in functions]))
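
The fresults cache avoids re-running an id-mapping function shared by several triggers; functions are keyed by identity (id()), not equality. In isolation, with the call signature assumed from the code above:

    fresults = {}

    def mapped_target_ids(mapping_fnct, model, cr, ids, context):
        key = id(mapping_fnct)      # identity: the same function object
        if key not in fresults:
            result = mapping_fnct(model, cr, SUPERUSER_ID, ids, context)
            fresults[key] = [i for i in result if i]
        return fresults[key]
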
@@ -4703,7 +4727,7 @@ class BaseModel(object):
                     new_tables = []
                     for table in added_tables:
                         # table is just a table name -> switch to the full alias
-                        if table == '"%s"' % (parent_table):
+                        if table == '"%s"' % parent_table:
                             new_tables.append('"%s" as "%s"' % (parent_table, parent_alias))
                         # table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated
                         else:
@@ -4788,8 +4812,8 @@ class BaseModel(object):
                 order_field = order_split[0].strip()
                 order_direction = order_split[1].strip() if len(order_split) == 2 else ''
                 inner_clause = None
-                if order_field == 'id':
-                    order_by_elements.append('"%s"."id" %s' % (self._table, order_direction))
+                if order_field == 'id' or (self._log_access and order_field in LOG_ACCESS_COLUMNS):
+                    order_by_elements.append('"%s"."%s" %s' % (self._table, order_field, order_direction))
                 elif order_field in self._columns:
                     order_column = self._columns[order_field]
                     if order_column._classic_read:
@@ -4807,6 +4831,8 @@ class BaseModel(object):
                         inner_clause = self._generate_m2o_order_by(order_field, query)
                     else:
                         continue  # ignore non-readable or "non-joinable" fields
+                else:
+                    raise ValueError(_("Sorting field %s not found on model %s") % (order_field, self._name))
                 if inner_clause:
                     if isinstance(inner_clause, list):
                         for clause in inner_clause:
@@ -4851,7 +4877,15 @@ class BaseModel(object):
             return res[0][0]
         cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
         res = cr.fetchall()
-        return [x[0] for x in res]
+
+        # TDE note: with auto_join, we could have several lines about the same result
+        # i.e. a lead with several unread messages; we uniquify the result using
+        # a fast way to do it while preserving order (http://www.peterbe.com/plog/uniqifiers-benchmark)
+        def _uniquify_list(seq):
+            seen = set()
+            return [x for x in seq if x not in seen and not seen.add(x)]
+
+        return _uniquify_list([x[0] for x in res])
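
_uniquify_list keeps the first occurrence of each id while preserving the SQL ordering: set.add() returns None, so "not seen.add(x)" is always true and merely records x as seen. For example:

    >>> _uniquify_list([7, 3, 7, 1, 3])
    [7, 3, 1]
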
 
     # returns the different values ever entered for one field
     # this is used, for example, in the client when the user hits enter on
@@ -4869,7 +4903,7 @@ class BaseModel(object):
         Copy given record's data with all its fields values
 
         :param cr: database cursor
-        :param user: current user id
+        :param uid: current user id
         :param id: id of the record to copy
         :param default: field values to override in the original values of the copied record
         :type default: dictionary
@@ -4896,10 +4930,7 @@ class BaseModel(object):
                 else:
                     default['state'] = self._defaults['state']
 
-        context_wo_lang = context.copy()
-        if 'lang' in context:
-            del context_wo_lang['lang']
-        data = self.read(cr, uid, [id,], context=context_wo_lang)
+        data = self.read(cr, uid, [id,], context=context)
         if data:
             data = data[0]
         else:
@@ -4959,36 +4990,50 @@ class BaseModel(object):
         # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
         fields = self.fields_get(cr, uid, context=context)
 
-        translation_records = []
         for field_name, field_def in fields.items():
+            # removing the lang to compare untranslated values
+            context_wo_lang = dict(context, lang=None)
+            old_record, new_record = self.browse(cr, uid, [old_id, new_id], context=context_wo_lang)
             # we must recursively copy the translations for o2o and o2m
             if field_def['type'] == 'one2many':
                 target_obj = self.pool.get(field_def['relation'])
-                old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
                 # here we rely on the order of the ids to match the translations
                 # as foreseen in copy_data()
-                old_children = sorted(old_record[field_name])
-                new_children = sorted(new_record[field_name])
+                old_children = sorted(r.id for r in old_record[field_name])
+                new_children = sorted(r.id for r in new_record[field_name])
                 for (old_child, new_child) in zip(old_children, new_children):
                     target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
             # and for translatable fields we keep them for copy
             elif field_def.get('translate'):
-                trans_name = ''
                 if field_name in self._columns:
                     trans_name = self._name + "," + field_name
+                    target_id = new_id
+                    source_id = old_id
                 elif field_name in self._inherit_fields:
                     trans_name = self._inherit_fields[field_name][0] + "," + field_name
-                if trans_name:
-                    trans_ids = trans_obj.search(cr, uid, [
-                            ('name', '=', trans_name),
-                            ('res_id', '=', old_id)
-                    ])
-                    translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))
+                    # get the id of the parent record to set the translation
+                    inherit_field_name = self._inherit_fields[field_name][1]
+                    target_id = new_record[inherit_field_name].id
+                    source_id = old_record[inherit_field_name].id
+                else:
+                    continue
 
-        for record in translation_records:
-            del record['id']
-            record['res_id'] = new_id
-            trans_obj.create(cr, uid, record, context=context)
+                trans_ids = trans_obj.search(cr, uid, [
+                        ('name', '=', trans_name),
+                        ('res_id', '=', source_id)
+                ])
+                user_lang = context.get('lang')
+                for record in trans_obj.read(cr, uid, trans_ids, context=context):
+                    del record['id']
+                    # remove source to avoid triggering _set_src
+                    del record['source']
+                    record.update({'res_id': target_id})
+                    if user_lang and user_lang == record['lang']:
+                        # 'source' to force the call to _set_src
+                        # 'value' needed if value is changed in copy(), want to see the new_value
+                        record['source'] = old_record[field_name]
+                        record['value'] = new_record[field_name]
+                    trans_obj.create(cr, uid, record, context=context)
 
 
     def copy(self, cr, uid, id, default=None, context=None):
@@ -5028,7 +5073,7 @@ class BaseModel(object):
         """
         if type(ids) in (int, long):
             ids = [ids]
-        query = 'SELECT id FROM "%s"' % (self._table)
+        query = 'SELECT id FROM "%s"' % self._table
         cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
         return [x[0] for x in cr.fetchall()]
 
@@ -5051,20 +5096,18 @@ class BaseModel(object):
         :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
         :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
         """
-
         if not parent:
             parent = self._parent_name
-        ids_parent = ids[:]
-        query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
-        while ids_parent:
-            ids_parent2 = []
-            for i in range(0, len(ids), cr.IN_MAX):
-                sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
-                cr.execute(query, (tuple(sub_ids_parent),))
-                ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
-            ids_parent = ids_parent2
-            for i in ids_parent:
-                if i in ids:
+
+        # must ignore 'active' flag, ir.rules, etc. => direct SQL query
+        query = 'SELECT "%s" FROM "%s" WHERE id = %%s' % (parent, self._table)
+        for id in ids:
+            current_id = id
+            while current_id is not None:
+                cr.execute(query, (current_id,))
+                result = cr.fetchone()
+                current_id = result[0] if result else None
+                if current_id == id:
                     return False
         return True
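
The rewritten check walks each record's parent chain one SQL round-trip at a time, deliberately bypassing the ORM (and thus the 'active' flag and ir.rule filtering). Its behaviour, reproduced over a plain in-memory parent mapping:

    def has_cycle(parents, start):
        current = parents.get(start)
        while current is not None:
            if current == start:
                return True           # walked back to the starting record
            current = parents.get(current)
        return False

    assert has_cycle({1: 2, 2: 3, 3: 1}, 1) is True
    assert has_cycle({1: 2, 2: None}, 1) is False
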
 
@@ -5131,20 +5174,22 @@ class BaseModel(object):
 
     def _transient_clean_rows_older_than(self, cr, seconds):
         assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
-        cr.execute("SELECT id FROM " + self._table + " WHERE"
-            " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp <"
-            " ((now() at time zone 'UTC') - interval %s)", ("%s seconds" % seconds,))
+        # Never delete rows used in the last 5 minutes
+        seconds = max(seconds, 300)
+        query = ("SELECT id FROM " + self._table + " WHERE"
+            " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
+            " < ((now() at time zone 'UTC') - interval %s)")
+        cr.execute(query, ("%s seconds" % seconds,))
         ids = [x[0] for x in cr.fetchall()]
         self.unlink(cr, SUPERUSER_ID, ids)
 
-    def _transient_clean_old_rows(self, cr, count):
-        assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
-        cr.execute(
-            "SELECT id, COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
-            " AS t FROM " + self._table +
-            " ORDER BY t LIMIT %s", (count,))
-        ids = [x[0] for x in cr.fetchall()]
-        self.unlink(cr, SUPERUSER_ID, ids)
+    def _transient_clean_old_rows(self, cr, max_count):
+        # Check how many rows we have in the table
+        cr.execute("SELECT count(*) AS row_count FROM " + self._table)
+        res = cr.fetchall()
+        if res[0][0] <= max_count:
+            return  # max not reached, nothing to do
+        self._transient_clean_rows_older_than(cr, 300)
 
     def _transient_vacuum(self, cr, uid, force=False):
         """Clean the transient records.
@@ -5154,12 +5199,21 @@ class BaseModel(object):
         Actual cleaning will happen only once every "_transient_check_time" calls.
         This means this method can be called frequently (e.g. whenever
         a new record is created).
+        Example with both max_hours and max_count active:
+        Suppose max_hours = 0.2 (i.e. 12 minutes), max_count = 20, and the table holds
+        55 rows: 10 created/changed in the last 5 minutes, another 12 created/changed
+        between 5 and 10 minutes ago, and the rest created/changed more than 12 minutes ago.
+        - the age-based vacuum will leave the 22 rows created/changed in the last 12 minutes
+        - the count-based vacuum will wipe out another 12 rows, not just 2; otherwise each
+          addition would immediately cause the maximum to be reached again
+        - the 10 rows created/changed in the last 5 minutes will NOT be deleted
         """
         assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
+        _transient_check_time = 20          # arbitrary limit on vacuum executions
         self._transient_check_count += 1
-        if (not force) and (self._transient_check_count % self._transient_check_time):
-            self._transient_check_count = 0
-            return True
+        if not force and (self._transient_check_count < _transient_check_time):
+            return True  # no vacuum cleaning this time
+        self._transient_check_count = 0
 
         # Age-based expiration
         if self._transient_max_hours:
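
The throttle also changes subtly: the old code reset the counter on the skip path, so the modulo test was evaluated against a counter that never grew and the vacuum effectively never fired without force. The new version resets only once a vacuum actually runs. As a toy:

    class VacuumThrottle(object):
        CHECK_EVERY = 20                 # mirrors _transient_check_time

        def __init__(self):
            self.count = 0

        def should_vacuum(self, force=False):
            self.count += 1
            if not force and self.count < self.CHECK_EVERY:
                return False             # skip this call
            self.count = 0               # reset only when vacuuming
            return True
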
@@ -5222,6 +5276,10 @@ class BaseModel(object):
     # for backward compatibility
     resolve_o2m_commands_to_record_dicts = resolve_2many_commands
 
+    def _register_hook(self, cr):
+        """ stuff to do right after the registry is built """
+        pass
+
 # keep this import here, at top it will cause dependency cycle errors
 import expression
 
@@ -5298,11 +5356,28 @@ def convert_pgerror_23502(model, fields, info, e):
         'message': message,
         'field': field_name,
     }
+def convert_pgerror_23505(model, fields, info, e):
+    m = re.match(r'^duplicate key (?P<field>\w+) violates unique constraint',
+                 str(e))
+    if not m or m.group('field') not in fields:
+        return {'message': unicode(e)}
+    field_name = m.group('field')
+    message = _(u"The value for the field '%s' already exists.") % field_name
+    field = fields.get(field_name)
+    if field:
+        message = _(u"%s This might be '%s' in the current model, or a field "
+                    u"of the same name in an o2m.") % (message, field['string'])
+    return {
+        'message': message,
+        'field': field_name,
+    }
 
 PGERROR_TO_OE = collections.defaultdict(
     # shape of mapped converters
     lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
     # not_null_violation
     '23502': convert_pgerror_23502,
+    # unique constraint error
+    '23505': convert_pgerror_23505,
 })
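
For reference, this mapping is consumed in load() (see the @@ -1361 hunk above): the pgcode selects a converter, and unknown codes fall back to the defaultdict's plain-message lambda. Sketched with a hypothetical import step:

    try:
        insert_record(record)
    except psycopg2.Error, e:
        converter = PGERROR_TO_OE[e.pgcode]    # e.g. '23505' -> unique handler
        messages.append(dict(info, type='error',
                             **converter(model, fields_by_name, info, e)))
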
 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: