# This is similar to _inherit_fields but:
# 1. includes self fields,
# 2. uses column_info instead of a triple.
+ # Warning: _all_columns is deprecated, use _fields instead
_all_columns = {}
_table = None
# basic setup of field
field.set_class_name(cls, name)
- if field.store:
+ if field.store or field.column:
cls._columns[name] = field.to_column()
else:
# remove potential column that may be overridden by field
"""
def add(name, field):
""" add `field` with the given `name` if it does not exist yet """
- if name not in cls._columns and name not in cls._fields:
+ if name not in cls._fields:
cls._add_field(name, field)
# cyclic import
pool._store_function[model].sort(key=lambda x: x[4])
@classmethod
- def _init_manual_fields(cls, pool, cr):
+ def _init_manual_fields(cls, cr):
# Check whether the query is already done
- if pool.fields_by_model is not None:
- manual_fields = pool.fields_by_model.get(cls._name, [])
+ if cls.pool.fields_by_model is not None:
+ manual_fields = cls.pool.fields_by_model.get(cls._name, [])
else:
cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (cls._name, 'manual'))
manual_fields = cr.dictfetchall()
for field in manual_fields:
- if field['name'] in cls._columns:
+ if field['name'] in cls._fields:
continue
attrs = {
+ 'manual': True,
'string': field['field_description'],
'required': bool(field['required']),
'readonly': bool(field['readonly']),
- 'domain': eval(field['domain']) if field['domain'] else None,
- 'size': field['size'] or None,
- 'ondelete': field['on_delete'],
- 'translate': (field['translate']),
- 'manual': True,
- '_prefetch': False,
- #'select': int(field['select_level'])
}
- if field['serialization_field_id']:
- cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
- attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
- if field['ttype'] in ['many2one', 'one2many', 'many2many']:
- attrs.update({'relation': field['relation']})
- cls._columns[field['name']] = fields.sparse(**attrs)
- elif field['ttype'] == 'selection':
- cls._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
- elif field['ttype'] == 'reference':
- cls._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
+            # FIXME: sparse fields (field['serialization_field_id']) are not
+            # supported by the new field API yet, so they are silently ignored
+ if field['ttype'] in ('char', 'text', 'html'):
+ attrs['translate'] = bool(field['translate'])
+ attrs['size'] = field['size'] or None
+ elif field['ttype'] in ('selection', 'reference'):
+ attrs['selection'] = eval(field['selection'])
elif field['ttype'] == 'many2one':
- cls._columns[field['name']] = fields.many2one(field['relation'], **attrs)
+ attrs['comodel_name'] = field['relation']
+ attrs['ondelete'] = field['on_delete']
+ attrs['domain'] = eval(field['domain']) if field['domain'] else None
elif field['ttype'] == 'one2many':
- cls._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
+ attrs['comodel_name'] = field['relation']
+ attrs['inverse_name'] = field['relation_field']
+ attrs['domain'] = eval(field['domain']) if field['domain'] else None
elif field['ttype'] == 'many2many':
+ attrs['comodel_name'] = field['relation']
_rel1 = field['relation'].replace('.', '_')
_rel2 = field['model'].replace('.', '_')
- _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
- cls._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
- else:
- cls._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
+ attrs['relation'] = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
+ attrs['column1'] = 'id1'
+ attrs['column2'] = 'id2'
+ attrs['domain'] = eval(field['domain']) if field['domain'] else None
+ cls._add_field(field['name'], Field.by_type[field['ttype']](**attrs))
@classmethod
def _init_constraints_onchanges(cls):
cls._onchange_methods = defaultdict(list)
for attr, func in getmembers(cls, callable):
if hasattr(func, '_constrains'):
- if not all(name in cls._fields for name in func._constrains):
- _logger.warning("@constrains%r parameters must be field names", func._constrains)
cls._constraint_methods.append(func)
if hasattr(func, '_onchange'):
- if not all(name in cls._fields for name in func._onchange):
- _logger.warning("@onchange%r parameters must be field names", func._onchange)
for name in func._onchange:
cls._onchange_methods[name].append(func)
cls._fields = {}
above = cls.__bases__[0]
for attr, field in getmembers(above, Field.__instancecheck__):
- if not field.inherited:
- cls._add_field(attr, field.new())
+ cls._add_field(attr, field.new())
# introduce magic fields
cls._add_magic_fields()
# register stuff about low-level function fields and custom fields
cls._init_function_fields(pool, cr)
- cls._init_manual_fields(pool, cr)
-
- # process _inherits
- cls._inherits_check()
- cls._inherits_reload()
# register constraints and onchange methods
cls._init_constraints_onchanges()
- # check defaults
- for k in cls._defaults:
- assert k in cls._fields, \
- "Model %s has a default for nonexiting field %s" % (cls._name, k)
-
- # restart columns
- for column in cls._columns.itervalues():
- column.restart()
-
- # validate rec_name
- if cls._rec_name:
- assert cls._rec_name in cls._fields, \
- "Invalid rec_name %s for model %s" % (cls._rec_name, cls._name)
- elif 'name' in cls._fields:
- cls._rec_name = 'name'
-
# prepare ormcache, which must be shared by all instances of the model
cls._ormcache = {}
* "id" is the External ID for the record
* ".id" is the Database ID for the record
"""
- columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
- # Fake columns to avoid special cases in extractor
- columns[None] = fields.char('rec_name')
- columns['id'] = fields.char('External ID')
- columns['.id'] = fields.integer('Database ID')
+ from openerp.fields import Char, Integer
+ fields = dict(self._fields)
+ # Fake fields to avoid special cases in extractor
+ fields[None] = Char('rec_name')
+ fields['id'] = Char('External ID')
+ fields['.id'] = Integer('Database ID')
# m2o fields can't be on multiple lines so exclude them from the
# is_relational field rows filter, but special-case it later on to
# be handled with relational fields (as it can have subfields)
- is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
+ is_relational = lambda field: fields[field].relational
get_o2m_values = itemgetter_tuple(
[index for index, field in enumerate(fields_)
- if columns[field[0]]._type == 'one2many'])
+ if fields[field[0]].type == 'one2many'])
get_nono2m_values = itemgetter_tuple(
[index for index, field in enumerate(fields_)
- if columns[field[0]]._type != 'one2many'])
+ if fields[field[0]].type != 'one2many'])
# Checks if the provided row has any non-empty non-relational field
def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
return any(g(row)) and not any(f(row))
for relfield in set(
field[0] for field in fields_
if is_relational(field[0])):
- column = columns[relfield]
# FIXME: how to not use _obj without relying on fields_get?
- Model = self.pool[column._obj]
+ Model = self.pool[fields[relfield].comodel_name]
# get only cells for this sub-field, should be strictly
- # non-empty, field path [None] is for name_get column
+ # non-empty, field path [None] is for name_get field
indices, subfields = zip(*((index, field[1:] or [None])
for index, field in enumerate(fields_)
if field[0] == relfield))
"""
if context is None: context = {}
Converter = self.pool['ir.fields.converter']
- columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
Translation = self.pool['ir.translation']
+ fields = dict(self._fields)
field_names = dict(
(f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
context.get('lang'))
- or column.string))
- for f, column in columns.iteritems())
+ or field.string))
+ for f, field in fields.iteritems())
convert = Converter.for_model(cr, uid, self, context=context)
order_field = order_split[0]
if order_field in groupby_fields:
- if self._all_columns[order_field.split(':')[0]].column._type == 'many2one':
+ if self._fields[order_field.split(':')[0]].type == 'many2one':
order_clause = self._generate_order_by(order_part, query).replace('ORDER BY ', '')
if order_clause:
orderby_terms.append(order_clause)
field name, type, time informations, qualified name, ...
"""
split = gb.split(':')
- field_type = self._all_columns[split[0]].column._type
+ field_type = self._fields[split[0]].type
gb_function = split[1] if len(split) == 2 else None
temporal = field_type in ('date', 'datetime')
tz_convert = field_type == 'datetime' and context.get('tz') in pytz.all_timezones
assert gb in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
groupby_def = self._columns.get(gb) or (self._inherit_fields.get(gb) and self._inherit_fields.get(gb)[2])
assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
- if not (gb in self._all_columns):
+ if not (gb in self._fields):
# Don't allow arbitrary values, as this would be a SQL injection vector!
raise except_orm(_('Invalid group_by'),
_('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(gb,))
f for f in fields
if f not in ('id', 'sequence')
if f not in groupby_fields
- if f in self._all_columns
- if self._all_columns[f].column._type in ('integer', 'float')
- if getattr(self._all_columns[f].column, '_classic_write')]
+ if f in self._fields
+ if self._fields[f].type in ('integer', 'float')
+ if getattr(self._fields[f].base_field.column, '_classic_write')
+ ]
- field_formatter = lambda f: (self._all_columns[f].column.group_operator or 'sum', self._inherits_join_calc(f, query), f)
+ field_formatter = lambda f: (self._fields[f].group_operator or 'sum', self._inherits_join_calc(f, query), f)
select_terms = ["%s(%s) AS %s" % field_formatter(f) for f in aggregated_fields]
for gb in annotated_groupbys:
_schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
self._table, column['attname'])
- def _save_constraint(self, cr, constraint_name, type):
+ def _save_constraint(self, cr, constraint_name, type, definition):
"""
Record the creation of a constraint for this model, to make it possible
to delete it later when the module is uninstalled. Type can be either
return
assert type in ('f', 'u')
cr.execute("""
- SELECT 1 FROM ir_model_constraint, ir_module_module
+ SELECT type, definition FROM ir_model_constraint, ir_module_module
WHERE ir_model_constraint.module=ir_module_module.id
AND ir_model_constraint.name=%s
AND ir_module_module.name=%s
""", (constraint_name, self._module))
- if not cr.rowcount:
+ constraints = cr.dictfetchone()
+ if not constraints:
cr.execute("""
INSERT INTO ir_model_constraint
- (name, date_init, date_update, module, model, type)
+ (name, date_init, date_update, module, model, type, definition)
VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
(SELECT id FROM ir_module_module WHERE name=%s),
- (SELECT id FROM ir_model WHERE model=%s), %s)""",
- (constraint_name, self._module, self._name, type))
+ (SELECT id FROM ir_model WHERE model=%s), %s, %s)""",
+ (constraint_name, self._module, self._name, type, definition))
+ elif constraints['type'] != type or (definition and constraints['definition'] != definition):
+ cr.execute("""
+ UPDATE ir_model_constraint
+ SET date_update=now() AT TIME ZONE 'UTC', type=%s, definition=%s
+ WHERE name=%s AND module = (SELECT id FROM ir_module_module WHERE name=%s)""",
+ (type, definition, constraint_name, self._module))
def _save_relation_table(self, cr, relation_table):
"""
""" Create the foreign keys recorded by _auto_init. """
for t, k, r, d in self._foreign_keys:
cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
- self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
+ self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f', False)
cr.commit()
del self._foreign_keys
for (key, con, _) in self._sql_constraints:
conname = '%s_%s' % (self._table, key)
- self._save_constraint(cr, conname, 'u')
- cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
- existing_constraints = cr.dictfetchall()
+            # SELECT a literal 1 alongside the definition so the query still
+            # returns a row when the constraint exists in pg_constraint (pgc)
+            # but has no matching ir_model_constraint (imc) record
+ cr.execute("""SELECT definition, 1
+ FROM ir_model_constraint imc
+ RIGHT JOIN pg_constraint pgc
+ ON (pgc.conname = imc.name)
+ WHERE pgc.conname=%s
+ """, (conname, ))
+ existing_constraints = cr.dictfetchone()
sql_actions = {
'drop': {
'execute': False,
# constraint does not exists:
sql_actions['add']['execute'] = True
sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
- elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
+ elif unify_cons_text(con) != existing_constraints['definition']:
# constraint exists but its definition has changed:
sql_actions['drop']['execute'] = True
- sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
+ sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints['definition'] or '', )
sql_actions['add']['execute'] = True
sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
# we need to add the constraint:
+ self._save_constraint(cr, conname, 'u', unify_cons_text(con))
sql_actions = [item for item in sql_actions.values()]
sql_actions.sort(key=lambda x: x['order'])
for sql_action in [action for action in sql_actions if action['execute']]:
#
@classmethod
- def _inherits_reload_src(cls):
- """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
- for model in cls.pool.values():
- if cls._name in model._inherits:
- model._inherits_reload()
-
- @classmethod
def _inherits_reload(cls):
- """ Recompute the _inherit_fields mapping.
-
- This will also call itself on each inherits'd child model.
+ """ Recompute the _inherit_fields mapping, and inherited fields. """
+ struct = {}
+ fields = {}
+ for parent_model, parent_field in cls._inherits.iteritems():
+ parent = cls.pool[parent_model]
+ # old-api struct for _inherit_fields
+ for name, column in parent._columns.iteritems():
+ struct[name] = (parent_model, parent_field, column, parent_model)
+ for name, source in parent._inherit_fields.iteritems():
+ struct[name] = (parent_model, parent_field, source[2], source[3])
+ # new-api fields for _fields
+ for name, field in parent._fields.iteritems():
+ fields[name] = field.new(
+ inherited=True,
+ related=(parent_field, name),
+ related_sudo=False,
+ )
- """
- res = {}
- for table in cls._inherits:
- other = cls.pool[table]
- for col in other._columns.keys():
- res[col] = (table, cls._inherits[table], other._columns[col], table)
- for col in other._inherit_fields.keys():
- res[col] = (table, cls._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
- cls._inherit_fields = res
+ # old-api stuff
+ cls._inherit_fields = struct
cls._all_columns = cls._get_column_infos()
- # interface columns with new-style fields
- for attr, column in cls._columns.items():
- if attr not in cls._fields:
- cls._add_field(attr, column.to_field())
-
- # interface inherited fields with new-style fields (note that the
- # reverse order is for being consistent with _all_columns above)
- for parent_model, parent_field in reversed(cls._inherits.items()):
- for attr, field in cls.pool[parent_model]._fields.iteritems():
- if attr not in cls._fields:
- cls._add_field(attr, field.new(
- inherited=True,
- related=(parent_field, attr),
- related_sudo=False,
- ))
-
- cls._inherits_reload_src()
+ # add inherited fields that are not redefined locally
+ for name, field in fields.iteritems():
+ if name not in cls._fields:
+ cls._add_field(name, field)
@classmethod
def _get_column_infos(cls):
@api.model
def _prepare_setup_fields(self):
""" Prepare the setup of fields once the models have been loaded. """
- for field in self._fields.itervalues():
- field.reset()
+ type(self)._setup_done = False
+ for name, field in self._fields.items():
+ if field.inherited:
+ del self._fields[name]
+ else:
+ field.reset()
@api.model
- def _setup_fields(self, partial=False):
+ def _setup_fields(self):
""" Setup the fields (dependency triggers, etc). """
- for field in self._fields.itervalues():
- try:
- field.setup(self.env)
- except Exception:
- if not partial:
- raise
+ cls = type(self)
+ if cls._setup_done:
+ return
+ cls._setup_done = True
+
+ # first make sure that parent models are all set up
+ for parent in self._inherits:
+ self.env[parent]._setup_fields()
+
+ # retrieve custom fields
+ if not self._context.get('_setup_fields_partial'):
+ cls._init_manual_fields(self._cr)
+
+ # retrieve inherited fields
+ cls._inherits_check()
+ cls._inherits_reload()
+
+ # set up fields
+ for field in cls._fields.itervalues():
+ field.setup(self.env)
+
+ # update columns (fields may have changed)
+ for name, field in cls._fields.iteritems():
+ if field.column:
+ cls._columns[name] = field.to_column()
# group fields by compute to determine field.computed_fields
fields_by_compute = defaultdict(list)
- for field in self._fields.itervalues():
+ for field in cls._fields.itervalues():
if field.compute:
field.computed_fields = fields_by_compute[field.compute]
field.computed_fields.append(field)
else:
field.computed_fields = []
+ # check constraints
+ for func in cls._constraint_methods:
+ if not all(name in cls._fields for name in func._constrains):
+ _logger.warning("@constrains%r parameters must be field names", func._constrains)
+        for name in cls._onchange_methods:
+            if name not in cls._fields:
+                # NOTE(review): cls._onchange_methods[name] is a *list* of
+                # methods (defaultdict(list)), so the former `func._onchange`
+                # raised AttributeError instead of warning; report the
+                # offending field name directly
+                _logger.warning("@onchange%r parameters must be field names", [name])
+
+ # check defaults
+ for name in cls._defaults:
+ assert name in cls._fields, \
+                "Model %s has a default for nonexistent field %s" % (cls._name, name)
+
+ # validate rec_name
+ if cls._rec_name:
+ assert cls._rec_name in cls._fields, \
+ "Invalid rec_name %s for model %s" % (cls._rec_name, cls._name)
+ elif 'name' in cls._fields:
+ cls._rec_name = 'name'
+ elif 'x_name' in cls._fields:
+ cls._rec_name = 'x_name'
+
def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
""" fields_get([fields])
if isinstance(ids, (int, long)):
ids = [ids]
- result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)
+ result_store = self._store_get_values(cr, uid, ids, self._fields.keys(), context)
# for recomputing new-style fields
recs = self.browse(cr, uid, ids, context)
for key, val in vals.iteritems():
field = self._fields.get(key)
if field:
- if field.store or field.inherited:
+ if field.column or field.inherited:
old_vals[key] = val
if field.inverse and not field.inherited:
new_vals[key] = val
cr.execute(query, (tuple(ids),))
parents_changed = map(operator.itemgetter(0), cr.fetchall())
- upd0 = []
- upd1 = []
+ updates = [] # list of (column, expr) or (column, pattern, value)
upd_todo = []
updend = []
direct = []
totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
for field in vals:
- field_column = self._all_columns.get(field) and self._all_columns.get(field).column
- if field_column and field_column.deprecated:
- _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
+ ffield = self._fields.get(field)
+ if ffield and ffield.deprecated:
+ _logger.warning('Field %s.%s is deprecated: %s', self._name, field, ffield.deprecated)
if field in self._columns:
- if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
- if (not totranslate) or not self._columns[field].translate:
- upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
- upd1.append(self._columns[field]._symbol_set[1](vals[field]))
+ column = self._columns[field]
+ if hasattr(column, 'selection') and vals[field]:
+ self._check_selection_field_value(cr, user, field, vals[field], context=context)
+ if column._classic_write and not hasattr(column, '_fnct_inv'):
+ if (not totranslate) or not column.translate:
+ updates.append((field, '%s', column._symbol_set[1](vals[field])))
direct.append(field)
else:
upd_todo.append(field)
else:
updend.append(field)
- if field in self._columns \
- and hasattr(self._columns[field], 'selection') \
- and vals[field]:
- self._check_selection_field_value(cr, user, field, vals[field], context=context)
if self._log_access:
- upd0.append('write_uid=%s')
- upd0.append("write_date=(now() at time zone 'UTC')")
- upd1.append(user)
+ updates.append(('write_uid', '%s', user))
+ updates.append(('write_date', "(now() at time zone 'UTC')"))
direct.append('write_uid')
direct.append('write_date')
- if len(upd0):
+ if updates:
self.check_access_rule(cr, user, ids, 'write', context=context)
+ query = 'UPDATE "%s" SET %s WHERE id IN %%s' % (
+ self._table, ','.join('"%s"=%s' % u[:2] for u in updates),
+ )
+ params = tuple(u[2] for u in updates if len(u) > 2)
for sub_ids in cr.split_for_in_conditions(ids):
- cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
- 'where id IN %s', upd1 + [sub_ids])
+ cr.execute(query, params + (sub_ids,))
if cr.rowcount != len(sub_ids):
raise MissingError(_('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
for key, val in vals.iteritems():
field = self._fields.get(key)
if field:
- if field.store or field.inherited:
+ if field.column or field.inherited:
old_vals[key] = val
if field.inverse and not field.inherited:
new_vals[key] = val
for f in value.keys():
if f in field_dict[id]:
value.pop(f)
- upd0 = []
- upd1 = []
+ updates = [] # list of (column, pattern, value)
for v in value:
if v not in val:
continue
- if self._columns[v]._type == 'many2one':
+ column = self._columns[v]
+ if column._type == 'many2one':
try:
value[v] = value[v][0]
except:
pass
- upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
- upd1.append(self._columns[v]._symbol_set[1](value[v]))
- upd1.append(id)
- if upd0 and upd1:
- cr.execute('update "' + self._table + '" set ' + \
- ','.join(upd0) + ' where id = %s', upd1)
+ updates.append((v, '%s', column._symbol_set[1](value[v])))
+ if updates:
+ query = 'UPDATE "%s" SET %s WHERE id = %%s' % (
+ self._table, ','.join('"%s"=%s' % u[:2] for u in updates),
+ )
+ params = tuple(u[2] for u in updates)
+ cr.execute(query, params + (id,))
else:
for f in val:
+ column = self._columns[f]
# use admin user for accessing objects having rules defined on store fields
- result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
+ result = column.get(cr, self, ids, f, SUPERUSER_ID, context=context)
for r in result.keys():
if field_flag:
if r in field_dict.keys():
if f in field_dict[r]:
result.pop(r)
for id, value in result.items():
- if self._columns[f]._type == 'many2one':
+ if column._type == 'many2one':
try:
value = value[0]
except:
pass
- cr.execute('update "' + self._table + '" set ' + \
- '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
+ query = 'UPDATE "%s" SET "%s"=%%s WHERE id = %%s' % (
+ self._table, f,
+ )
+ cr.execute(query, (column._symbol_set[1](value), id))
# invalidate and mark new-style fields to recompute
self.browse(cr, uid, ids, context).modified(fields)
domain = domain[:]
# if the object has a field named 'active', filter out all inactive
# records unless they were explicitely asked for
- if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
+ if 'active' in self._fields and active_test and context.get('active_test', True):
if domain:
# the item[0] trick below works for domain items and '&'/'|'/'!'
# operators too
# build a black list of fields that should not be copied
blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
+ whitelist = set(name for name, field in self._fields.iteritems() if not field.inherited)
+
def blacklist_given_fields(obj):
# blacklist the fields that are given by inheritance
for other, field_to_other in obj._inherits.items():
if field_to_other in default:
# all the fields of 'other' are given by the record: default[field_to_other],
# except the ones redefined in self
- blacklist.update(set(self.pool[other]._all_columns) - set(self._columns))
+ blacklist.update(set(self.pool[other]._fields) - whitelist)
else:
blacklist_given_fields(self.pool[other])
# blacklist deprecated fields
- for name, field in obj._columns.items():
+ for name, field in obj._fields.iteritems():
if field.deprecated:
blacklist.add(name)
blacklist_given_fields(self)
- fields_to_copy = dict((f,fi) for f, fi in self._all_columns.iteritems()
- if fi.column.copy
+ fields_to_copy = dict((f,fi) for f, fi in self._fields.iteritems()
+ if fi.copy
if f not in default
if f not in blacklist)
raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
res = dict(default)
- for f, colinfo in fields_to_copy.iteritems():
- field = colinfo.column
- if field._type == 'many2one':
+ for f, field in fields_to_copy.iteritems():
+ if field.type == 'many2one':
res[f] = data[f] and data[f][0]
- elif field._type == 'one2many':
- other = self.pool[field._obj]
+ elif field.type == 'one2many':
+ other = self.pool[field.comodel_name]
# duplicate following the order of the ids because we'll rely on
# it later for copying translations in copy_translation()!
lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
# the lines are duplicated using the wrong (old) parent, but then
# are reassigned to the correct one thanks to the (0, 0, ...)
res[f] = [(0, 0, line) for line in lines if line]
- elif field._type == 'many2many':
+ elif field.type == 'many2many':
res[f] = [(6, 0, data[f])]
else:
res[f] = data[f]
seen_map[self._name].append(old_id)
trans_obj = self.pool.get('ir.translation')
- # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
- fields = self.fields_get(cr, uid, context=context)
- for field_name, field_def in fields.items():
+ for field_name, field in self._fields.iteritems():
+ if not field.copy:
+ continue
# removing the lang to compare untranslated values
context_wo_lang = dict(context, lang=None)
old_record, new_record = self.browse(cr, uid, [old_id, new_id], context=context_wo_lang)
# we must recursively copy the translations for o2o and o2m
- if field_def['type'] == 'one2many':
- target_obj = self.pool[field_def['relation']]
+ if field.type == 'one2many':
+ target_obj = self.pool[field.comodel_name]
# here we rely on the order of the ids to match the translations
# as foreseen in copy_data()
old_children = sorted(r.id for r in old_record[field_name])
for (old_child, new_child) in zip(old_children, new_children):
target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
# and for translatable fields we keep them for copy
- elif field_def.get('translate'):
+ elif getattr(field, 'translate', False):
if field_name in self._columns:
trans_name = self._name + "," + field_name
target_id = new_id
:return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
"""
- field = self._all_columns.get(field_name)
- field = field.column if field else None
- if not field or field._type != 'many2many' or field._obj != self._name:
+ field = self._fields.get(field_name)
+ if not (field and field.type == 'many2many' and
+ field.comodel_name == self._name and field.store):
# field must be a many2many on itself
raise ValueError('invalid field_name: %r' % (field_name,))
- query = 'SELECT distinct "%s" FROM "%s" WHERE "%s" IN %%s' % (field._id2, field._rel, field._id1)
+ query = 'SELECT distinct "%s" FROM "%s" WHERE "%s" IN %%s' % \
+ (field.column2, field.relation, field.column1)
ids_parent = ids[:]
while ids_parent:
ids_parent2 = []
result, record_ids = [], list(command[2])
# read the records and apply the updates
- other_model = self.pool[self._all_columns[field_name].column._obj]
+ other_model = self.pool[self._fields[field_name].comodel_name]
for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
record.update(updates.get(record['id'], {}))
result.append(record)
def _mapped_func(self, func):
""" Apply function `func` on all records in `self`, and return the
- result as a list or a recordset (if `func` return recordsets).
+ result as a list or a recordset (if `func` returns recordsets).
"""
- vals = [func(rec) for rec in self]
- val0 = vals[0] if vals else func(self)
- if isinstance(val0, BaseModel):
- return reduce(operator.or_, vals, val0)
- return vals
+ if self:
+ vals = [func(rec) for rec in self]
+ return reduce(operator.or_, vals) if isinstance(vals[0], BaseModel) else vals
+ else:
+ vals = func(self)
+ return vals if isinstance(vals, BaseModel) else []
def mapped(self, func):
""" Apply `func` on all records in `self`, and return the result as a
func = lambda rec: filter(None, rec.mapped(name))
return self.browse([rec.id for rec in self if func(rec)])
- def sorted(self, key=None):
- """ Return the recordset `self` ordered by `key` """
+ def sorted(self, key=None, reverse=False):
+ """ Return the recordset `self` ordered by `key`.
+
+ :param key: either a function of one argument that returns a
+ comparison key for each record, or ``None``, in which case
+                records are ordered according to the model's default order
+
+ :param reverse: if ``True``, return the result in reverse order
+ """
if key is None:
- return self.search([('id', 'in', self.ids)])
+ recs = self.search([('id', 'in', self.ids)])
+ return self.browse(reversed(recs._ids)) if reverse else recs
else:
- return self.browse(map(int, sorted(self, key=key)))
+ return self.browse(map(int, sorted(self, key=key, reverse=reverse)))
def update(self, values):
""" Update record `self[0]` with `values`. """