def intersect(la, lb):
    """Return the items of ``la`` that also occur in ``lb``, keeping ``la``'s order."""
    return [item for item in la if item in lb]
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    The database-id suffix ``.id`` and the external-id suffix ``:id``
    are first normalized into path components (``foo.id`` -> ``foo/.id``,
    ``foo:id`` -> ``foo/id``), then the whole name is split on '/'.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    # only rewrite suffixes that are not already path components (hence [^/])
    fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
    return fixed_external_id.split('/')
+
class except_orm(Exception):
def __init__(self, name, value):
self.name = name
_order = 'id'
_sequence = None
_description = None
+
+ # structure:
+ # { 'parent_model': 'm2o_field', ... }
_inherits = {}
- # Mapping from inherits'd field name to triple (m, r, f)
- # where m is the model from which it is inherits'd,
- # r is the (local) field towards m,
- # and f is the _column object itself.
+
+ # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
+ # model from which it is inherits'd, r is the (local) field towards m, f
+ # is the _column object itself, and n is the original (i.e. top-most)
+ # parent model.
+ # Example:
+ # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
+ # field_column_obj, original_parent_model), ... }
_inherit_fields = {}
+
# Mapping field name/column_info object
# This is similar to _inherit_fields but:
# 1. includes self fields,
# 2. uses column_info instead of a triple.
_all_columns = {}
+
_table = None
_invalids = set()
_log_create = False
CONCURRENCY_CHECK_FIELD = '__last_update'
+
def log(self, cr, uid, id, message, secondary=False, context=None):
if context and context.get('disable_log'):
return True
else:
r = d['name']
else:
- break
+ postfix = 0
+ while True:
+ n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
+ if not model_data.search(cr, uid, [('name', '=', n)]):
+ break
+ postfix += 1
+ model_data.create(cr, uid, {
+ 'name': n,
+ 'model': self._name,
+ 'res_id': r['id'],
+ })
+ r = n
else:
r = r[f[i]]
# To display external name of selection field when its exported
cols = self._columns.copy()
for f in self._inherit_fields:
cols.update({f: self._inherit_fields[f][2]})
- def fsplit(fieldname):
- fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
- fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
- return fixed_external_id.split('/')
- fields_to_export = map(fsplit, fields_to_export)
+ fields_to_export = map(fix_import_export_id_paths, fields_to_export)
datas = []
for row in self.browse(cr, uid, ids, context):
datas += self.__export_row(cr, uid, row, fields_to_export, context)
"""
Import given data in given module
- :param cr: database cursor
- :param uid: current user id
- :param fields: list of fields
- :param data: data to import
- :param mode: 'init' or 'update' for record creation
- :param current_module: module name
- :param noupdate: flag for record creation
- :param context: context arguments, like lang, time zone,
- :param filename: optional file to store partial import state for recovery
- :rtype: tuple
-
This method is used when importing data via client menu.
Example of fields to import for a sale.order::
order_line/price_unit,
order_line/product_uom_qty,
order_line/product_uom/id (=xml_id)
+
+ This method returns a 4-tuple with the following structure:
+
+ * The first item is a return code, it returns either ``-1`` in case of an import error, or the number of successfully imported rows
+
+ :param cr: database cursor
+ :param uid: current user id
+ :param fields: list of fields
+ :param data: data to import
+ :param mode: 'init' or 'update' for record creation
+ :param current_module: module name
+ :param noupdate: flag for record creation
+ :param context: context arguments, like lang, time zone,
+ :param filename: optional file to store partial import state for recovery
+ :returns: 4-tuple of a return code, an errored resource, an error message and ???
+ :rtype: (int, dict|0, str|0, ''|0)
"""
if not context:
context = {}
- def _replace_field(x):
- x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
- return x.replace(':id','/id').split('/')
- fields = map(_replace_field, fields)
+ fields = map(fix_import_export_id_paths, fields)
logger = netsvc.Logger()
ir_model_data_obj = self.pool.get('ir.model.data')
:param source: a parent architecture to modify (with parent
modifications already applied)
- :param inherit_id: the database id of the parent view
+ :param inherit_id: the database view_id of the parent view
:return: a modified source where all the modifying architecture
are applied
"""
-
- sql_inherit = self.pool.get('ir.ui.view').get_inherit_views(cr, user, inherit_id, self._name)
- for (inherit, id) in sql_inherit:
- source = apply_inheritance_specs(source, inherit, id)
- source = apply_view_inheritance(cr, user, source, id)
+ sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
+ for (view_arch, view_id) in sql_inherit:
+ source = apply_inheritance_specs(source, view_arch, view_id)
+ source = apply_view_inheritance(cr, user, source, view_id)
return source
result = {'type': view_type, 'model': self._name}
args = [('active', '=', 1)]
if args:
import expression
- e = expression.expression(args)
- e.parse(cr, user, self, context)
+ e = expression.expression(cr, user, args, self, context)
res = e.exp
return res or []
break
f = True
for arg in result:
+ if len(arg) != 3:
+ # Amazing hack: orm_memory handles only simple domains.
+ continue
if arg[1] == '=':
val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
for id in ids:
self._check_access(uid, id, operation)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# Special columns automatically created by the ORM. Wrapping .keys() in
# list() keeps the concatenation valid whether keys() returns a list
# (Python 2) or a view object (Python 3); the result is unchanged.
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys()) + \
               ['internal.create_uid', 'internal.date_access'] # for osv_memory only
+
class orm(orm_template):
_sql_constraints = []
_table = None
_protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
__logger = logging.getLogger('orm')
__schema = logging.getLogger('orm.schema')
+
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
"""
Get the list of records in list view grouped by the given ``groupby`` fields
del d['id']
return data
- def _inherits_join_add(self, parent_model_name, query):
+ def _inherits_join_add(self, current_table, parent_model_name, query):
"""
Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
-
+ :param current_table: current model object
:param parent_model_name: name of the parent model for which the clauses should be added
:param query: query object on which the JOIN should be added
"""
- inherits_field = self._inherits[parent_model_name]
+ inherits_field = current_table._inherits[parent_model_name]
parent_model = self.pool.get(parent_model_name)
parent_table_name = parent_model._table
quoted_parent_table_name = '"%s"' % parent_table_name
if quoted_parent_table_name not in query.tables:
query.tables.append(quoted_parent_table_name)
- query.where_clause.append('("%s".%s = %s.id)' % (self._table, inherits_field, parent_table_name))
+ query.where_clause.append('(%s.%s = %s.id)' % (current_table._table, inherits_field, parent_table_name))
+
+
def _inherits_join_calc(self, field, query):
"""
while field in current_table._inherit_fields and not field in current_table._columns:
parent_model_name = current_table._inherit_fields[field][0]
parent_table = self.pool.get(parent_model_name)
- self._inherits_join_add(parent_model_name, query)
+ self._inherits_join_add(current_table, parent_model_name, query)
current_table = parent_table
return '"%s".%s' % (current_table._table, field)
pass
if not val_id:
raise except_orm(_('ValidateError'),
- _('Invalid value for reference field "%s" (last part must be a non-zero integer): "%s"') % (field, value))
+ _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
val = val_model
else:
val = value
elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
return
raise except_orm(_('ValidateError'),
- _('The value "%s" for the field "%s" is not in the selection') % (value, field))
+ _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
def _check_removed_columns(self, cr, log=False):
# iterate on the database columns to drop the NOT NULL constraints
# of fields which were required but have been removed (or will be added by another module)
columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
- columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
+ columns += MAGIC_COLUMNS
cr.execute("SELECT a.attname, a.attnotnull"
" FROM pg_class c, pg_attribute a"
" WHERE c.relname=%s"
column_data = self._select_column_data(cr)
for k, f in self._columns.iteritems():
- if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
+ if k in MAGIC_COLUMNS:
continue
# Don't update custom (also called manual) fields
if f.manual and not update_custom_fields:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
- cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
+ cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
self._table, k, f_pg_type, f._type, newname)
if not isinstance(f, fields.function) or f.store:
# add the missing field
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
- cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
+ cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
self.__schema.debug("Table '%s': added column '%s' with definition=%s",
self._table, k, get_pg_type(f)[1])
def _create_table(self, cr):
cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
- cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''")))
+ cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,))
self.__schema.debug("Table '%s': created", self._table)
def _add_log_columns(self, cr):
- logs = {
- 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
- 'create_date': 'TIMESTAMP',
- 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
- 'write_date': 'TIMESTAMP'
- }
- for k in logs:
+ for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
cr.execute("""
SELECT c.relname
FROM pg_class c, pg_attribute a
WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
- """, (self._table, k))
+ """, (self._table, field))
if not cr.rowcount:
- cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
+ cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
cr.commit()
self.__schema.debug("Table '%s': added column '%s' with definition=%s",
- self._table, k, logs[k])
+ self._table, field, field_def)
def _select_column_data(self, cr):
if f == order:
ok = False
if ok:
- self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order, length))
+ self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
for (key, _, msg) in self._sql_constraints:
for table in self._inherits:
other = self.pool.get(table)
for col in other._columns.keys():
- res[col] = (table, self._inherits[table], other._columns[col])
+ res[col] = (table, self._inherits[table], other._columns[col], table)
for col in other._inherit_fields.keys():
- res[col] = (table, self._inherits[table], other._inherit_fields[col][2])
+ res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
self._inherit_fields = res
self._all_columns = self._get_column_infos()
self._inherits_reload_src()
inherited field via _inherits) to a ``column_info`` struct
giving detailed columns """
result = {}
- for k, (parent, m2o, col) in self._inherit_fields.iteritems():
- result[k] = fields.column_info(k, col, parent, m2o)
+ for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
+ result[k] = fields.column_info(k, col, parent, m2o, original_parent)
for k, col in self._columns.iteritems():
result[k] = fields.column_info(k, col)
return result
res = []
if len(fields_pre):
def convert_field(f):
- f_qual = "%s.%s" % (self._table, f) # need fully-qualified references in case len(tables) > 1
+ f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
if f in ('create_date', 'write_date'):
return "date_trunc('second', %s) as %s" % (f_qual, f)
if f == self.CONCURRENCY_CHECK_FIELD:
upd_todo = []
for v in vals.keys():
if v in self._inherit_fields:
- (table, col, col_detail) = self._inherit_fields[v]
+ (table, col, col_detail, original_parent) = self._inherit_fields[v]
tocreate[table][v] = vals[v]
del vals[v]
else:
:return: [(priority, model_name, [record_ids,], [function_fields,])]
"""
- # FIXME: rewrite, cleanup, use real variable names
- # e.g.: http://pastie.org/1222060
- result = {}
- fncts = self.pool._store_function.get(self._name, [])
- for fnct in range(len(fncts)):
- if fncts[fnct][3]:
- ok = False
- if not fields:
- ok = True
- for f in (fields or []):
- if f in fncts[fnct][3]:
- ok = True
- break
- if not ok:
- continue
+ if fields is None: fields = []
+ stored_functions = self.pool._store_function.get(self._name, [])
+
+ # use indexed names for the details of the stored_functions:
+ model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)
- result.setdefault(fncts[fnct][0], {})
+ # only keep functions that should be triggered for the ``fields``
+ # being written to.
+ to_compute = [f for f in stored_functions \
+ if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]
+ mapping = {}
+ for function in to_compute:
# use admin user for accessing objects having rules defined on store fields
- ids2 = fncts[fnct][2](self, cr, ROOT_USER_ID, ids, context)
- for id in filter(None, ids2):
- result[fncts[fnct][0]].setdefault(id, [])
- result[fncts[fnct][0]][id].append(fnct)
- dict = {}
- for object in result:
- k2 = {}
- for id, fnct in result[object].items():
- k2.setdefault(tuple(fnct), [])
- k2[tuple(fnct)].append(id)
- for fnct, id in k2.items():
- dict.setdefault(fncts[fnct[0]][4], [])
- dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4], object, id, map(lambda x: fncts[x][1], fnct)))
- result2 = []
- tmp = dict.keys()
- tmp.sort()
- for k in tmp:
- result2 += dict[k]
- return result2
+ target_ids = [id for id in function[id_mapping_fnct_](self, cr, ROOT_USER_ID, ids, context) if id]
+
+ # the compound key must consider the priority and model name
+ key = (function[priority_], function[model_name_])
+ for target_id in target_ids:
+ mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))
+
+ # Here mapping looks like:
+ # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
+ # (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
+ # (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... }
+ # }
+
+ # Now we need to generate the batch function calls list
+ # call_map =
+ # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
+ call_map = {}
+ for ((priority,model), id_map) in mapping.iteritems():
+ functions_ids_maps = {}
+ # function_ids_maps =
+ # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
+ for id, functions in id_map.iteritems():
+ functions_ids_maps.setdefault(tuple(functions), []).append(id)
+ for functions, ids in functions_ids_maps.iteritems():
+ call_map.setdefault((priority,model),[]).append((priority, model, ids,
+ [f[func_field_to_compute_] for f in functions]))
+ ordered_keys = call_map.keys()
+ ordered_keys.sort()
+ result = []
+ if ordered_keys:
+ result = reduce(operator.add, (call_map[k] for k in ordered_keys))
+ return result
def _store_set_values(self, cr, uid, ids, fields, context):
"""Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
if domain:
import expression
- e = expression.expression(domain)
- e.parse(cr, user, self, context)
+ e = expression.expression(cr, user, domain, self, context)
tables = e.get_tables()
where_clause, where_params = e.to_sql()
where_clause = where_clause and [where_clause] or []
if parent_model and child_object:
# as inherited rules are being applied, we need to add the missing JOIN
# to reach the parent table (if it was not JOINed yet in the query)
- child_object._inherits_join_add(parent_model, query)
+ child_object._inherits_join_add(child_object, parent_model, query)
query.where_clause += added_clause
query.where_clause_params += added_params
for table in added_tables:
else:
continue # ignore non-readable or "non-joinable" fields
elif order_field in self._inherit_fields:
- parent_obj = self.pool.get(self._inherit_fields[order_field][0])
+ parent_obj = self.pool.get(self._inherit_fields[order_field][3])
order_column = parent_obj._columns[order_field]
if order_column._classic_read:
inner_clause = self._inherits_join_calc(order_field, query)
for f in fields:
ftype = fields[f]['type']
- if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
+ if self._log_access and f in LOG_ACCESS_COLUMNS:
del data[f]
if f in default:
# force a clean recompute!
for parent_column in ['parent_left', 'parent_right']:
data.pop(parent_column, None)
-
- for v in self._inherits:
- del data[self._inherits[v]]
+ # Remove _inherits field's from data recursively, missing parents will
+ # be created by create() (so that copy() copy everything).
+ def remove_ids(inherits_dict):
+ for parent_table in inherits_dict:
+ del data[inherits_dict[parent_table]]
+ remove_ids(self.pool.get(parent_table)._inherits)
+ remove_ids(self._inherits)
return data
def copy_translations(self, cr, uid, old_id, new_id, context=None):