- classicals (varchar, integer, boolean, ...)
- relations (one2many, many2one, many2many)
- functions
-
+
"""
import calendar
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
-# Mapping between openerp module names and their osv classes.
-module_class_list = {}
-
# Super-user identifier (aka Administrator aka root)
ROOT_USER_ID = 1
test_modifiers({}, '{}')
test_modifiers({"invisible": True}, '{"invisible": true}')
test_modifiers({"invisible": False}, '{}')
-
+
def check_object_name(name):
""" Check if the given name is a valid openerp object name.
def intersect(la, lb):
return filter(lambda x: x in lb, la)
+def fix_import_export_id_paths(fieldname):
+ """
+ Fixes the id fields in import and exports, and splits field paths
+ on '/'.
+
+ :param str fieldname: name of the field to import/export
+ :return: split field name
+ :rtype: list of str
+ """
+ fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
+ fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
+ return fixed_external_id.split('/')
+
class except_orm(Exception):
def __init__(self, name, value):
self.name = name
#
class browse_record_list(list):
""" Collection of browse objects
-
+
Such an instance will be returned when doing a ``browse([ids..])``
and will be iterable, yielding browse() objects
"""
class browse_record(object):
""" An object that behaves like a row of an object's table.
It has attributes after the columns of the corresponding object.
-
+
Examples::
-
+
uobj = pool.get('res.users')
user_rec = uobj.browse(cr, uid, 104)
name = user_rec.name
col = self._table._inherit_fields[name][2]
elif hasattr(self._table, str(name)):
attr = getattr(self._table, name)
-
if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
- return lambda *args, **argv: attr(self._cr, self._uid, [self._id], *args, **argv)
+ def function_proxy(*args, **kwargs):
+ if 'context' not in kwargs and self._context:
+ kwargs.update(context=self._context)
+ return attr(self._cr, self._uid, [self._id], *args, **kwargs)
+ return function_proxy
else:
return attr
else:
__repr__ = __str__
+ def refresh(self):
+ """Force refreshing this browse_record's data and all the data of the
+ records that belong to the same cache, by emptying the cache completely,
+ preserving only the record identifiers (for prefetching optimizations).
+ """
+ for model, model_cache in self._cache.iteritems():
+ # only preserve the ids of the records that were in the cache
+ cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
+ self._cache[model].clear()
+ self._cache[model].update(cached_ids)
def get_pg_type(f):
"""
_order = 'id'
_sequence = None
_description = None
+
+ # structure:
+ # { 'parent_model': 'm2o_field', ... }
_inherits = {}
- # Mapping from inherits'd field name to triple (m, r, f)
- # where m is the model from which it is inherits'd,
- # r is the (local) field towards m,
- # and f is the _column object itself.
+
+ # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
+ # model from which it is inherits'd, r is the (local) field towards m, f
+ # is the _column object itself, and n is the original (i.e. top-most)
+ # parent model.
+ # Example:
+ # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
+    #         field_column_obj, original_parent_model), ... }
_inherit_fields = {}
+
# Mapping field name/column_info object
# This is similar to _inherit_fields but:
# 1. includes self fields,
# 2. uses column_info instead of a triple.
_all_columns = {}
+
_table = None
_invalids = set()
_log_create = False
CONCURRENCY_CHECK_FIELD = '__last_update'
+
def log(self, cr, uid, id, message, secondary=False, context=None):
+ if context and context.get('disable_log'):
+ return True
return self.pool.get('res.log').create(cr, uid,
{
'name': message,
'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
nattr = {}
for s in attributes:
- new = copy.copy(getattr(pool.get(parent_name), s))
+ new = copy.copy(getattr(pool.get(parent_name), s, {}))
if s == '_columns':
# Don't _inherit custom fields.
for c in new.keys():
return obj
def __new__(cls):
- """ Register this model.
+ """Register this model.
This doesn't create an instance but simply register the model
as being part of the module where it is defined.
-
- TODO make it possible to not even have to call the constructor
- to be registered.
-
"""
-
# Set the module name (e.g. base, sale, accounting, ...) on the class.
module = cls.__module__.split('.')[0]
if not hasattr(cls, '_module'):
cls._module = module
- # Remember which models to instanciate for this module.
- module_class_list.setdefault(cls._module, []).append(cls)
+ # Record this class in the list of models to instantiate for this module,
+ # managed by the metaclass.
+ module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
+ if cls not in module_model_list:
+ module_model_list.append(cls)
# Since we don't return an instance here, the __init__
# method won't be called.
elif field_type == 'integer':
return 0
elif field_type == 'boolean':
- return False
+ return 'False'
return ''
def selection_field(in_field):
else:
r = d['name']
else:
- break
+ postfix = 0
+ while True:
+ n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
+ if not model_data.search(cr, uid, [('name', '=', n)]):
+ break
+ postfix += 1
+ model_data.create(cr, uid, {
+ 'name': n,
+ 'model': self._name,
+ 'res_id': r['id'],
+ })
+ r = n
else:
r = r[f[i]]
# To display external name of selection field when its exported
cols = self._columns.copy()
for f in self._inherit_fields:
cols.update({f: self._inherit_fields[f][2]})
- def fsplit(x):
- if x=='.id': return [x]
- return x.replace(':id','/id').replace('.id','/.id').split('/')
- fields_to_export = map(fsplit, fields_to_export)
+ fields_to_export = map(fix_import_export_id_paths, fields_to_export)
datas = []
for row in self.browse(cr, uid, ids, context):
datas += self.__export_row(cr, uid, row, fields_to_export, context)
"""
Import given data in given module
- :param cr: database cursor
- :param uid: current user id
- :param fields: list of fields
- :param data: data to import
- :param mode: 'init' or 'update' for record creation
- :param current_module: module name
- :param noupdate: flag for record creation
- :param context: context arguments, like lang, time zone,
- :param filename: optional file to store partial import state for recovery
- :rtype: tuple
-
This method is used when importing data via client menu.
Example of fields to import for a sale.order::
order_line/price_unit,
order_line/product_uom_qty,
order_line/product_uom/id (=xml_id)
+
+ This method returns a 4-tuple with the following structure:
+
+    * The first item is a return code, it returns either ``-1`` in case of error
+
+ :param cr: database cursor
+ :param uid: current user id
+ :param fields: list of fields
+ :param data: data to import
+ :param mode: 'init' or 'update' for record creation
+ :param current_module: module name
+ :param noupdate: flag for record creation
+ :param context: context arguments, like lang, time zone,
+ :param filename: optional file to store partial import state for recovery
+ :returns: 4-tuple of a return code, an errored resource, an error message and ???
+ :rtype: (int, dict|0, str|0, ''|0)
"""
if not context:
context = {}
- def _replace_field(x):
- x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
- return x.replace(':id','/id').split('/')
- fields = map(_replace_field, fields)
+ fields = map(fix_import_export_id_paths, fields)
logger = netsvc.Logger()
ir_model_data_obj = self.pool.get('ir.model.data')
if line[i] and skip:
return False
continue
-
+
#set the mode for m2o, o2m, m2m : xml_id/id/name
if len(field) == len(prefix)+1:
mode = False
for db_id in line.split(config.get('csv_internal_sep')):
res.append(_get_id(relation, db_id, current_module, mode))
return [(6,0,res)]
-
+
# ID of the record using a XML ID
if field[len(prefix)]=='id':
try:
relation_obj = self.pool.get(relation)
newfd = relation_obj.fields_get( cr, uid, context=context )
pos = position
-
+
res = many_ids(line[i], relation, current_module, mode)
-
+
first = 0
while pos < len(datas):
res2 = process_liness(self, datas, prefix + [field[len(prefix)]], current_module, relation_obj._name, newfd, pos, first)
nbrmax = max(nbrmax, pos)
warning += w2
first += 1
-
+
if data_res_id2:
res.append((4, data_res_id2))
-
+
if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
break
res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )
-
+
elif fields_def[field[len(prefix)]]['type']=='many2one':
relation = fields_def[field[len(prefix)]]['relation']
else:
res = line[i]
-
+
row[field[len(prefix)]] = res or False
result = (row, nbrmax, warning, data_res_id, xml_id)
position = 0
while position<len(datas):
res = {}
-
+
(res, position, warning, res_id, xml_id) = \
process_liness(self, datas, [], current_module, self._name, fields_def, position=position)
if len(warning):
current_module, res, mode=mode, xml_id=xml_id,
noupdate=noupdate, res_id=res_id, context=context)
except Exception, e:
- return (-1, res, 'Line ' + str(position) +' : ' + str(e), '')
+ return (-1, res, 'Line ' + str(position) + ' : ' + tools.ustr(e), '')
if config.get('import_partial', False) and filename and (not (position%100)):
data = pickle.load(file(config.get('import_partial')))
else:
translated_msg = tmp_msg
else:
- translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
+ translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg) or msg
error_msgs.append(
_("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
)
attrs['selection'].append((False, ''))
fields[node.get('name')] = attrs
- field = model_fields[node.get('name')]
- transfer_field_to_modifiers(field, modifiers)
+ field = model_fields.get(node.get('name'))
+ if field:
+ transfer_field_to_modifiers(field, modifiers)
+
elif node.tag in ('form', 'tree'):
result = self.view_header_get(cr, user, False, node.tag, context)
:param view_type: type of the view to return if view_id is None ('form', tree', ...)
:param context: context arguments, like lang, time zone
:param toolbar: true to include contextual actions
- :param submenu: example (portal_project module)
+ :param submenu: deprecated
:return: dictionary describing the composition of the requested view (including inherited views and extensions)
:raise AttributeError:
* if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
return source
- def apply_view_inheritance(source, inherit_id):
+ def apply_view_inheritance(cr, user, source, inherit_id):
""" Apply all the (directly and indirectly) inheriting views.
:param source: a parent architecture to modify (with parent
modifications already applied)
- :param inherit_id: the database id of the parent view
+ :param inherit_id: the database view_id of the parent view
:return: a modified source where all the modifying architecture
are applied
"""
- # get all views which inherit from (ie modify) this view
- cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
- sql_inherit = cr.fetchall()
- for (inherit, id) in sql_inherit:
- source = apply_inheritance_specs(source, inherit, id)
- source = apply_view_inheritance(source, id)
+ sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
+ for (view_arch, view_id) in sql_inherit:
+ source = apply_inheritance_specs(source, view_arch, view_id)
+ source = apply_view_inheritance(cr, user, source, view_id)
return source
result = {'type': view_type, 'model': self._name}
result['view_id'] = sql_res['id']
source = etree.fromstring(encode(sql_res['arch']))
- result['arch'] = apply_view_inheritance(source, result['view_id'])
+ result['arch'] = apply_view_inheritance(cr, user, source, result['view_id'])
result['name'] = sql_res['name']
result['field_parent'] = sql_res['field_parent'] or False
result['arch'] = xarch
result['fields'] = xfields
- if submenu:
- if context and context.get('active_id', False):
- data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
- if data_menu:
- act_id = data_menu.id
- if act_id:
- data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
- result['submenu'] = getattr(data_action, 'menus', False)
if toolbar:
def clean(x):
x = x[2]
raise NotImplementedError(_('The search method is not implemented on this object !'))
def name_get(self, cr, user, ids, context=None):
+ """Returns the preferred display value (text representation) for the records with the
+ given ``ids``. By default this will be the value of the ``name`` column, unless
+ the model implements a custom behavior.
+ Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
+ guaranteed to be.
+
+ :rtype: list(tuple)
+ :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
"""
-
- :param cr: database cursor
- :param user: current user id
- :type user: integer
- :param ids: list of ids
- :param context: context arguments, like lang, time zone
- :type context: dictionary
- :return: tuples with the text representation of requested objects for to-many relationships
-
- """
- if not context:
- context = {}
if not ids:
return []
if isinstance(ids, (int, long)):
[self._rec_name], context, load='_classic_write')]
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
- """
- Search for records and their display names according to a search domain.
-
- :param cr: database cursor
- :param user: current user id
- :param name: object name to search
- :param args: list of tuples specifying search criteria [('field_name', 'operator', 'value'), ...]
- :param operator: operator for search criterion
- :param context: context arguments, like lang, time zone
- :type context: dictionary
- :param limit: optional max number of records to return
- :return: list of object names matching the search criteria, used to provide completion for to-many relationships
-
- This method is equivalent of :py:meth:`~osv.osv.osv.search` on **name** + :py:meth:`~osv.osv.osv.name_get` on the result.
- See :py:meth:`~osv.osv.osv.search` for an explanation of the possible values for the search domain specified in **args**.
-
+ """Search for records that have a display name matching the given ``name`` pattern if compared
+ with the given ``operator``, while also matching the optional search domain (``args``).
+ This is used for example to provide suggestions based on a partial value for a relational
+ field.
+    Can sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
+ guaranteed to be.
+
+ This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
+ and then :meth:`~.name_get` on the result of the search.
+
+ :param list args: optional search domain (see :meth:`~.search` for syntax),
+ specifying further restrictions
+ :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
+ or ``'='``.
+ :param int limit: optional max number of records to return
+ :rtype: list
+ :return: list of pairs ``(id,text_repr)`` for all matching records.
"""
return self._name_search(cr, user, name, args, operator, context, limit)
def name_create(self, cr, uid, name, context=None):
- """
- Creates a new record by calling :py:meth:`~osv.osv.osv.create` with only one
- value provided: the name of the new record (``_rec_name`` field).
- The new record will also be initialized with any default values applicable
- to this model, or provided through the context. The usual behavior of
- :py:meth:`~osv.osv.osv.create` applies.
- Similarly, this method may raise an exception if the model has multiple
- required fields and some do not have default values.
-
- :param name: name of the record to create
-
- :return: the :py:meth:`~osv.osv.osv.name_get` value for the newly-created record.
+ """Creates a new record by calling :meth:`~.create` with only one
+ value provided: the name of the new record (``_rec_name`` field).
+ The new record will also be initialized with any default values applicable
+ to this model, or provided through the context. The usual behavior of
+ :meth:`~.create` applies.
+ Similarly, this method may raise an exception if the model has multiple
+ required fields and some do not have default values.
+
+ :param name: name of the record to create
+
+ :rtype: tuple
+ :return: the :meth:`~.name_get` pair value for the newly-created record.
"""
rec_id = self.create(cr, uid, {self._rec_name: name}, context);
return self.name_get(cr, uid, [rec_id], context)[0]
def copy(self, cr, uid, id, default=None, context=None):
raise NotImplementedError(_('The copy method is not implemented on this object !'))
- def exists(self, cr, uid, id, context=None):
+ def exists(self, cr, uid, ids, context=None):
+ """Checks whether the given id or ids exist in this model,
+ and return the list of ids that do. This is simple to use for
+ a truth test on a browse_record::
+
+ if record.exists():
+ pass
+
+ :param ids: id or list of ids to check for existence
+ :type ids: int or [int]
+ :return: the list of ids that currently exist, out of
+ the given `ids`
+ """
raise NotImplementedError(_('The exists method is not implemented on this object !'))
def read_string(self, cr, uid, id, langs, fields=None, context=None):
res = {}
res2 = {}
- self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
+ self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
if not fields:
fields = self._columns.keys() + self._inherit_fields.keys()
#FIXME: collect all calls to _get_source into one SQL call.
return res
def write_string(self, cr, uid, id, langs, vals, context=None):
- self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
+ self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
#FIXME: try to only call the translation in one SQL
for lang in langs:
for field in vals:
values = defaults
return values
+ def clear_caches(self):
+ """ Clear the caches
+
+ This clears the caches associated to methods decorated with
+ ``tools.ormcache`` or ``tools.ormcache_multi``.
+ """
+ try:
+ getattr(self, '_ormcache')
+ self._ormcache = {}
+ except AttributeError:
+ pass
+
+ def check_access_rule(self, cr, uid, ids, operation, context=None):
+ """Verifies that the operation given by ``operation`` is allowed for the user
+ according to ir.rules.
+
+ :param operation: one of ``write``, ``unlink``
+ :raise except_orm: * if current ir.rules do not permit this operation.
+ :return: None if the operation is allowed
+ """
+ raise NotImplementedError(_('The check_access_rule method is not implemented on this object !'))
+
class orm_memory(orm_template):
_protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
args = [('active', '=', 1)]
if args:
import expression
- e = expression.expression(args)
- e.parse(cr, user, self, context)
+ e = expression.expression(cr, user, args, self, context)
res = e.exp
return res or []
break
f = True
for arg in result:
+ if len(arg) != 3:
+ # Amazing hack: orm_memory handles only simple domains.
+ continue
if arg[1] == '=':
val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
# nothing to check in memory...
pass
- def exists(self, cr, uid, id, context=None):
- return id in self.datas
+ def exists(self, cr, uid, ids, context=None):
+ if isinstance(ids, (long,int)):
+ ids = [ids]
+ return [id for id in ids if id in self.datas]
+
+ def check_access_rule(self, cr, uid, ids, operation, context=None):
+ # ir.rules do not currently apply for orm.memory instances,
+ # only the implicit visibility=owner one.
+ for id in ids:
+ self._check_access(uid, id, operation)
+
+# Definition of log access columns, automatically added to models if
+# self._log_access is True
+LOG_ACCESS_COLUMNS = {
+ 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
+ 'create_date': 'TIMESTAMP',
+ 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
+ 'write_date': 'TIMESTAMP'
+}
+# special columns automatically created by the ORM
+MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS.keys() + \
+ ['internal.create_uid', 'internal.date_access'] # for osv_memory only
class orm(orm_template):
_sql_constraints = []
_protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
__logger = logging.getLogger('orm')
__schema = logging.getLogger('orm.schema')
+
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
"""
Get the list of records in list view grouped by the given ``groupby`` fields
"""
context = context or {}
- self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
+ self.pool.get('ir.model.access').check(cr, uid, self._name, 'read')
if not fields:
fields = self._columns.keys()
del d['id']
return data
- def _inherits_join_add(self, parent_model_name, query):
+ def _inherits_join_add(self, current_table, parent_model_name, query):
"""
Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
-
+ :param current_table: current model object
:param parent_model_name: name of the parent model for which the clauses should be added
:param query: query object on which the JOIN should be added
"""
- inherits_field = self._inherits[parent_model_name]
+ inherits_field = current_table._inherits[parent_model_name]
parent_model = self.pool.get(parent_model_name)
parent_table_name = parent_model._table
quoted_parent_table_name = '"%s"' % parent_table_name
if quoted_parent_table_name not in query.tables:
query.tables.append(quoted_parent_table_name)
- query.where_clause.append('("%s".%s = %s.id)' % (self._table, inherits_field, parent_table_name))
+ query.where_clause.append('(%s.%s = %s.id)' % (current_table._table, inherits_field, parent_table_name))
+
+
def _inherits_join_calc(self, field, query):
"""
while field in current_table._inherit_fields and not field in current_table._columns:
parent_model_name = current_table._inherit_fields[field][0]
parent_table = self.pool.get(parent_model_name)
- self._inherits_join_add(parent_model_name, query)
+ self._inherits_join_add(current_table, parent_model_name, query)
current_table = parent_table
return '"%s".%s' % (current_table._table, field)
pass
if not val_id:
raise except_orm(_('ValidateError'),
- _('Invalid value for reference field "%s" (last part must be a non-zero integer): "%s"') % (field, value))
+ _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
val = val_model
else:
val = value
elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
return
raise except_orm(_('ValidateError'),
- _('The value "%s" for the field "%s" is not in the selection') % (value, field))
+ _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
def _check_removed_columns(self, cr, log=False):
# iterate on the database columns to drop the NOT NULL constraints
# of fields which were required but have been removed (or will be added by another module)
columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
- columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
+ columns += MAGIC_COLUMNS
cr.execute("SELECT a.attname, a.attnotnull"
" FROM pg_class c, pg_attribute a"
" WHERE c.relname=%s"
column_data = self._select_column_data(cr)
for k, f in self._columns.iteritems():
- if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
+ if k in MAGIC_COLUMNS:
continue
# Don't update custom (also called manual) fields
if f.manual and not update_custom_fields:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
- cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
+ cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
self._table, k, f_pg_type, f._type, newname)
if not isinstance(f, fields.function) or f.store:
# add the missing field
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
- cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
+ cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
self.__schema.debug("Table '%s': added column '%s' with definition=%s",
self._table, k, get_pg_type(f)[1])
def _create_table(self, cr):
cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
- cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''")))
+ cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,))
self.__schema.debug("Table '%s': created", self._table)
def _add_log_columns(self, cr):
- logs = {
- 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
- 'create_date': 'TIMESTAMP',
- 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
- 'write_date': 'TIMESTAMP'
- }
- for k in logs:
+ for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
cr.execute("""
SELECT c.relname
FROM pg_class c, pg_attribute a
WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
- """, (self._table, k))
+ """, (self._table, field))
if not cr.rowcount:
- cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
+ cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
cr.commit()
self.__schema.debug("Table '%s': added column '%s' with definition=%s",
- self._table, k, logs[k])
+ self._table, field, field_def)
def _select_column_data(self, cr):
if f == order:
ok = False
if ok:
- self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order, length))
+ self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
for (key, _, msg) in self._sql_constraints:
for table in self._inherits:
other = self.pool.get(table)
for col in other._columns.keys():
- res[col] = (table, self._inherits[table], other._columns[col])
+ res[col] = (table, self._inherits[table], other._columns[col], table)
for col in other._inherit_fields.keys():
- res[col] = (table, self._inherits[table], other._inherit_fields[col][2])
+ res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
self._inherit_fields = res
self._all_columns = self._get_column_infos()
self._inherits_reload_src()
inherited field via _inherits) to a ``column_info`` struct
giving detailed columns """
result = {}
- for k, (parent, m2o, col) in self._inherit_fields.iteritems():
- result[k] = fields.column_info(k, col, parent, m2o)
+ for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
+ result[k] = fields.column_info(k, col, parent, m2o, original_parent)
for k, col in self._columns.iteritems():
result[k] = fields.column_info(k, col)
return result
"""
ira = self.pool.get('ir.model.access')
- write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
- ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
+ write_access = ira.check(cr, user, self._name, 'write', False) or \
+ ira.check(cr, user, self._name, 'create', False)
return super(orm, self).fields_get(cr, user, fields, context, write_access)
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
if not context:
context = {}
- self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'read')
if not fields:
fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
if isinstance(ids, (int, long)):
res = []
if len(fields_pre):
def convert_field(f):
- f_qual = "%s.%s" % (self._table, f) # need fully-qualified references in case len(tables) > 1
+ f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
if f in ('create_date', 'write_date'):
return "date_trunc('second', %s) as %s" % (f_qual, f)
if f == self.CONCURRENCY_CHECK_FIELD:
self._check_concurrency(cr, ids, context)
- self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
+ self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink')
properties = self.pool.get('ir.property')
domain = [('res_id', '=', False),
ids = [ids]
self._check_concurrency(cr, ids, context)
- self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'write')
result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
"""
if not context:
context = {}
- self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
vals = self._add_missing_default_values(cr, user, vals, context)
upd_todo = []
for v in vals.keys():
if v in self._inherit_fields:
- (table, col, col_detail) = self._inherit_fields[v]
+ (table, col, col_detail, original_parent) = self._inherit_fields[v]
tocreate[table][v] = vals[v]
del vals[v]
else:
:return: [(priority, model_name, [record_ids,], [function_fields,])]
"""
- # FIXME: rewrite, cleanup, use real variable names
- # e.g.: http://pastie.org/1222060
- result = {}
- fncts = self.pool._store_function.get(self._name, [])
- for fnct in range(len(fncts)):
- if fncts[fnct][3]:
- ok = False
- if not fields:
- ok = True
- for f in (fields or []):
- if f in fncts[fnct][3]:
- ok = True
- break
- if not ok:
- continue
+ if fields is None: fields = []
+ stored_functions = self.pool._store_function.get(self._name, [])
- result.setdefault(fncts[fnct][0], {})
+ # use indexed names for the details of the stored_functions:
+ model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)
+ # only keep functions that should be triggered for the ``fields``
+ # being written to.
+ to_compute = [f for f in stored_functions \
+ if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]
+
+ mapping = {}
+ for function in to_compute:
# use admin user for accessing objects having rules defined on store fields
- ids2 = fncts[fnct][2](self, cr, ROOT_USER_ID, ids, context)
- for id in filter(None, ids2):
- result[fncts[fnct][0]].setdefault(id, [])
- result[fncts[fnct][0]][id].append(fnct)
- dict = {}
- for object in result:
- k2 = {}
- for id, fnct in result[object].items():
- k2.setdefault(tuple(fnct), [])
- k2[tuple(fnct)].append(id)
- for fnct, id in k2.items():
- dict.setdefault(fncts[fnct[0]][4], [])
- dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4], object, id, map(lambda x: fncts[x][1], fnct)))
- result2 = []
- tmp = dict.keys()
- tmp.sort()
- for k in tmp:
- result2 += dict[k]
- return result2
+ target_ids = [id for id in function[id_mapping_fnct_](self, cr, ROOT_USER_ID, ids, context) if id]
+
+ # the compound key must consider the priority and model name
+ key = (function[priority_], function[model_name_])
+ for target_id in target_ids:
+ mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))
+
+ # Here mapping looks like:
+ # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
+ # (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
+ # (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... }
+ # }
+
+ # Now we need to generate the batch function calls list
+ # call_map =
+ # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
+ call_map = {}
+ for ((priority,model), id_map) in mapping.iteritems():
+ functions_ids_maps = {}
+ # function_ids_maps =
+ # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
+ for id, functions in id_map.iteritems():
+ functions_ids_maps.setdefault(tuple(functions), []).append(id)
+ for functions, ids in functions_ids_maps.iteritems():
+ call_map.setdefault((priority,model),[]).append((priority, model, ids,
+ [f[func_field_to_compute_] for f in functions]))
+ ordered_keys = call_map.keys()
+ ordered_keys.sort()
+ result = []
+ if ordered_keys:
+ result = reduce(operator.add, (call_map[k] for k in ordered_keys))
+ return result
def _store_set_values(self, cr, uid, ids, fields, context):
"""Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
if domain:
import expression
- e = expression.expression(domain)
- e.parse(cr, user, self, context)
+ e = expression.expression(cr, user, domain, self, context)
tables = e.get_tables()
where_clause, where_params = e.to_sql()
where_clause = where_clause and [where_clause] or []
if parent_model and child_object:
# as inherited rules are being applied, we need to add the missing JOIN
# to reach the parent table (if it was not JOINed yet in the query)
- child_object._inherits_join_add(parent_model, query)
+ child_object._inherits_join_add(child_object, parent_model, query)
query.where_clause += added_clause
query.where_clause_params += added_params
for table in added_tables:
else:
continue # ignore non-readable or "non-joinable" fields
elif order_field in self._inherit_fields:
- parent_obj = self.pool.get(self._inherit_fields[order_field][0])
+ parent_obj = self.pool.get(self._inherit_fields[order_field][3])
order_column = parent_obj._columns[order_field]
if order_column._classic_read:
inner_clause = self._inherits_join_calc(order_field, query)
"""
if context is None:
context = {}
- self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)
+ self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read')
query = self._where_calc(cr, user, args, context=context)
self._apply_ir_rules(cr, user, query, 'read', context=context)
for f in fields:
ftype = fields[f]['type']
- if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
+ if self._log_access and f in LOG_ACCESS_COLUMNS:
del data[f]
if f in default:
# force a clean recompute!
for parent_column in ['parent_left', 'parent_right']:
data.pop(parent_column, None)
-
- for v in self._inherits:
- del data[self._inherits[v]]
+ # Remove _inherits field's from data recursively, missing parents will
+ # be created by create() (so that copy() copy everything).
+ def remove_ids(inherits_dict):
+ for parent_table in inherits_dict:
+ del data[inherits_dict[parent_table]]
+ remove_ids(self.pool.get(parent_table)._inherits)
+ remove_ids(self._inherits)
return data
def copy_translations(self, cr, uid, old_id, new_id, context=None):
def exists(self, cr, uid, ids, context=None):
if type(ids) in (int, long):
ids = [ids]
- query = 'SELECT count(1) FROM "%s"' % (self._table)
+ query = 'SELECT id FROM "%s"' % (self._table)
cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
- return cr.fetchone()[0] == len(ids)
+ return [x[0] for x in cr.fetchall()]
def check_recursion(self, cr, uid, ids, context=None, parent=None):
warnings.warn("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \