#
##############################################################################
-#
-# Object relationnal mapping to postgresql module
-# . Hierarchical structure
-# . Constraints consistency, validations
-# . Object meta Data depends on its status
-# . Optimised processing by complex query (multiple actions at once)
-# . Default fields value
-# . Permissions optimisation
-# . Persistant object: DB postgresql
-# . Datas conversions
-# . Multi-level caching system
-# . 2 different inheritancies
-# . Fields:
-# - classicals (varchar, integer, boolean, ...)
-# - relations (one2many, many2one, many2many)
-# - functions
-#
-#
+#.apidoc title: Object Relational Mapping
+#.apidoc module-mods: member-order: bysource
+
+"""
+ Object relational mapping to database (postgresql) module
+ * Hierarchical structure
+ * Constraints consistency, validations
+ * Object meta Data depends on its status
+ * Optimised processing by complex query (multiple actions at once)
+ * Default fields value
+ * Permissions optimisation
+  * Persistent object: DB postgresql
+  * Data conversions
+  * Multi-level caching system
+  * 2 different inheritance mechanisms
+ * Fields:
+ - classicals (varchar, integer, boolean, ...)
+ - relations (one2many, many2one, many2many)
+ - functions
+
+"""
+
import calendar
import copy
import datetime
import time
import traceback
import types
+import simplejson
import openerp.netsvc as netsvc
from lxml import etree
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
-# Mapping between openerp module names and their osv classes.
-module_class_list = {}
+# Super-user identifier (aka Administrator aka root)
+ROOT_USER_ID = 1
+
+def transfer_field_to_modifiers(field, modifiers):
+ default_values = {}
+ state_exceptions = {}
+ for attr in ('invisible', 'readonly', 'required'):
+ state_exceptions[attr] = []
+ default_values[attr] = bool(field.get(attr))
+ for state, modifs in (field.get("states",{})).items():
+ for modif in modifs:
+ if default_values[modif[0]] != modif[1]:
+ state_exceptions[modif[0]].append(state)
+
+ for attr, default_value in default_values.items():
+ if state_exceptions[attr]:
+ modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
+ else:
+ modifiers[attr] = default_value
+
+
+# Don't deal with groups, it is done by check_group().
+# Need the context to evaluate the invisible attribute on tree views.
+# For non-tree views, the context shouldn't be given.
+def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
+ if node.get('attrs'):
+ modifiers.update(eval(node.get('attrs')))
+
+ if node.get('states'):
+ if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
+ # TODO combine with AND or OR, use implicit AND for now.
+ modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
+ else:
+ modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]
+
+ for a in ('invisible', 'readonly', 'required'):
+ if node.get(a):
+ v = bool(eval(node.get(a), {'context': context or {}}))
+ if in_tree_view and a == 'invisible':
+ # Invisible in a tree view has a specific meaning, make it a
+ # new key in the modifiers attribute.
+ modifiers['tree_invisible'] = v
+ elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
+ # Don't set the attribute to False if a dynamic value was
+ # provided (i.e. a domain from attrs or states).
+ modifiers[a] = v
+
+
+def simplify_modifiers(modifiers):
+ for a in ('invisible', 'readonly', 'required'):
+ if a in modifiers and not modifiers[a]:
+ del modifiers[a]
+
+
+def transfer_modifiers_to_node(modifiers, node):
+ if modifiers:
+ simplify_modifiers(modifiers)
+ node.set('modifiers', simplejson.dumps(modifiers))
+
+
+def test_modifiers(what, expected):
+ modifiers = {}
+ if isinstance(what, basestring):
+ node = etree.fromstring(what)
+ transfer_node_to_modifiers(node, modifiers)
+ simplify_modifiers(modifiers)
+ json = simplejson.dumps(modifiers)
+ assert json == expected, "%s != %s" % (json, expected)
+ elif isinstance(what, dict):
+ transfer_field_to_modifiers(what, modifiers)
+ simplify_modifiers(modifiers)
+ json = simplejson.dumps(modifiers)
+ assert json == expected, "%s != %s" % (json, expected)
+
+
+# To use this test:
+# import openerp
+# openerp.osv.orm.modifiers_tests()
+def modifiers_tests():
+ test_modifiers('<field name="a"/>', '{}')
+ test_modifiers('<field name="a" invisible="1"/>', '{"invisible": true}')
+ test_modifiers('<field name="a" readonly="1"/>', '{"readonly": true}')
+ test_modifiers('<field name="a" required="1"/>', '{"required": true}')
+ test_modifiers('<field name="a" invisible="0"/>', '{}')
+ test_modifiers('<field name="a" readonly="0"/>', '{}')
+ test_modifiers('<field name="a" required="0"/>', '{}')
+ test_modifiers('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}') # TODO order is not guaranteed
+ test_modifiers('<field name="a" invisible="1" required="0"/>', '{"invisible": true}')
+ test_modifiers('<field name="a" invisible="0" required="1"/>', '{"required": true}')
+ test_modifiers("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}')
+
+ # The dictionary is supposed to be the result of fields_get().
+ test_modifiers({}, '{}')
+ test_modifiers({"invisible": True}, '{"invisible": true}')
+ test_modifiers({"invisible": False}, '{}')
def check_object_name(name):
def intersect(la, lb):
return filter(lambda x: x in lb, la)
+def fix_import_export_id_paths(fieldname):
+ """
+ Fixes the id fields in import and exports, and splits field paths
+ on '/'.
+
+ :param str fieldname: name of the field to import/export
+ :return: split field name
+ :rtype: list of str
+ """
+ fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
+ fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
+ return fixed_external_id.split('/')
+
class except_orm(Exception):
def __init__(self, name, value):
self.name = name
class BrowseRecordError(Exception):
pass
-# Readonly python database object browser
class browse_null(object):
+ """ Readonly python database object browser
+ """
def __init__(self):
self.id = False
# TODO: execute an object method on browse_record_list
#
class browse_record_list(list):
+ """ Collection of browse objects
+
+ Such an instance will be returned when doing a ``browse([ids..])``
+ and will be iterable, yielding browse() objects
+ """
def __init__(self, lst, context=None):
if not context:
class browse_record(object):
+ """ An object that behaves like a row of an object's table.
+ It has attributes after the columns of the corresponding object.
+
+ Examples::
+
+ uobj = pool.get('res.users')
+ user_rec = uobj.browse(cr, uid, 104)
+ name = user_rec.name
+ """
logger = netsvc.Logger()
def __init__(self, cr, uid, id, table, cache, context=None, list_class=None, fields_process=None):
- '''
- table : the object (inherited from orm)
- context : dictionary with an optional context
- '''
+ """
+        @param cache a dictionary of model->field->data to be shared across browse
+ objects, thus reducing the SQL read()s . It can speed up things a lot,
+ but also be disastrous if not discarded after write()/unlink() operations
+ @param table the object (inherited from orm)
+ @param context dictionary with an optional context
+ """
if fields_process is None:
fields_process = {}
if context is None:
col = self._table._inherit_fields[name][2]
elif hasattr(self._table, str(name)):
attr = getattr(self._table, name)
-
if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
- return lambda *args, **argv: attr(self._cr, self._uid, [self._id], *args, **argv)
+ def function_proxy(*args, **kwargs):
+ if 'context' not in kwargs and self._context:
+ kwargs.update(context=self._context)
+ return attr(self._cr, self._uid, [self._id], *args, **kwargs)
+ return function_proxy
else:
return attr
else:
__repr__ = __str__
+ def refresh(self):
+ """Force refreshing this browse_record's data and all the data of the
+ records that belong to the same cache, by emptying the cache completely,
+ preserving only the record identifiers (for prefetching optimizations).
+ """
+ for model, model_cache in self._cache.iteritems():
+ # only preserve the ids of the records that were in the cache
+ cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
+ self._cache[model].clear()
+ self._cache[model].update(cached_ids)
def get_pg_type(f):
- '''
+ """
returns a tuple
(type returned by postgres when the column was created, type expression to create the column)
- '''
+ """
type_dict = {
fields.boolean: 'bool',
_order = 'id'
_sequence = None
_description = None
+
+ # structure:
+ # { 'parent_model': 'm2o_field', ... }
_inherits = {}
- # Mapping from inherits'd field name to triple (m, r, f)
- # where m is the model from which it is inherits'd,
- # r is the (local) field towards m,
- # and f is the _column object itself.
+
+ # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
+ # model from which it is inherits'd, r is the (local) field towards m, f
+ # is the _column object itself, and n is the original (i.e. top-most)
+ # parent model.
+ # Example:
+ # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
+    #                    field_column_obj, original_parent_model), ... }
_inherit_fields = {}
+
+ # Mapping field name/column_info object
+ # This is similar to _inherit_fields but:
+ # 1. includes self fields,
+ # 2. uses column_info instead of a triple.
+ _all_columns = {}
+
_table = None
_invalids = set()
_log_create = False
CONCURRENCY_CHECK_FIELD = '__last_update'
+
def log(self, cr, uid, id, message, secondary=False, context=None):
+ if context and context.get('disable_log'):
+ return True
return self.pool.get('res.log').create(cr, uid,
{
'name': message,
'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
nattr = {}
for s in attributes:
- new = copy.copy(getattr(pool.get(parent_name), s))
+ new = copy.copy(getattr(pool.get(parent_name), s, {}))
if s == '_columns':
# Don't _inherit custom fields.
for c in new.keys():
return obj
def __new__(cls):
- """ Register this model.
+ """Register this model.
This doesn't create an instance but simply register the model
as being part of the module where it is defined.
-
- TODO make it possible to not even have to call the constructor
- to be registered.
-
"""
-
# Set the module name (e.g. base, sale, accounting, ...) on the class.
module = cls.__module__.split('.')[0]
if not hasattr(cls, '_module'):
cls._module = module
- # Remember which models to instanciate for this module.
- module_class_list.setdefault(cls._module, []).append(cls)
+ # Record this class in the list of models to instantiate for this module,
+ # managed by the metaclass.
+ module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
+ if cls not in module_model_list:
+ module_model_list.append(cls)
# Since we don't return an instance here, the __init__
# method won't be called.
:param cr: database cursor
:param user: current user id
- :param select: id or list of ids
+ :param select: id or list of ids.
:param context: context arguments, like lang, time zone
:rtype: object or list of objects requested
elif field_type == 'integer':
return 0
elif field_type == 'boolean':
- return False
+ return 'False'
return ''
def selection_field(in_field):
else:
r = d['name']
else:
- break
+ postfix = 0
+ while True:
+ n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
+ if not model_data.search(cr, uid, [('name', '=', n)]):
+ break
+ postfix += 1
+ model_data.create(cr, uid, {
+ 'name': n,
+ 'model': self._name,
+ 'res_id': r['id'],
+ })
+ r = n
else:
r = r[f[i]]
# To display external name of selection field when its exported
cols = self._columns.copy()
for f in self._inherit_fields:
cols.update({f: self._inherit_fields[f][2]})
- def fsplit(x):
- if x=='.id': return [x]
- return x.replace(':id','/id').replace('.id','/.id').split('/')
- fields_to_export = map(fsplit, fields_to_export)
+ fields_to_export = map(fix_import_export_id_paths, fields_to_export)
datas = []
for row in self.browse(cr, uid, ids, context):
datas += self.__export_row(cr, uid, row, fields_to_export, context)
"""
Import given data in given module
- :param cr: database cursor
- :param uid: current user id
- :param fields: list of fields
- :param data: data to import
- :param mode: 'init' or 'update' for record creation
- :param current_module: module name
- :param noupdate: flag for record creation
- :param context: context arguments, like lang, time zone,
- :param filename: optional file to store partial import state for recovery
- :rtype: tuple
-
This method is used when importing data via client menu.
Example of fields to import for a sale.order::
order_line/price_unit,
order_line/product_uom_qty,
order_line/product_uom/id (=xml_id)
+
+ This method returns a 4-tuple with the following structure:
+
+        * The first item is a return code: it is ``-1`` in case of an import error (the remaining items then describe the errored resource and the error message)
+
+ :param cr: database cursor
+ :param uid: current user id
+ :param fields: list of fields
+ :param data: data to import
+ :param mode: 'init' or 'update' for record creation
+ :param current_module: module name
+ :param noupdate: flag for record creation
+ :param context: context arguments, like lang, time zone,
+ :param filename: optional file to store partial import state for recovery
+ :returns: 4-tuple of a return code, an errored resource, an error message and ???
+ :rtype: (int, dict|0, str|0, ''|0)
"""
if not context:
context = {}
- def _replace_field(x):
- x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
- return x.replace(':id','/id').split('/')
- fields = map(_replace_field, fields)
+ fields = map(fix_import_export_id_paths, fields)
logger = netsvc.Logger()
ir_model_data_obj = self.pool.get('ir.model.data')
return False
continue
+ #set the mode for m2o, o2m, m2m : xml_id/id/name
+ if len(field) == len(prefix)+1:
+ mode = False
+ else:
+ mode = field[len(prefix)+1]
+
+            # TODO: improve this by using csv.reader
+ def many_ids(line, relation, current_module, mode):
+ res = []
+ for db_id in line.split(config.get('csv_internal_sep')):
+ res.append(_get_id(relation, db_id, current_module, mode))
+ return [(6,0,res)]
+
# ID of the record using a XML ID
if field[len(prefix)]=='id':
try:
data_res_id = _get_id(model_name, line[i], current_module, 'id')
- except ValueError, e:
+ except ValueError:
pass
xml_id = line[i]
continue
if field[len(prefix)] in done:
continue
done[field[len(prefix)]] = True
- relation_obj = self.pool.get(fields_def[field[len(prefix)]]['relation'])
+ relation = fields_def[field[len(prefix)]]['relation']
+ relation_obj = self.pool.get(relation)
newfd = relation_obj.fields_get( cr, uid, context=context )
pos = position
- res = []
+
+ res = many_ids(line[i], relation, current_module, mode)
+
first = 0
while pos < len(datas):
res2 = process_liness(self, datas, prefix + [field[len(prefix)]], current_module, relation_obj._name, newfd, pos, first)
nbrmax = max(nbrmax, pos)
warning += w2
first += 1
+
+ if data_res_id2:
+ res.append((4, data_res_id2))
+
if (not newrow) or not reduce(lambda x, y: x or y, newrow.values(), 0):
break
+
res.append( (data_res_id2 and 1 or 0, data_res_id2 or 0, newrow) )
+
elif fields_def[field[len(prefix)]]['type']=='many2one':
relation = fields_def[field[len(prefix)]]['relation']
- if len(field) == len(prefix)+1:
- mode = False
- else:
- mode = field[len(prefix)+1]
res = _get_id(relation, line[i], current_module, mode)
elif fields_def[field[len(prefix)]]['type']=='many2many':
relation = fields_def[field[len(prefix)]]['relation']
- if len(field) == len(prefix)+1:
- mode = False
- else:
- mode = field[len(prefix)+1]
-
- # TODO: improve this by using csv.csv_reader
- res = []
- for db_id in line[i].split(config.get('csv_internal_sep')):
- res.append( _get_id(relation, db_id, current_module, mode) )
- res = [(6,0,res)]
+ res = many_ids(line[i], relation, current_module, mode)
elif fields_def[field[len(prefix)]]['type'] == 'integer':
res = line[i] and int(line[i]) or 0
current_module, res, mode=mode, xml_id=xml_id,
noupdate=noupdate, res_id=res_id, context=context)
except Exception, e:
- return (-1, res, 'Line ' + str(position) +' : ' + str(e), '')
+ return (-1, res, 'Line ' + str(position) + ' : ' + tools.ustr(e), '')
if config.get('import_partial', False) and filename and (not (position%100)):
data = pickle.load(file(config.get('import_partial')))
else:
translated_msg = tmp_msg
else:
- translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
+ translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg) or msg
error_msgs.append(
_("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
)
def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
- """ Returns the definition of each field.
+ """ Return the definition of each field.
The returned value is a dictionary (indiced by field name) of
dictionaries. The _inherits'd fields are included. The string,
res[f]['readonly'] = True
res[f]['states'] = {}
- if hasattr(res[f], 'string'):
+ if 'string' in res[f]:
res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
if res_trans:
res[f]['string'] = res_trans
- if hasattr(res[f], 'help'):
+ if 'help' in res[f]:
help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
if help_trans:
res[f]['help'] = help_trans
- if hasattr(res[f], 'selection'):
+ if 'selection' in res[f]:
if isinstance(field.selection, (tuple, list)):
sel = field.selection
sel2 = []
def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
return False
- def __view_look_dom(self, cr, user, node, view_id, context=None):
- if not context:
+ def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
+ """ Return the description of the fields in the node.
+
+ In a normal call to this method, node is a complete view architecture
+ but it is actually possible to give some sub-node (this is used so
+ that the method can call itself recursively).
+
+ Originally, the field descriptions are drawn from the node itself.
+ But there is now some code calling fields_get() in order to merge some
+        of that information into the architecture.
+
+ """
+ if context is None:
context = {}
result = False
fields = {}
children = True
+ modifiers = {}
+
def encode(s):
if isinstance(s, unicode):
return s.encode('utf8')
return s
- # return True if node can be displayed to current user
def check_group(node):
+ """ Set invisible to true if the user is not in the specified groups. """
if node.get('groups'):
groups = node.get('groups').split(',')
access_pool = self.pool.get('ir.model.access')
can_see = any(access_pool.check_groups(cr, user, group) for group in groups)
if not can_see:
node.set('invisible', '1')
+ modifiers['invisible'] = True
if 'attrs' in node.attrib:
del(node.attrib['attrs']) #avoid making field visible later
del(node.attrib['groups'])
- return can_see
- else:
- return True
if node.tag in ('field', 'node', 'arrow'):
if node.get('object'):
attrs['selection'].append((False, ''))
fields[node.get('name')] = attrs
+ field = model_fields.get(node.get('name'))
+ if field:
+ transfer_field_to_modifiers(field, modifiers)
+
+
elif node.tag in ('form', 'tree'):
result = self.view_header_get(cr, user, False, node.tag, context)
if result:
node.set('string', result)
+ in_tree_view = node.tag == 'tree'
elif node.tag == 'calendar':
for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
if node.get(additional_field):
fields[node.get(additional_field)] = {}
- if 'groups' in node.attrib:
- check_group(node)
+ check_group(node)
+
+            # The view architecture overrides the python model.
+ # Get the attrs before they are (possibly) deleted by check_group below
+ transfer_node_to_modifiers(node, modifiers, context, in_tree_view)
+
+            # TODO remove attrs counterpart in modifiers when invisible is true ?
# translate view
- if ('lang' in context) and not result:
- if node.get('string'):
+ if 'lang' in context:
+ if node.get('string') and not result:
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
if trans == node.get('string') and ('base_model_name' in context):
# If translation is same as source, perhaps we'd have more luck with the alternative model name
for f in node:
if children or (node.tag == 'field' and f.tag in ('filter','separator')):
- fields.update(self.__view_look_dom(cr, user, f, view_id, context))
+ fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))
+ transfer_modifiers_to_node(modifiers, node)
return fields
def _disable_workflow_buttons(self, cr, user, node):
+ """ Set the buttons in node to readonly if the user can't activate them. """
if user == 1:
# admin user can always activate workflow buttons
return node
return node
def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
- fields_def = self.__view_look_dom(cr, user, node, view_id, context=context)
- node = self._disable_workflow_buttons(cr, user, node)
- arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
+ """ Return an architecture and a description of all the fields.
+
+ The field description combines the result of fields_get() and
+ __view_look_dom().
+
+ :param node: the architecture as as an etree
+ :return: a tuple (arch, fields) where arch is the given node as a
+ string and fields is the description of all the fields.
+
+ """
fields = {}
if node.tag == 'diagram':
if node.getchildren()[0].tag == 'node':
- node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, fields_def.keys(), context)
+ node_fields = self.pool.get(node.getchildren()[0].get('object')).fields_get(cr, user, None, context)
+ fields.update(node_fields)
if node.getchildren()[1].tag == 'arrow':
- arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, fields_def.keys(), context)
- for key, value in node_fields.items():
- fields[key] = value
- for key, value in arrow_fields.items():
- fields[key] = value
+ arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
+ fields.update(arrow_fields)
else:
- fields = self.fields_get(cr, user, fields_def.keys(), context)
+ fields = self.fields_get(cr, user, None, context)
+ fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
+ node = self._disable_workflow_buttons(cr, user, node)
+ arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
+ for k in fields.keys():
+ if k not in fields_def:
+ del fields[k]
for field in fields_def:
if field == 'id':
# sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
def __get_default_calendar_view(self):
"""Generate a default calendar view (For internal use only).
"""
+ # TODO could return an etree instead of a string
arch = ('<?xml version="1.0" encoding="utf-8"?>\n'
'<calendar string="%s"') % (self._description)
tree_view = self.fields_view_get(cr, uid, False, 'tree', context=context)
fields_to_search = set()
+ # TODO it seems _all_columns could be used instead of fields_get (no need for translated fields info)
fields = self.fields_get(cr, uid, context=context)
for field in fields:
if fields[field].get('select'):
for field_name in fields_to_search:
field_group.append(etree.Element("field", attrib={'name': field_name}))
+        #TODO tostring can be removed as fromstring is called directly after...
return etree.tostring(search_view, encoding="utf-8").replace('\t', '')
#
:param view_type: type of the view to return if view_id is None ('form', tree', ...)
:param context: context arguments, like lang, time zone
:param toolbar: true to include contextual actions
- :param submenu: example (portal_project module)
+ :param submenu: deprecated
:return: dictionary describing the composition of the requested view (including inherited views and extensions)
:raise AttributeError:
* if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
% (child_view.xml_id, self._name, error_msg))
- def _inherit_apply(src, inherit, inherit_id=None):
- def _find(node, node2):
- if node2.tag == 'xpath':
- res = node.xpath(node2.get('expr'))
- if res:
- return res[0]
- else:
- return None
- else:
- for n in node.getiterator(node2.tag):
- res = True
- if node2.tag == 'field':
- # only compare field names, a field can be only once in a given view
- # at a given level (and for multilevel expressions, we should use xpath
- # inheritance spec anyway)
- if node2.get('name') == n.get('name'):
- return n
- else:
- continue
- for attr in node2.attrib:
- if attr == 'position':
- continue
- if n.get(attr):
- if n.get(attr) == node2.get(attr):
- continue
- res = False
- if res:
- return n
+ def locate(source, spec):
+ """ Locate a node in a source (parent) architecture.
+
+ Given a complete source (parent) architecture (i.e. the field
+ `arch` in a view), and a 'spec' node (a node in an inheriting
+ view that specifies the location in the source view of what
+ should be changed), return (if it exists) the node in the
+ source view matching the specification.
+
+ :param source: a parent architecture to modify
+ :param spec: a modifying node in an inheriting view
+ :return: a node in the source matching the spec
+
+ """
+ if spec.tag == 'xpath':
+ nodes = source.xpath(spec.get('expr'))
+ return nodes[0] if nodes else None
+ elif spec.tag == 'field':
+ # Only compare the field name: a field can be only once in a given view
+ # at a given level (and for multilevel expressions, we should use xpath
+ # inheritance spec anyway).
+ for node in source.getiterator('field'):
+ if node.get('name') == spec.get('name'):
+ return node
+ return None
+ else:
+ for node in source.getiterator(spec.tag):
+ good = True
+ for attr in spec.attrib:
+ if attr != 'position' and (not node.get(attr) or node.get(attr) != spec.get(attr)):
+ good = False
+ break
+ if good:
+ return node
return None
- # End: _find(node, node2)
+ def apply_inheritance_specs(source, specs_arch, inherit_id=None):
+ """ Apply an inheriting view.
+
+ Apply to a source architecture all the spec nodes (i.e. nodes
+ describing where and what changes to apply to some parent
+ architecture) given by an inheriting view.
+
+ :param source: a parent architecture to modify
+ :param specs_arch: a modifying architecture in an inheriting view
+ :param inherit_id: the database id of the inheriting view
+ :return: a modified source where the specs are applied
- doc_dest = etree.fromstring(encode(inherit))
- toparse = [doc_dest]
+ """
+ specs_tree = etree.fromstring(encode(specs_arch))
+        # Queue of specification nodes (i.e. nodes describing where and
+        # what changes to apply to some parent architecture).
+ specs = [specs_tree]
- while len(toparse):
- node2 = toparse.pop(0)
- if isinstance(node2, SKIPPED_ELEMENT_TYPES):
+ while len(specs):
+ spec = specs.pop(0)
+ if isinstance(spec, SKIPPED_ELEMENT_TYPES):
continue
- if node2.tag == 'data':
- toparse += [ c for c in doc_dest ]
+ if spec.tag == 'data':
+ specs += [ c for c in specs_tree ]
continue
- node = _find(src, node2)
+ node = locate(source, spec)
if node is not None:
- pos = 'inside'
- if node2.get('position'):
- pos = node2.get('position')
+ pos = spec.get('position', 'inside')
if pos == 'replace':
- parent = node.getparent()
- if parent is None:
- src = copy.deepcopy(node2[0])
+ if node.getparent() is None:
+ source = copy.deepcopy(spec[0])
else:
- for child in node2:
+ for child in spec:
node.addprevious(child)
node.getparent().remove(node)
elif pos == 'attributes':
- for child in node2.getiterator('attribute'):
+ for child in spec.getiterator('attribute'):
attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
if attribute[1]:
node.set(attribute[0], attribute[1])
del(node.attrib[attribute[0]])
else:
sib = node.getnext()
- for child in node2:
+ for child in spec:
if pos == 'inside':
node.append(child)
elif pos == 'after':
raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
else:
attrs = ''.join([
- ' %s="%s"' % (attr, node2.get(attr))
- for attr in node2.attrib
+ ' %s="%s"' % (attr, spec.get(attr))
+ for attr in spec.attrib
if attr != 'position'
])
- tag = "<%s%s>" % (node2.tag, attrs)
+ tag = "<%s%s>" % (spec.tag, attrs)
raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
- return src
- # End: _inherit_apply(src, inherit)
+ return source
+
+ def apply_view_inheritance(cr, user, source, inherit_id):
+ """ Apply all the (directly and indirectly) inheriting views.
+
+ :param source: a parent architecture to modify (with parent
+ modifications already applied)
+ :param inherit_id: the database view_id of the parent view
+ :return: a modified source where all the modifying architecture
+ are applied
+
+ """
+ sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
+ for (view_arch, view_id) in sql_inherit:
+ source = apply_inheritance_specs(source, view_arch, view_id)
+ source = apply_view_inheritance(cr, user, source, view_id)
+ return source
result = {'type': view_type, 'model': self._name}
- ok = True
sql_res = False
parent_view_model = None
- while ok:
- view_ref = context.get(view_type + '_view_ref', False)
+ view_ref = context.get(view_type + '_view_ref')
+ # Search for a root (i.e. without any parent) view.
+ while True:
if view_ref and not view_id:
if '.' in view_ref:
module, view_ref = view_ref.split('.', 1)
FROM ir_ui_view
WHERE id=%s""", (view_id,))
else:
- cr.execute('''SELECT
- arch,name,field_parent,id,type,inherit_id,model
- FROM
- ir_ui_view
- WHERE
- model=%s AND
- type=%s AND
- inherit_id IS NULL
- ORDER BY priority''', (self._name, view_type))
- sql_res = cr.fetchone()
+ cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
+ FROM ir_ui_view
+ WHERE model=%s AND type=%s AND inherit_id IS NULL
+ ORDER BY priority""", (self._name, view_type))
+ sql_res = cr.dictfetchone()
if not sql_res:
break
- ok = sql_res[5]
- view_id = ok or sql_res[3]
- parent_view_model = sql_res[6]
+ view_id = sql_res['inherit_id'] or sql_res['id']
+ parent_view_model = sql_res['model']
+ if not sql_res['inherit_id']:
+ break
# if a view was found
if sql_res:
- result['type'] = sql_res[4]
- result['view_id'] = sql_res[3]
- result['arch'] = sql_res[0]
-
- def _inherit_apply_rec(result, inherit_id):
- # get all views which inherit from (ie modify) this view
- cr.execute('select arch,id from ir_ui_view where inherit_id=%s and model=%s order by priority', (inherit_id, self._name))
- sql_inherit = cr.fetchall()
- for (inherit, id) in sql_inherit:
- result = _inherit_apply(result, inherit, id)
- result = _inherit_apply_rec(result, id)
- return result
-
- inherit_result = etree.fromstring(encode(result['arch']))
- result['arch'] = _inherit_apply_rec(inherit_result, sql_res[3])
-
- result['name'] = sql_res[1]
- result['field_parent'] = sql_res[2] or False
+ result['type'] = sql_res['type']
+ result['view_id'] = sql_res['id']
+
+ source = etree.fromstring(encode(sql_res['arch']))
+ result['arch'] = apply_view_inheritance(cr, user, source, result['view_id'])
+
+ result['name'] = sql_res['name']
+ result['field_parent'] = sql_res['field_parent'] or False
else:
# otherwise, build some kind of default view
if view_type == 'form':
+ # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
res = self.fields_get(cr, user, context=context)
xml = '<?xml version="1.0" encoding="utf-8"?> ' \
'<form string="%s">' % (self._description,)
xml = self.__get_default_search_view(cr, user, context)
else:
- xml = '<?xml version="1.0"?>' # what happens here, graph case?
+ # what happens here, graph case?
raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
result['arch'] = etree.fromstring(encode(xml))
result['name'] = 'default'
result['arch'] = xarch
result['fields'] = xfields
- if submenu:
- if context and context.get('active_id', False):
- data_menu = self.pool.get('ir.ui.menu').browse(cr, user, context['active_id'], context).action
- if data_menu:
- act_id = data_menu.id
- if act_id:
- data_action = self.pool.get('ir.actions.act_window').browse(cr, user, [act_id], context)[0]
- result['submenu'] = getattr(data_action, 'menus', False)
if toolbar:
def clean(x):
x = x[2]
raise NotImplementedError(_('The search method is not implemented on this object !'))
def name_get(self, cr, user, ids, context=None):
+ """Returns the preferred display value (text representation) for the records with the
+ given ``ids``. By default this will be the value of the ``name`` column, unless
+ the model implements a custom behavior.
+ Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
+ guaranteed to be.
+
+ :rtype: list(tuple)
+ :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
"""
-
- :param cr: database cursor
- :param user: current user id
- :type user: integer
- :param ids: list of ids
- :param context: context arguments, like lang, time zone
- :type context: dictionary
- :return: tuples with the text representation of requested objects for to-many relationships
-
- """
- if not context:
- context = {}
if not ids:
return []
if isinstance(ids, (int, long)):
[self._rec_name], context, load='_classic_write')]
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
- """
- Search for records and their display names according to a search domain.
-
- :param cr: database cursor
- :param user: current user id
- :param name: object name to search
- :param args: list of tuples specifying search criteria [('field_name', 'operator', 'value'), ...]
- :param operator: operator for search criterion
- :param context: context arguments, like lang, time zone
- :type context: dictionary
- :param limit: optional max number of records to return
- :return: list of object names matching the search criteria, used to provide completion for to-many relationships
-
- This method is equivalent of :py:meth:`~osv.osv.osv.search` on **name** + :py:meth:`~osv.osv.osv.name_get` on the result.
- See :py:meth:`~osv.osv.osv.search` for an explanation of the possible values for the search domain specified in **args**.
-
+ """Search for records that have a display name matching the given ``name`` pattern if compared
+ with the given ``operator``, while also matching the optional search domain (``args``).
+ This is used for example to provide suggestions based on a partial value for a relational
+ field.
+ This can sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
+ guaranteed to be.
+
+ This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
+ and then :meth:`~.name_get` on the result of the search.
+
+ :param list args: optional search domain (see :meth:`~.search` for syntax),
+ specifying further restrictions
+ :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
+ or ``'='``.
+ :param int limit: optional max number of records to return
+ :rtype: list
+ :return: list of pairs ``(id,text_repr)`` for all matching records.
"""
return self._name_search(cr, user, name, args, operator, context, limit)
def name_create(self, cr, uid, name, context=None):
- """
- Creates a new record by calling :py:meth:`~osv.osv.osv.create` with only one
- value provided: the name of the new record (``_rec_name`` field).
- The new record will also be initialized with any default values applicable
- to this model, or provided through the context. The usual behavior of
- :py:meth:`~osv.osv.osv.create` applies.
- Similarly, this method may raise an exception if the model has multiple
- required fields and some do not have default values.
-
- :param name: name of the record to create
-
- :return: the :py:meth:`~osv.osv.osv.name_get` value for the newly-created record.
+ """Creates a new record by calling :meth:`~.create` with only one
+ value provided: the name of the new record (``_rec_name`` field).
+ The new record will also be initialized with any default values applicable
+ to this model, or provided through the context. The usual behavior of
+ :meth:`~.create` applies.
+ Similarly, this method may raise an exception if the model has multiple
+ required fields and some do not have default values.
+
+ :param name: name of the record to create
+
+ :rtype: tuple
+ :return: the :meth:`~.name_get` pair value for the newly-created record.
"""
rec_id = self.create(cr, uid, {self._rec_name: name}, context);
return self.name_get(cr, uid, [rec_id], context)[0]
def copy(self, cr, uid, id, default=None, context=None):
raise NotImplementedError(_('The copy method is not implemented on this object !'))
- def exists(self, cr, uid, id, context=None):
+ def exists(self, cr, uid, ids, context=None):
+ """Checks whether the given id or ids exist in this model,
+ and returns the list of ids that do. This is simple to use for
+ a truth test on a browse_record::
+
+ if record.exists():
+ pass
+
+ :param ids: id or list of ids to check for existence
+ :type ids: int or [int]
+ :return: the list of ids that currently exist, out of
+ the given `ids`
+ """
raise NotImplementedError(_('The exists method is not implemented on this object !'))
def read_string(self, cr, uid, id, langs, fields=None, context=None):
res = {}
res2 = {}
- self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
+ self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
if not fields:
fields = self._columns.keys() + self._inherit_fields.keys()
#FIXME: collect all calls to _get_source into one SQL call.
return res
def write_string(self, cr, uid, id, langs, vals, context=None):
- self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
+ self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
#FIXME: try to only call the translation in one SQL
for lang in langs:
for field in vals:
values = defaults
return values
+ def clear_caches(self):
+ """ Clear the caches
+
+ This clears the caches associated with methods decorated with
+ ``tools.ormcache`` or ``tools.ormcache_multi``.
+ """
+ try:
+ getattr(self, '_ormcache')
+ self._ormcache = {}
+ except AttributeError:
+ pass
+
+ def check_access_rule(self, cr, uid, ids, operation, context=None):
+ """Verifies that the operation given by ``operation`` is allowed for the user
+ according to ir.rules.
+
+ :param operation: one of ``write``, ``unlink``
+ :raise except_orm: if the current ir.rules do not permit this operation.
+ :return: None if the operation is allowed
+ """
+ raise NotImplementedError(_('The check_access_rule method is not implemented on this object !'))
+
class orm_memory(orm_template):
_protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
- _max_count = config.get('osv_memory_count_limit')
- _max_hours = config.get('osv_memory_age_limit')
+ _inherit_fields = {}
+ _max_count = None
+ _max_hours = None
_check_time = 20
@classmethod
self.datas = {}
self.next_id = 0
self.check_id = 0
+ self._max_count = config.get('osv_memory_count_limit')
+ self._max_hours = config.get('osv_memory_age_limit')
cr.execute('delete from wkf_instance where res_type=%s', (self._name,))
def _check_access(self, uid, object_id, mode):
for k,v in self.datas.iteritems():
if v['internal.date_access'] < max:
tounlink.append(k)
- self.unlink(cr, 1, tounlink)
+ self.unlink(cr, ROOT_USER_ID, tounlink)
# Count-based expiration
if self._max_count and len(self.datas) > self._max_count:
# sort by access time to remove only the first/oldest ones in LRU fashion
records = self.datas.items()
records.sort(key=lambda x:x[1]['internal.date_access'])
- self.unlink(cr, 1, [x[0] for x in records[:len(self.datas)-self._max_count]])
+ self.unlink(cr, ROOT_USER_ID, [x[0] for x in records[:len(self.datas)-self._max_count]])
return True
args = [('active', '=', 1)]
if args:
import expression
- e = expression.expression(args)
- e.parse(cr, user, self, context)
+ e = expression.expression(cr, user, args, self, context)
res = e.exp
return res or []
break
f = True
for arg in result:
+ if len(arg) != 3:
+ # orm_memory supports only simple (field, operator, value) domain terms; skip anything else.
+ continue
if arg[1] == '=':
val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
# nothing to check in memory...
pass
- def exists(self, cr, uid, id, context=None):
- return id in self.datas
+ def exists(self, cr, uid, ids, context=None):
+ if isinstance(ids, (long,int)):
+ ids = [ids]
+ return [id for id in ids if id in self.datas]
+
+ def check_access_rule(self, cr, uid, ids, operation, context=None):
+ # ir.rules do not currently apply for orm.memory instances,
+ # only the implicit visibility=owner one.
+ for id in ids:
+ self._check_access(uid, id, operation)
+
+# Definition of log access columns, automatically added to models if
+# self._log_access is True
+LOG_ACCESS_COLUMNS = {
+ 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
+ 'create_date': 'TIMESTAMP',
+ 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
+ 'write_date': 'TIMESTAMP'
+}
+# special columns automatically created by the ORM
+MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS.keys() + \
+ ['internal.create_uid', 'internal.date_access'] # for osv_memory only
class orm(orm_template):
_sql_constraints = []
_protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
__logger = logging.getLogger('orm')
__schema = logging.getLogger('orm.schema')
+
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
"""
Get the list of records in list view grouped by the given ``groupby`` fields
:param int offset: optional number of records to skip
:param int limit: optional max number of records to return
:param dict context: context arguments, like lang, time zone
- :param order: optional ``order by`` specification, for overriding the natural
- sort ordering of the groups, see also :py:meth:`~osv.osv.osv.search`
- (supported only for many2one fields currently)
+ :param list orderby: optional ``order by`` specification, for
+ overriding the natural sort ordering of the
+ groups, see also :py:meth:`~osv.osv.osv.search`
+ (supported only for many2one fields currently)
:return: list of dictionaries(one dictionary for each record) containing:
* the values of fields grouped by the fields in ``groupby`` argument
"""
context = context or {}
- self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
+ self.pool.get('ir.model.access').check(cr, uid, self._name, 'read')
if not fields:
fields = self._columns.keys()
groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
+ # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
fget = self.fields_get(cr, uid, fields)
float_int_fields = filter(lambda x: fget[x]['type'] in ('float', 'integer'), fields)
flist = ''
del d['id']
return data
- def _inherits_join_add(self, parent_model_name, query):
+ def _inherits_join_add(self, current_table, parent_model_name, query):
"""
Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
-
+ :param current_table: current model object
:param parent_model_name: name of the parent model for which the clauses should be added
:param query: query object on which the JOIN should be added
"""
- inherits_field = self._inherits[parent_model_name]
+ inherits_field = current_table._inherits[parent_model_name]
parent_model = self.pool.get(parent_model_name)
parent_table_name = parent_model._table
quoted_parent_table_name = '"%s"' % parent_table_name
if quoted_parent_table_name not in query.tables:
query.tables.append(quoted_parent_table_name)
- query.where_clause.append('("%s".%s = %s.id)' % (self._table, inherits_field, parent_table_name))
+ query.where_clause.append('(%s.%s = %s.id)' % (current_table._table, inherits_field, parent_table_name))
+
+
def _inherits_join_calc(self, field, query):
"""
while field in current_table._inherit_fields and not field in current_table._columns:
parent_model_name = current_table._inherit_fields[field][0]
parent_table = self.pool.get(parent_model_name)
- self._inherits_join_add(parent_model_name, query)
+ self._inherits_join_add(current_table, parent_model_name, query)
current_table = parent_table
return '"%s".%s' % (current_table._table, field)
while ids_lst:
iids = ids_lst[:40]
ids_lst = ids_lst[40:]
- res = f.get(cr, self, iids, k, 1, {})
+ res = f.get(cr, self, iids, k, ROOT_USER_ID, {})
for key, val in res.items():
if f._multi:
val = val[k]
pass
if not val_id:
raise except_orm(_('ValidateError'),
- _('Invalid value for reference field "%s" (last part must be a non-zero integer): "%s"') % (field, value))
+ _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
val = val_model
else:
val = value
elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
return
raise except_orm(_('ValidateError'),
- _('The value "%s" for the field "%s" is not in the selection') % (value, field))
+ _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
def _check_removed_columns(self, cr, log=False):
# iterate on the database columns to drop the NOT NULL constraints
# of fields which were required but have been removed (or will be added by another module)
columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
- columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
+ columns += MAGIC_COLUMNS
cr.execute("SELECT a.attname, a.attnotnull"
" FROM pg_class c, pg_attribute a"
" WHERE c.relname=%s"
column_data = self._select_column_data(cr)
for k, f in self._columns.iteritems():
- if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
+ if k in MAGIC_COLUMNS:
continue
# Don't update custom (also called manual) fields
if f.manual and not update_custom_fields:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
- cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
+ cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
self._table, k, f_pg_type, f._type, newname)
# set the field to the default value if any
if k in self._defaults:
if callable(self._defaults[k]):
- default = self._defaults[k](self, cr, 1, context)
+ default = self._defaults[k](self, cr, ROOT_USER_ID, context)
else:
default = self._defaults[k]
if not isinstance(f, fields.function) or f.store:
# add the missing field
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
- cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''")))
+ cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
self.__schema.debug("Table '%s': added column '%s' with definition=%s",
self._table, k, get_pg_type(f)[1])
# initialize it
if not create and k in self._defaults:
if callable(self._defaults[k]):
- default = self._defaults[k](self, cr, 1, context)
+ default = self._defaults[k](self, cr, ROOT_USER_ID, context)
else:
default = self._defaults[k]
def _create_table(self, cr):
cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
- cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''")))
+ cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,))
self.__schema.debug("Table '%s': created", self._table)
def _add_log_columns(self, cr):
- logs = {
- 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
- 'create_date': 'TIMESTAMP',
- 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
- 'write_date': 'TIMESTAMP'
- }
- for k in logs:
+ for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
cr.execute("""
SELECT c.relname
FROM pg_class c, pg_attribute a
WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
- """, (self._table, k))
+ """, (self._table, field))
if not cr.rowcount:
- cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k]))
+ cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
cr.commit()
self.__schema.debug("Table '%s': added column '%s' with definition=%s",
- self._table, k, logs[k])
+ self._table, field, field_def)
def _select_column_data(self, cr):
if f == order:
ok = False
if ok:
- self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order, length))
+ self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
for (key, _, msg) in self._sql_constraints:
if self._name in obj._inherits:
obj._inherits_reload()
+
def _inherits_reload(self):
""" Recompute the _inherit_fields mapping.
res = {}
for table in self._inherits:
other = self.pool.get(table)
- res.update(other._inherit_fields)
for col in other._columns.keys():
- res[col] = (table, self._inherits[table], other._columns[col])
+ res[col] = (table, self._inherits[table], other._columns[col], table)
for col in other._inherit_fields.keys():
- res[col] = (table, self._inherits[table], other._inherit_fields[col][2])
+ res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
self._inherit_fields = res
+ self._all_columns = self._get_column_infos()
self._inherits_reload_src()
+
+ def _get_column_infos(self):
+ """Returns a dict mapping all fields names (direct fields and
+ inherited field via _inherits) to a ``column_info`` struct
+ giving detailed column information """
+ result = {}
+ for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
+ result[k] = fields.column_info(k, col, parent, m2o, original_parent)
+ for k, col in self._columns.iteritems():
+ result[k] = fields.column_info(k, col)
+ return result
+
+
def _inherits_check(self):
for table, field_name in self._inherits.items():
if field_name not in self._columns:
"""
ira = self.pool.get('ir.model.access')
- write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
- ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
+ write_access = ira.check(cr, user, self._name, 'write', False) or \
+ ira.check(cr, user, self._name, 'create', False)
return super(orm, self).fields_get(cr, user, fields, context, write_access)
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
if not context:
context = {}
- self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'read')
if not fields:
fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
if isinstance(ids, (int, long)):
res = []
if len(fields_pre):
def convert_field(f):
- f_qual = "%s.%s" % (self._table, f) # need fully-qualified references in case len(tables) > 1
+ f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
if f in ('create_date', 'write_date'):
return "date_trunc('second', %s) as %s" % (f_qual, f)
if f == self.CONCURRENCY_CHECK_FIELD:
self._check_concurrency(cr, ids, context)
- self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
+ self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink')
properties = self.pool.get('ir.property')
domain = [('res_id', '=', False),
# Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
# as these are not connected with real database foreign keys, and would be dangling references.
- # Step 1. Calling unlink of ir_model_data only for the affected IDS.
- referenced_ids = pool_model_data.search(cr, uid, [('res_id','in',list(sub_ids)),('model','=',self._name)], context=context)
+ # Note: following steps performed as admin to avoid access rights restrictions, and with no context
+ # to avoid possible side-effects during admin calls.
+ # Step 1. Calling unlink of ir_model_data only for the affected IDS
+ reference_ids = pool_model_data.search(cr, ROOT_USER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
# Step 2. Marching towards the real deletion of referenced records
- pool_model_data.unlink(cr, uid, referenced_ids, context=context)
+ if reference_ids:
+ pool_model_data.unlink(cr, ROOT_USER_ID, reference_ids)
# For the same reason, removing the record relevant to ir_values
ir_value_ids = ir_values_obj.search(cr, uid,
ids = [ids]
self._check_concurrency(cr, ids, context)
- self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'write')
result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
"""
if not context:
context = {}
- self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)
+ self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
vals = self._add_missing_default_values(cr, user, vals, context)
upd_todo = []
for v in vals.keys():
if v in self._inherit_fields:
- (table, col, col_detail) = self._inherit_fields[v]
+ (table, col, col_detail, original_parent) = self._inherit_fields[v]
tocreate[table][v] = vals[v]
del vals[v]
else:
:return: [(priority, model_name, [record_ids,], [function_fields,])]
"""
- # FIXME: rewrite, cleanup, use real variable names
- # e.g.: http://pastie.org/1222060
- result = {}
- fncts = self.pool._store_function.get(self._name, [])
- for fnct in range(len(fncts)):
- if fncts[fnct][3]:
- ok = False
- if not fields:
- ok = True
- for f in (fields or []):
- if f in fncts[fnct][3]:
- ok = True
- break
- if not ok:
- continue
-
- result.setdefault(fncts[fnct][0], {})
-
- # uid == 1 for accessing objects having rules defined on store fields
- ids2 = fncts[fnct][2](self, cr, 1, ids, context)
- for id in filter(None, ids2):
- result[fncts[fnct][0]].setdefault(id, [])
- result[fncts[fnct][0]][id].append(fnct)
- dict = {}
- for object in result:
- k2 = {}
- for id, fnct in result[object].items():
- k2.setdefault(tuple(fnct), [])
- k2[tuple(fnct)].append(id)
- for fnct, id in k2.items():
- dict.setdefault(fncts[fnct[0]][4], [])
- dict[fncts[fnct[0]][4]].append((fncts[fnct[0]][4], object, id, map(lambda x: fncts[x][1], fnct)))
- result2 = []
- tmp = dict.keys()
- tmp.sort()
- for k in tmp:
- result2 += dict[k]
- return result2
+ if fields is None: fields = []
+ stored_functions = self.pool._store_function.get(self._name, [])
+
+ # use indexed names for the details of the stored_functions:
+ model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)
+
+ # only keep functions that should be triggered for the ``fields``
+ # being written to.
+ to_compute = [f for f in stored_functions \
+ if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]
+
+ mapping = {}
+ for function in to_compute:
+ # use admin user for accessing objects having rules defined on store fields
+ target_ids = [id for id in function[id_mapping_fnct_](self, cr, ROOT_USER_ID, ids, context) if id]
+
+ # the compound key must consider the priority and model name
+ key = (function[priority_], function[model_name_])
+ for target_id in target_ids:
+ mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))
+
+ # Here mapping looks like:
+ # { (10, 'model_a') : { target_id1: set([function_1_tuple, function_2_tuple]), ... }
+ # (20, 'model_a') : { target_id2: set([function_3_tuple, function_4_tuple]), ... }
+ # (99, 'model_a') : { target_id1: set([function_5_tuple, function_6_tuple]), ... }
+ # }
+
+ # Now we need to generate the batch function calls list
+ # call_map =
+ # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
+ call_map = {}
+ for ((priority,model), id_map) in mapping.iteritems():
+ functions_ids_maps = {}
+ # function_ids_maps =
+ # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
+ for id, functions in id_map.iteritems():
+ functions_ids_maps.setdefault(tuple(functions), []).append(id)
+ for functions, ids in functions_ids_maps.iteritems():
+ call_map.setdefault((priority,model),[]).append((priority, model, ids,
+ [f[func_field_to_compute_] for f in functions]))
+ ordered_keys = call_map.keys()
+ ordered_keys.sort()
+ result = []
+ if ordered_keys:
+ result = reduce(operator.add, (call_map[k] for k in ordered_keys))
+ return result
def _store_set_values(self, cr, uid, ids, fields, context):
"""Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
for key in keys:
val = todo[key]
if key:
- # uid == 1 for accessing objects having rules defined on store fields
- result = self._columns[val[0]].get(cr, self, ids, val, 1, context=context)
+ # use admin user for accessing objects having rules defined on store fields
+ result = self._columns[val[0]].get(cr, self, ids, val, ROOT_USER_ID, context=context)
for id, value in result.items():
if field_flag:
for f in value.keys():
else:
for f in val:
- # uid == 1 for accessing objects having rules defined on store fields
- result = self._columns[f].get(cr, self, ids, f, 1, context=context)
+ # use admin user for accessing objects having rules defined on store fields
+ result = self._columns[f].get(cr, self, ids, f, ROOT_USER_ID, context=context)
for r in result.keys():
if field_flag:
if r in field_dict.keys():
if domain:
import expression
- e = expression.expression(domain)
- e.parse(cr, user, self, context)
+ e = expression.expression(cr, user, domain, self, context)
tables = e.get_tables()
where_clause, where_params = e.to_sql()
where_clause = where_clause and [where_clause] or []
if parent_model and child_object:
# as inherited rules are being applied, we need to add the missing JOIN
# to reach the parent table (if it was not JOINed yet in the query)
- child_object._inherits_join_add(parent_model, query)
+ child_object._inherits_join_add(child_object, parent_model, query)
query.where_clause += added_clause
query.where_clause_params += added_params
for table in added_tables:
else:
continue # ignore non-readable or "non-joinable" fields
elif order_field in self._inherit_fields:
- parent_obj = self.pool.get(self._inherit_fields[order_field][0])
+ parent_obj = self.pool.get(self._inherit_fields[order_field][3])
order_column = parent_obj._columns[order_field]
if order_column._classic_read:
inner_clause = self._inherits_join_calc(order_field, query)
"""
if context is None:
context = {}
- self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)
+ self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read')
query = self._where_calc(cr, user, args, context=context)
self._apply_ir_rules(cr, user, query, 'read', context=context)
else:
raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
+ # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
fields = self.fields_get(cr, uid, context=context)
for f in fields:
ftype = fields[f]['type']
- if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
+ if self._log_access and f in LOG_ACCESS_COLUMNS:
del data[f]
if f in default:
# force a clean recompute!
for parent_column in ['parent_left', 'parent_right']:
data.pop(parent_column, None)
-
- for v in self._inherits:
- del data[self._inherits[v]]
+ # Remove the _inherits fields from data recursively; missing parents will
+ # be created by create() (so that copy() copy everything).
+ def remove_ids(inherits_dict):
+ for parent_table in inherits_dict:
+ del data[inherits_dict[parent_table]]
+ remove_ids(self.pool.get(parent_table)._inherits)
+ remove_ids(self._inherits)
return data
def copy_translations(self, cr, uid, old_id, new_id, context=None):
seen_map[self._name].append(old_id)
trans_obj = self.pool.get('ir.translation')
+ # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
fields = self.fields_get(cr, uid, context=context)
translation_records = []
def exists(self, cr, uid, ids, context=None):
if type(ids) in (int, long):
ids = [ids]
- query = 'SELECT count(1) FROM "%s"' % (self._table)
+ query = 'SELECT id FROM "%s"' % (self._table)
cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
- return cr.fetchone()[0] == len(ids)
+ return [x[0] for x in cr.fetchall()]
def check_recursion(self, cr, uid, ids, context=None, parent=None):
warnings.warn("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \