def intersect(la, lb):
    """Return the elements of ``la`` that also occur in ``lb``, preserving
    the order (and duplicates) of ``la``.

    :param la: iterable of candidate elements
    :param lb: iterable of allowed elements (assumed hashable — the previous
               list-based lookup also worked for unhashable items)
    :return: elements of ``la`` found in ``lb``
    """
    # Build the lookup set once: testing membership against the raw list
    # would make this O(len(la) * len(lb)).
    lb_set = set(lb)
    return filter(lambda x: x in lb_set, la)
def fix_import_export_id_paths(fieldname):
    """
    Normalize a field path used by import/export: give the pseudo-fields
    ``.id`` (database id) and ``:id`` (external id) their own path
    component, then split the whole path on '/'.

    :param str fieldname: name of the field to import/export
    :return: the field name split into path components
    :rtype: list of str
    """
    # '.id' keeps its leading dot but gets its own component: 'f.id' -> 'f/.id'
    db_id_form = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    # ':id' is rewritten as a plain 'id' component: 'f:id' -> 'f/id'
    external_id_form = re.sub(r'([^/]):id', r'\1/id', db_id_form)
    return external_id_form.split('/')
+
class except_orm(Exception):
    # Generic ORM exception: ``name`` is a short title for the error and
    # ``value`` the detailed message.
    # NOTE(review): only ``name`` is stored in the visible fragment; the
    # rest of __init__ (storing ``value``, calling Exception.__init__) is
    # presumably cut off by the hunk boundary — confirm against full file.
    def __init__(self, name, value):
        self.name = name
_log_create = False
CONCURRENCY_CHECK_FIELD = '__last_update'
+
def log(self, cr, uid, id, message, secondary=False, context=None):
if context and context.get('disable_log'):
return True
else:
r = d['name']
else:
- break
+ postfix = 0
+ while True:
+ n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
+ if not model_data.search(cr, uid, [('name', '=', n)]):
+ break
+ postfix += 1
+ model_data.create(cr, uid, {
+ 'name': n,
+ 'model': self._name,
+ 'res_id': r['id'],
+ })
+ r = n
else:
r = r[f[i]]
# To display external name of selection field when its exported
cols = self._columns.copy()
for f in self._inherit_fields:
cols.update({f: self._inherit_fields[f][2]})
- def fsplit(fieldname):
- fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
- fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
- return fixed_external_id.split('/')
- fields_to_export = map(fsplit, fields_to_export)
+ fields_to_export = map(fix_import_export_id_paths, fields_to_export)
datas = []
for row in self.browse(cr, uid, ids, context):
datas += self.__export_row(cr, uid, row, fields_to_export, context)
"""
Import given data in given module
- :param cr: database cursor
- :param uid: current user id
- :param fields: list of fields
- :param data: data to import
- :param mode: 'init' or 'update' for record creation
- :param current_module: module name
- :param noupdate: flag for record creation
- :param context: context arguments, like lang, time zone,
- :param filename: optional file to store partial import state for recovery
- :rtype: tuple
-
This method is used when importing data via client menu.
Example of fields to import for a sale.order::
order_line/price_unit,
order_line/product_uom_qty,
order_line/product_uom/id (=xml_id)
+
+ This method returns a 4-tuple with the following structure:
+
+ * The first item is a return code: either ``-1`` in case of an import error, or the number of rows imported
+
+ :param cr: database cursor
+ :param uid: current user id
+ :param fields: list of fields
+ :param data: data to import
+ :param mode: 'init' or 'update' for record creation
+ :param current_module: module name
+ :param noupdate: flag for record creation
+ :param context: context arguments, like lang, time zone
+ :param filename: optional file to store partial import state for recovery
+ :returns: 4-tuple of a return code, the errored resource (or 0), an error message (or 0), and an unused placeholder
+ :rtype: (int, dict|0, str|0, ''|0)
"""
if not context:
context = {}
- def _replace_field(x):
- x = re.sub('([a-z0-9A-Z_])\\.id$', '\\1/.id', x)
- return x.replace(':id','/id').split('/')
- fields = map(_replace_field, fields)
+ fields = map(fix_import_export_id_paths, fields)
logger = netsvc.Logger()
ir_model_data_obj = self.pool.get('ir.model.data')
args = [('active', '=', 1)]
if args:
import expression
- e = expression.expression(args)
- e.parse(cr, user, self, context)
+ e = expression.expression(cr, user, args, self, context)
res = e.exp
return res or []
break
f = True
for arg in result:
+ if len(arg) != 3:
+ # Amazing hack: orm_memory handles only simple domains.
+ continue
if arg[1] == '=':
val = eval('data[arg[0]]'+'==' +' arg[2]', locals())
elif arg[1] in ['<', '>', 'in', 'not in', '<=', '>=', '<>']:
for id in ids:
self._check_access(uid, id, operation)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# list(...) is a no-op on Python 2 (keys() is already a list) and keeps the
# concatenation valid on Python 3, where keys() returns a view object.
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys()) + \
               ['internal.create_uid', 'internal.date_access'] # for osv_memory only
+
class orm(orm_template):
    # SQL-backed model base class (continues past this fragment).
    # Model-level SQL constraints; empty here, presumably overridden by
    # concrete models — confirm against subclass declarations.
    _sql_constraints = []
    # Backing database table name; None here — presumably derived from the
    # model name elsewhere in the class. TODO confirm.
    _table = None
    # ORM entry points listed as protected; the enforcement site is not
    # visible in this fragment.
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
    # Separate loggers for general ORM messages and schema-change messages.
    __logger = logging.getLogger('orm')
    __schema = logging.getLogger('orm.schema')
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
"""
Get the list of records in list view grouped by the given ``groupby`` fields
# iterate on the database columns to drop the NOT NULL constraints
# of fields which were required but have been removed (or will be added by another module)
columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
- columns += ('id', 'write_uid', 'write_date', 'create_uid', 'create_date') # openerp access columns
+ columns += MAGIC_COLUMNS
cr.execute("SELECT a.attname, a.attnotnull"
" FROM pg_class c, pg_attribute a"
" WHERE c.relname=%s"
column_data = self._select_column_data(cr)
for k, f in self._columns.iteritems():
- if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
+ if k in MAGIC_COLUMNS:
continue
# Don't update custom (also called manual) fields
if f.manual and not update_custom_fields:
def _add_log_columns(self, cr):
    """Ensure the log-access columns (create_uid/create_date,
    write_uid/write_date) exist on this model's table, adding any that
    are missing.

    Column names and their SQL definitions come from the module-level
    LOG_ACCESS_COLUMNS mapping, so the set of columns is defined in one
    place. This span contained unresolved diff markers; this is the
    resolved post-patch code.

    :param cr: database cursor
    """
    for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
        # Check the PostgreSQL catalog to see whether the column already
        # exists on our table.
        cr.execute("""
            SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, field))
        if not cr.rowcount:
            # Table/column names cannot be bound as SQL parameters, hence
            # the string interpolation; both come from trusted model metadata.
            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
            cr.commit()
            self.__schema.debug("Table '%s': added column '%s' with definition=%s",
                self._table, field, field_def)
def _select_column_data(self, cr):
if domain:
import expression
- e = expression.expression(domain)
- e.parse(cr, user, self, context)
+ e = expression.expression(cr, user, domain, self, context)
tables = e.get_tables()
where_clause, where_params = e.to_sql()
where_clause = where_clause and [where_clause] or []
for f in fields:
ftype = fields[f]['type']
- if self._log_access and f in ('create_date', 'create_uid', 'write_date', 'write_uid'):
+ if self._log_access and f in LOG_ACCESS_COLUMNS:
del data[f]
if f in default: