init: function (model, domain, fields, options) {
this.cells = [];
this.domain = domain;
+ this.context = options.context;
this.no_data = true;
+ this.updating = false;
this.model = model;
this.fields = fields;
this.fields.__count = {type: 'integer', string:_t('Quantity')};
// ----------------------------------------------------------------------
// Data updating methods
// ----------------------------------------------------------------------
- // Load the data from the db, using the method this.load_data
// update_data will try to preserve the expand/not expanded status of each
- // column/row. If you want to expand all, then set this.cols.headers/this.rows.headers
+ // column/row. If you want to expand all, then set this.cols.headers/this.rows.headers
// to null before calling update_data.
- update_data: function () {
- var self = this;
-
- return this.load_data().then (function (result) {
- if (result) {
- self.no_data = false;
- self[self.cols.headers ? 'update_headers' : 'expand_headers'](self.cols, result.col_headers);
- self[self.rows.headers ? 'update_headers' : 'expand_headers'](self.rows, result.row_headers);
- } else {
- self.no_data = true;
- }
- });
- },
+ update_data: function () {
+ var self = this;
+ this.updating = this.perform_requests().then (function () {
+ var data = Array.prototype.slice.call(arguments);
+ self.no_data = !data[0].length;
+ if (self.no_data) {
+ return;
+ }
+ var row_headers = [],
+ col_headers = [];
+ self.cells = [];
+
+ var dim_col = self.cols.groupby.length,
+ i, j, index;
+
+ for (i = 0; i < self.rows.groupby.length + 1; i++) {
+ for (j = 0; j < dim_col + 1; j++) {
+ index = i*(dim_col + 1) + j;
+ self.make_headers_and_cell(data[index], row_headers, col_headers, i);
+ }
+ }
+ self.set_headers(row_headers, self.rows);
+ self.set_headers(col_headers, self.cols);
+ });
+ return this.updating;
+ },
- expand_headers: function (root, new_headers) {
- root.headers = new_headers;
- _.each(root.headers, function (header) {
- header.root = root;
- header.expanded = (header.children.length > 0);
- });
- },
+ make_headers_and_cell: function (data_pts, row_headers, col_headers, index, prefix, expand) {
+ var self = this;
+ data_pts.forEach(function (data_pt) {
+ var row_value = (prefix || []).concat(data_pt.attributes.value.slice(0,index));
+ var col_value = data_pt.attributes.value.slice(index);
- update_headers: function (root, new_headers) {
- _.each(root.headers, function (header) {
- var corresponding_header = _.find(new_headers, function (h) {
- return _.isEqual(h.path, header.path);
- });
- if (corresponding_header && header.expanded) {
- corresponding_header.expanded = true;
- _.each(corresponding_header.children, function (c) {
- c.expanded = false;
- });
- }
- if (corresponding_header && (!header.expanded)) {
- corresponding_header.expanded = false;
- }
- });
- var updated_headers = _.filter(new_headers, function (header) {
- return (header.expanded !== undefined);
- });
- _.each(updated_headers, function (header) {
- if (!header.expanded) {
- header.children = [];
- }
- header.root = root;
- });
- root.headers = updated_headers;
- },
+ if (expand && !_.find(col_headers, function (hdr) {return self.isEqual(col_value, hdr.path);})) {
+ return;
+ }
+ var row = self.find_or_create_header(row_headers, row_value, data_pt);
+ var col = self.find_or_create_header(col_headers, col_value, data_pt);
- // ----------------------------------------------------------------------
- // Data loading methods
- // ----------------------------------------------------------------------
-
- // To obtain all the values required to draw the full table, we have to do
- // at least 2 + min(row.groupby.length, col.groupby.length)
- // calls to readgroup. To simplify the code, we will always do
- // 2 + row.groupby.length calls. For example, if row.groupby = [r1, r2, r3]
- // and col.groupby = [c1, c2], then we will make the call with the following
- // groupbys: [r1,r2,r3], [c1,r1,r2,r3], [c1,c2,r1,r2,r3], [].
- load_data: function () {
- var self = this,
- cols = this.cols.groupby,
- rows = this.rows.groupby,
- visible_fields = rows.concat(cols, self.measures);
+ var cell_value = _.map(self.measures, function (m) {
+ return data_pt.attributes.aggregates[m.field];
+ });
+ self.cells.push({
+ x: Math.min(row.id, col.id),
+ y: Math.max(row.id, col.id),
+ values: cell_value
+ });
+ });
+ },
- if (this.measures.length === 0) {
- return $.Deferred.resolve().promise();
- }
+ make_header: function (values) {
+ return _.extend({
+ children: [],
+ domain: this.domain,
+ expanded: undefined,
+ id: _.uniqueId(),
+ path: [],
+ root: undefined,
+ title: undefined
+ }, values || {});
+ },
- var groupbys = _.map(_.range(cols.length + 1), function (i) {
- return cols.slice(0, i).concat(rows);
+ find_or_create_header: function (headers, path, data_pt) {
+ var self = this;
+ var hdr = _.find(headers, function (header) {
+ return self.isEqual(path, header.path);
});
- groupbys.push([]);
-
- var get_data_requests = _.map(groupbys, function (groupby) {
- return self.get_groups(groupby, visible_fields, self.domain);
+ if (hdr) {
+ return hdr;
+ }
+ if (!path.length) {
+ hdr = this.make_header({title: _t('Total')});
+ headers.push(hdr);
+ return hdr;
+ }
+ hdr = this.make_header({
+ path:path,
+ domain:data_pt.model._domain,
+ title: _t(_.last(path))
});
-
- return $.when.apply(null, get_data_requests).then(function () {
- var data = Array.prototype.slice.call(arguments),
- row_data = data[0],
- col_data = (cols.length !== 0) ? data[data.length - 2] : [],
- has_data = data[data.length - 1][0];
-
- return has_data && self.format_data(col_data, row_data, data);
+ var parent = _.find(headers, function (header) {
+ return self.isEqual(header.path, _.initial(path, 1));
});
- },
- get_groups: function (groupbys, fields, domain, path) {
- var self = this,
- groupby = (groupbys.length) ? groupbys[0] : [];
- path = path || [];
+ var previous = parent.children.length ? _.last(parent.children) : parent;
+ headers.splice(headers.indexOf(previous) + 1, 0, hdr);
+ parent.children.push(hdr);
+ return hdr;
+ },
- return this._query_db(groupby, fields, domain, path).then(function (groups) {
- if (groupbys.length > 1) {
- var get_subgroups = $.when.apply(null, _.map(groups, function (group) {
- return self.get_groups(_.rest(groupbys), fields, group.model._domain, path.concat(group.attributes.value)).then(function (subgroups) {
- group.children = subgroups;
- });
- }));
- return get_subgroups.then(function () {
- return groups;
- });
- } else {
- return groups;
+ perform_requests: function (group1, group2, domain) {
+ var self = this,
+ requests = [],
+ row_gbs = _.pluck(this.rows.groupby, 'field'),
+ col_gbs = _.pluck(this.cols.groupby, 'field'),
+ field_list = row_gbs.concat(col_gbs, _.pluck(this.measures, 'field')),
+ fields = field_list.map(function (f) { return self.raw_field(f); });
+
+ group1 = group1 || row_gbs;
+ group2 = group2 || col_gbs;
+
+ var i,j, groupbys;
+ for (i = 0; i < group1.length + 1; i++) {
+ for (j = 0; j < group2.length + 1; j++) {
+ groupbys = group1.slice(0,i).concat(group2.slice(0,j));
+ requests.push(self.get_groups(groupbys, fields, domain || self.domain));
}
- });
-
- },
+ }
+ return $.when.apply(null, requests);
+ },
- _query_db: function (groupby, fields, domain, path) {
- var self = this,
- field_ids = _.without(_.pluck(fields, 'field'), '__count'),
- fields = _.map(field_ids, function(f) { return self.raw_field(f); });
+ // set the 'expanded' status of new_headers to match root.headers, and attach root as their root
+ set_headers: function(new_headers, root) {
+ var self = this;
+ if (root.headers) {
+ _.each(root.headers, function (header) {
+ var corresponding_header = _.find(new_headers, function (h) {
+ return self.isEqual(h.path, header.path);
+ });
+ if (corresponding_header && header.expanded) {
+ corresponding_header.expanded = true;
+ _.each(corresponding_header.children, function (c) {
+ c.expanded = false;
+ });
+ }
+ if (corresponding_header && (!header.expanded)) {
+ corresponding_header.expanded = false;
+ corresponding_header.children = [];
+ }
+ });
+ var updated_headers = _.filter(new_headers, function (header) {
+ return (header.expanded !== undefined);
+ });
+ _.each(updated_headers, function (header) {
+ header.root = root;
+ });
+ root.headers = updated_headers;
+ } else {
+ root.headers = new_headers;
+ _.each(root.headers, function (header) {
+ header.root = root;
+ header.expanded = (header.children.length > 0);
+ });
+ }
+ return new_headers;
+ },
- return this.model.query(field_ids)
- .filter(domain)
+ get_groups: function (groupbys, fields, domain) {
+ var self = this;
+ return this.model.query(_.without(fields, '__count'))
+ .filter(domain)
+ .context(this.context)
- .group_by(groupby.field)
- .then(function (results) {
- var groups = _.filter(results, function (group) {
- return group.attributes.length > 0;
- });
- return _.map(groups, function (g) { return self.format_group(g, path); });
- });
- },
+ .lazy(false)
+ .group_by(groupbys)
+ .then(function (groups) {
+ return groups.filter(function (group) {
+ return group.attributes.length > 0;
+ }).map(function (group) {
+ var attrs = group.attributes,
+ grouped_on = attrs.grouped_on instanceof Array ? attrs.grouped_on : [attrs.grouped_on],
+ raw_grouped_on = grouped_on.map(function (f) {
+ return self.raw_field(f);
+ });
+ if (grouped_on.length === 1) {
+ attrs.value = [attrs.value];
+ }
+ attrs.value = _.range(grouped_on.length).map(function (i) {
+ var grp = grouped_on[i],
+ field = self.fields[grp];
+ if (attrs.value[i] === false) {
+ return _t('Undefined');
+ } else if (attrs.value[i] instanceof Array) {
+ return attrs.value[i][1];
+ } else if (field && field.type === 'selection') {
+ var selected = _.where(field.selection, {0: attrs.value[i]})[0];
+ return selected ? selected[1] : attrs.value[i];
+ }
+ return attrs.value[i];
+ });
+ attrs.aggregates.__count = group.attributes.length;
+ attrs.grouped_on = raw_grouped_on;
+ return group;
+ });
+ });
+ },
// if field is a fieldname, returns field, if field is field_id:interval, returns field_id
raw_field: function (field) {
--- /dev/null
+- |
+ To check that common dangerous operations are not allowed by the safe_eval mechanism, attempt to
+ evaluate unauthorized expressions, and verify that they trigger an error.
+-
+ 1. Try a few common expressions to verify they work with safe_eval
+-
+ !python {model: ir.model}: |
+ from openerp.tools.safe_eval import safe_eval
+ expected = (1, {"a": 9 * 2}, (True, False, None))
+ actual = safe_eval('(1, {"a": 9 * 2}, (True, False, None))')
+ assert actual == expected, "Simple python expressions are not working with safe_eval"
+-
+ 2. Try simple literal definition to verify it works with literal_eval
+-
+ !python {model: ir.model}: |
+ import ast
+ expected = (1, {"a": 9}, (True, False, None))
+ actual = ast.literal_eval('(1, {"a": 9}, (True, False, None))')
+ assert actual == expected, "Simple python expressions are not working with literal_eval"
+-
+ 3. Try arithmetic expression in literal_eval to verify it does not work
+-
+ !python {model: ir.model}: |
+ import ast
+ try:
+ ast.literal_eval('(1, {"a": 2*9}, (True, False, None))')
+ assert False, "literal_eval should not accept arithmetic expressions"
+ except ValueError:
+ pass
+-
+ 4. Try forbidden expressions in literal_eval to verify they are not allowed
+-
+ !python {model: ir.model}: |
+ import ast
+ try:
+ ast.literal_eval('{"a": True.__class__}')
+ assert False, "literal_eval should accept only literals"
+ except ValueError:
+ pass
+-
+ 5. Try forbidden expressions in safe_eval to verify they are not allowed (open)
+-
+ !python {model: ir.model}: |
+ from openerp.tools.safe_eval import safe_eval
+ from openerp.tools.misc import mute_logger
+ try:
+ with mute_logger('openerp.tools.safe_eval'):
+ safe_eval('open("/etc/passwd","r")')
+ assert False, "safe_eval should not allow calling open() builtin"
+ except ValueError:
+ pass
+
+-
+ "ORM test: verify that parent_store computation are going right"
+-
+ 0. Emulate normal behavior of tree structure storing
+-
+ !python {model: res.partner.category}: |
+ # pretend the pool has finished loading to avoid deferring parent_store computation
+ self.pool._init = False
+
+ # Force partner_categ.copy() to copy children
+ self.pool['res.partner.category']._columns['child_ids'].copy = True
+-
+ "1.0 Setup test partner categories: parent root"
+-
+ !record {model: res.partner.category, id: test_categ_root}:
+ name: Root category
+-
+ "1.1 Setup test partner categories: parent category"
+-
+ !record {model: res.partner.category, id: test_categ_0}:
+ name: Parent category
+ parent_id: test_categ_root
+-
+ "1.2 Setup test partner categories: child 1"
+-
+ !record {model: res.partner.category, id: test_categ_1}:
+ name: Child 1
+ parent_id: test_categ_0
+-
+ "1.3 Setup test partner categories: child 2"
+-
+ !record {model: res.partner.category, id: test_categ_2}:
+ name: Child 2
+ parent_id: test_categ_0
+-
+ "1.4 Setup test partner categories: child 2-1"
+-
+ !record {model: res.partner.category, id: test_categ_21}:
+ name: Child 2-1
+ parent_id: test_categ_2
+-
+ 2. Duplicate the parent category and verify that the children have been duplicated too and are below the new parent
+-
+ !python {model: res.partner.category}: |
+ self._columns['child_ids'].copy = True # force copying children for test
+ new_id = self.copy(cr, uid, ref('test_categ_0'))
+ new_struct = self.search(cr, uid, [('parent_id', 'child_of', new_id)])
+ assert len(new_struct) == 4, "After duplication, the new object must have the childs records"
+ old_struct = self.search(cr, uid, [('parent_id', 'child_of', ref('test_categ_0'))])
+ assert len(old_struct) == 4, "After duplication, previous record must have old childs records only"
+ assert (not set(new_struct).intersection(old_struct)), "After duplication, nodes should not be mixed"
+-
+ 3. Duplicate the children then reassign them to the new parent (1st method) and check the parent_store structure.
+-
+ !python {model: res.partner.category}: |
+ new_child1_id = self.copy(cr, uid, ref('test_categ_1'))
+ new_child2_id = self.copy(cr, uid, ref('test_categ_2'))
+ new_id = self.copy(cr, uid, ref('test_categ_0'), {'child_ids': []})
+ self.write(cr, uid, [new_child1_id, new_child2_id], {'parent_id': new_id})
+ new_struct = self.search(cr, uid, [('parent_id', 'child_of', new_id)])
+ assert len(new_struct) == 4, "After duplication, the new object must have the childs records"
+ old_struct = self.search(cr, uid, [('parent_id', 'child_of', ref('test_categ_0'))])
+ assert len(old_struct) == 4, "After duplication, previous record must have old childs records only"
+ assert (not set(new_struct).intersection(old_struct)), "After duplication, nodes should not be mixed"
+-
+ 4. Duplicate the children then reassign them to the new parent (2nd method) and check the parent_store structure.
+-
+ !python {model: res.partner.category}: |
+ new_child1_id = self.copy(cr, uid, ref('test_categ_1'))
+ new_child2_id = self.copy(cr, uid, ref('test_categ_2'))
+ old_struct = self.search(cr, uid, [('parent_id', 'child_of', ref('test_categ_0'))])
+ new_id = self.copy(cr, uid, ref('test_categ_0'), {'child_ids': [(6,0,[new_child1_id, new_child2_id])]})
+ new_struct = self.search(cr, uid, [('parent_id', 'child_of', new_id)])
+ assert len(new_struct) == 4, "After duplication, the new object must have the childs records"
+ old_struct = self.search(cr, uid, [('parent_id', 'child_of', ref('test_categ_0'))])
+ assert len(old_struct) == 4, "After duplication, previous record must have old childs records only"
+ assert (not set(new_struct).intersection(old_struct)), "After duplication, nodes should not be mixed"
+-
+ 5. Duplicate the children then reassign them to the new parent (3rd method) and make sure the parent_store structure is still right.
+-
+ !python {model: res.partner.category}: |
+ new_child1_id = self.copy(cr, uid, ref('test_categ_1'))
+ new_child2_id = self.copy(cr, uid, ref('test_categ_2'))
+ new_id = self.copy(cr, uid, ref('test_categ_0'), {'child_ids': []})
+ self.write(cr, uid, [new_id], {'child_ids': [(4,new_child1_id), (4,new_child2_id)]})
+ new_struct = self.search(cr, uid, [('parent_id', 'child_of', new_id)])
+ assert len(new_struct) == 4, "After duplication, the new object must have the childs records"
+ old_struct = self.search(cr, uid, [('parent_id', 'child_of', ref('test_categ_0'))])
+ assert len(old_struct) == 4, "After duplication, previous record must have old childs records only"
+ assert (not set(new_struct).intersection(old_struct)), "After duplication, nodes should not be mixed"
+-
+ 6. Restore pool state after the test
+-
+ !python {model: res.partner.category}: |
+ self.pool._init = True
+ self.pool['res.partner.category']._columns['child_ids'].copy = False
+
+-
+ "Float precision tests: verify that float rounding methods are working correctly via res.currency"
+-
+ !python {model: res.currency}: |
+ from openerp.tools import float_repr
+ from math import log10
+ currency = self.browse(cr, uid, ref('base.EUR'))
+ def try_round(amount, expected, self=self, cr=cr, currency=currency, float_repr=float_repr,
+ log10=log10):
+ digits = max(0,-int(log10(currency.rounding)))
+ result = float_repr(self.round(cr, 1, currency, amount), precision_digits=digits)
+ assert result == expected, 'Rounding error: got %s, expected %s' % (result, expected)
+ try_round(2.674,'2.67')
+ try_round(2.675,'2.68') # in Python 2.7.2, round(2.675,2) gives 2.67
+ try_round(-2.675,'-2.68') # in Python 2.7.2, round(2.675,2) gives 2.67
+ try_round(0.001,'0.00')
+ try_round(-0.001,'-0.00')
+ try_round(0.0049,'0.00') # 0.0049 is closer to 0 than to 0.01, so should round down
+ try_round(0.005,'0.01') # the rule is to round half away from zero
+ try_round(-0.005,'-0.01') # the rule is to round half away from zero
+
+ def try_zero(amount, expected, self=self, cr=cr, currency=currency):
+ assert self.is_zero(cr, 1, currency, amount) == expected, "Rounding error: %s should be zero!" % amount
+ try_zero(0.01, False)
+ try_zero(-0.01, False)
+ try_zero(0.001, True)
+ try_zero(-0.001, True)
+ try_zero(0.0046, True)
+ try_zero(-0.0046, True)
+ try_zero(2.68-2.675, False) # 2.68 - 2.675 = 0.005 -> rounds to 0.01
+ try_zero(2.68-2.676, True) # 2.68 - 2.676 = 0.004 -> rounds to 0.0
+ try_zero(2.676-2.68, True) # 2.676 - 2.68 = -0.004 -> rounds to -0.0
+ try_zero(2.675-2.68, False) # 2.675 - 2.68 = -0.005 -> rounds to -0.01
+
+ def try_compare(amount1, amount2, expected, self=self, cr=cr, currency=currency):
+ assert self.compare_amounts(cr, 1, currency, amount1, amount2) == expected, \
+ "Rounding error, compare_amounts(%s,%s) should be %s" % (amount1, amount2, expected)
+ try_compare(0.001, 0.001, 0)
+ try_compare(-0.001, -0.001, 0)
+ try_compare(0.001, 0.002, 0)
+ try_compare(-0.001, -0.002, 0)
+ try_compare(2.675, 2.68, 0)
+ try_compare(2.676, 2.68, 0)
+ try_compare(-2.676, -2.68, 0)
+ try_compare(2.674, 2.68, -1)
+ try_compare(-2.674, -2.68, 1)
+ try_compare(3, 2.68, 1)
+ try_compare(-3, -2.68, -1)
+ try_compare(0.01, 0, 1)
+ try_compare(-0.01, 0, -1)
+
+-
+ "Float precision tests: verify that float rounding methods are working correctly via tools"
+-
+ !python {model: res.currency}: |
+ from openerp.tools import float_compare, float_is_zero, float_round, float_repr
- def try_round(amount, expected, precision_digits=3, float_round=float_round, float_repr=float_repr):
- result = float_repr(float_round(amount, precision_digits=precision_digits),
++ def try_round(amount, expected, precision_digits=3, float_round=float_round, float_repr=float_repr, rounding_method='HALF-UP'):
++ result = float_repr(float_round(amount, precision_digits=precision_digits, rounding_method=rounding_method),
+ precision_digits=precision_digits)
+ assert result == expected, 'Rounding error: got %s, expected %s' % (result, expected)
+ try_round(2.6745, '2.675')
+ try_round(-2.6745, '-2.675')
+ try_round(2.6744, '2.674')
+ try_round(-2.6744, '-2.674')
+ try_round(0.0004, '0.000')
+ try_round(-0.0004, '-0.000')
+ try_round(357.4555, '357.456')
+ try_round(-357.4555, '-357.456')
+ try_round(457.4554, '457.455')
+ try_round(-457.4554, '-457.455')
+
++ # Try some rounding values with rounding method UP instead of HALF-UP
++ # We use 8.175 because normalizing 8.175 with precision_digits=3 gives
++ # us 8175.0000000001234 as value, and if not handled correctly the rounding UP
++ # value will be incorrect (should be 8.175 and not 8.176)
++ try_round(8.175, '8.175', rounding_method='UP')
++ try_round(8.1751, '8.176', rounding_method='UP')
++ try_round(-8.175, '-8.175', rounding_method='UP')
++ try_round(-8.1751, '-8.175', rounding_method='UP')
++
+ # Extended float range test, inspired by Cloves Almeida's test on bug #882036.
+ fractions = [.0, .015, .01499, .675, .67499, .4555, .4555, .45555]
+ expecteds = ['.00', '.02', '.01', '.68', '.67', '.46', '.456', '.4556']
+ precisions = [2, 2, 2, 2, 2, 2, 3, 4]
+ # Note: max precision for double floats is 53 bits of precision or
+ # 17 significant decimal digits
+ for magnitude in range(7):
+ for i in xrange(len(fractions)):
+ frac, exp, prec = fractions[i], expecteds[i], precisions[i]
+ for sign in [-1,1]:
+ for x in xrange(0,10000,97):
+ n = x * 10**magnitude
+ f = sign * (n + frac)
+ f_exp = ('-' if f != 0 and sign == -1 else '') + str(n) + exp
+ try_round(f, f_exp, precision_digits=prec)
+
+
+ def try_zero(amount, expected, float_is_zero=float_is_zero):
+ assert float_is_zero(amount, precision_digits=3) == expected, "Rounding error: %s should be zero!" % amount
+ try_zero(0.0002, True)
+ try_zero(-0.0002, True)
+ try_zero(0.00034, True)
+ try_zero(0.0005, False)
+ try_zero(-0.0005, False)
+ try_zero(0.0008, False)
+ try_zero(-0.0008, False)
+
+ def try_compare(amount1, amount2, expected, float_compare=float_compare):
+ assert float_compare(amount1, amount2, precision_digits=3) == expected, \
+ "Rounding error, compare_amounts(%s,%s) should be %s" % (amount1, amount2, expected)
+ try_compare(0.0003, 0.0004, 0)
+ try_compare(-0.0003, -0.0004, 0)
+ try_compare(0.0002, 0.0005, -1)
+ try_compare(-0.0002, -0.0005, 1)
+ try_compare(0.0009, 0.0004, 1)
+ try_compare(-0.0009, -0.0004, -1)
+ try_compare(557.4555, 557.4556, 0)
+ try_compare(-557.4555, -557.4556, 0)
+ try_compare(657.4444, 657.445, -1)
+ try_compare(-657.4444, -657.445, 1)
+
+ # Rounding to unusual rounding units (e.g. coin values)
+ def try_round(amount, expected, precision_rounding=None, float_round=float_round, float_repr=float_repr):
+ result = float_repr(float_round(amount, precision_rounding=precision_rounding),
+ precision_digits=2)
+ assert result == expected, 'Rounding error: got %s, expected %s' % (result, expected)
+ try_round(-457.4554, '-457.45', precision_rounding=0.05)
+ try_round(457.444, '457.50', precision_rounding=0.5)
+ try_round(457.3, '455.00', precision_rounding=5)
+ try_round(457.5, '460.00', precision_rounding=5)
+ try_round(457.1, '456.00', precision_rounding=3)
+
+-
+ "Float precision tests: check that proper rounding is performed for float persistence"
+-
+ !python {model: res.currency}: |
+ currency = self.browse(cr, uid, ref('base.EUR'))
+ res_currency_rate = self.pool.get('res.currency.rate')
+ from openerp.tools import float_compare, float_is_zero, float_round, float_repr
+ def try_roundtrip(value, expected, self=self, cr=cr, currency=currency,
+ res_currency_rate=res_currency_rate):
+ rate_id = res_currency_rate.create(cr, 1, {'name':'2000-01-01',
+ 'rate': value,
+ 'currency_id': currency.id})
+ rate = res_currency_rate.read(cr, 1, [rate_id], ['rate'])[0]['rate']
+ assert rate == expected, 'Roundtrip error: got %s back from db, expected %s' % (rate, expected)
+ # res.currency.rate uses 6 digits of precision by default
+ try_roundtrip(2.6748955, 2.674896)
+ try_roundtrip(-2.6748955, -2.674896)
+ try_roundtrip(10000.999999, 10000.999999)
+ try_roundtrip(-10000.999999, -10000.999999)
+
+-
+ "Float precision tests: verify that invalid parameters are forbidden"
+-
+ !python {model: res.currency}: |
+ from openerp.tools import float_compare, float_is_zero, float_round
+ try:
+ float_is_zero(0.01, precision_digits=3, precision_rounding=0.01)
+ except AssertionError:
+ pass
+ try:
+ float_compare(0.01, 0.02, precision_digits=3, precision_rounding=0.01)
+ except AssertionError:
+ pass
+ try:
+ float_round(0.01, precision_digits=3, precision_rounding=0.01)
+ except AssertionError:
+ pass
+-
+ Test res.groups name search
+-
+ !python {model: res.groups}: |
+ all_groups = self.search(cr, uid, [])
+ full_names = [(group.id, group.full_name) for group in self.browse(cr, uid, all_groups)]
+ group_ids = self.search(cr, uid, [('full_name', 'like', '%Sale%')])
+ assert set(group_ids) == set([id for (id, full_name) in full_names if 'Sale' in full_name]), "did not match search for 'Sale'"
+ group_ids = self.search(cr, uid, [('full_name', 'like', '%Technical%')])
+ assert set(group_ids) == set([id for (id, full_name) in full_names if 'Technical' in full_name]), "did not match search for 'Technical'"
+ group_ids = self.search(cr, uid, [('full_name', 'like', '%Sales /%')])
+ assert set(group_ids) == set([id for (id, full_name) in full_names if 'Sales /' in full_name]), "did not match search for 'Sales /'"
+ group_ids = self.search(cr, uid, [('full_name', 'in', ['Administration / Access Rights','Contact Creation'])])
+ assert group_ids, "did not match search for 'Administration / Access Rights' and 'Contact Creation'"