1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
24 import openerp.netsvc as netsvc
26 import openerp.tools as tools
27 from openerp.tools.safe_eval import safe_eval as eval
30 from interface import report_int
32 from openerp.osv.osv import except_osv
33 from openerp.osv.orm import browse_null
34 from openerp.osv.orm import browse_record_list
35 import openerp.pooler as pooler
39 from lxml import etree
40 from openerp.tools.translate import _
# Thin render.render subclass that wraps an already-generated PDF byte
# string (e.g. a chart produced directly as PDF by pychart) so it can be
# returned through the standard report rendering interface.
# NOTE(review): this excerpt omits some original lines (gaps in the
# embedded line numbers); presumably the `pdf` payload is stored and
# exposed by the omitted lines — confirm against the full file.
42 class external_pdf(render.render):
43 def __init__(self, pdf):
44 render.render.__init__(self)
# Declare the produced output format as PDF.
46 self.output_type='pdf'
53 #TODO: should inherit from report_rml instead of report_int
54 # -> could then override only create_xml instead of the whole create
55 # hmm, that does not work in every case, because graphs are generated
56 # directly as PDF by pychart, so we do not go through RML at all
# Custom report engine: builds table / tree / pie / bar / line reports
# from an `ir.report.custom` definition and renders them to PDF.
57 class report_custom(report_int):
58 def __init__(self, name):
# Delegate to report_int, which takes the service name
# (e.g. 'report.custom') under which this report is exposed.
59 report_int.__init__(self, name)
62 # fields = [['address','city'],['name'], ['zip']]
63 # conditions = [[('zip','==','3'),(,)],(,),(,)] #same structure as fields
64 # row_canvas = ['Rue', None, None]
66 # [ ['ville','name','zip'] ]
68 def _row_get(self, cr, uid, objs, fields, conditions, row_canvas=None, group_by=None):
# Recursively flatten browse records into report rows.
# - objs: browse records to walk.
# - fields: list of dotted field-path chains, e.g. [['address','city'], ['name']].
# - conditions: per-field filter tuples (fct, field, operator, value),
#   same outer structure as `fields`.
# - row_canvas: pre-fills columns already resolved at an outer
#   recursion level; group_by is an index into `fields` that drives
#   the grouped recursion below.
# NOTE(review): this excerpt omits some original lines (gaps in the
# embedded line numbers); comments describe only the visible code.
# `eval` here is openerp's safe_eval (aliased at import) and is used to
# follow dotted attribute paths on the browse record.
73 for cond in conditions:
76 temp = c[0](eval('obj.'+c[1],{'obj': obj}))
# Build and evaluate a quoted comparison such as 'val' == 'other';
# records failing any condition are filtered out of the result.
77 if not eval('\''+temp+'\''+' '+c[2]+' '+'\''+str(c[3])+'\''):
83 for i in range(len(fields)):
# Column already computed at an outer level (or None placeholder).
85 row.append(row_canvas and row_canvas[i])
88 elif len(fields[i])==1:
89 if not isinstance(obj, browse_null):
# Leaf of the field chain: read the value off the record.
90 row.append(str(eval('obj.'+fields[i][0],{'obj': obj})))
# Remember relational prefixes that still need descending into.
95 levels[fields[i][0]]=True
99 # Process group_by data first
101 if group_by != None and fields[group_by] != None:
102 if fields[group_by][0] in levels.keys():
103 key.append(fields[group_by][0])
104 for l in levels.keys():
105 if l != fields[group_by][0]:
# Follow the relational field `l` and recurse with the remaining
# (one-level-shorter) field chains and matching conditions.
110 objs = eval('obj.'+l,{'obj': obj})
111 if not isinstance(objs, (browse_record_list, list)):
115 for f in range(len(fields)):
116 if (fields[f] and fields[f][0])==l:
117 field_new.append(fields[f][1:])
118 cond_new.append(conditions[f][1:])
120 field_new.append(None)
121 cond_new.append(None)
123 result += self._row_get(cr, uid, objs, field_new, cond_new, row, group_by)
129 def create(self, cr, uid, ids, datas, context=None):
# Report entry point: loads the ir.report.custom definition, collects
# data rows via _row_get, applies grouping / aggregation and sorting,
# then dispatches to the table / tree / pie / bar / line renderer.
# Returns a (pdf_bytes, 'pdf') tuple as expected by the report service.
# NOTE(review): this excerpt omits some original lines (gaps in the
# embedded line numbers); comments describe only the visible code.
132 self.pool = pooler.get_pool(cr.dbname)
133 report = self.pool.get('ir.report.custom').browse(cr, uid, [datas['report_id']])[0]
134 datas['model'] = report.model_id.model
# No explicit ids: report over every record of the target model.
136 ids = self.pool.get(report.model_id.model).search(cr, uid, [])
139 report_id = datas['report_id']
140 report = self.pool.get('ir.report.custom').read(cr, uid, [report_id], context=context)[0]
141 fields = self.pool.get('ir.report.custom.fields').read(cr, uid, report['fields_child0'], context=context)
# Columns are ordered by their configured sequence.
143 fields.sort(lambda x,y : x['sequence'] - y['sequence'])
145 if report['field_parent']:
146 parent_field = self.pool.get('ir.model.fields').read(cr, uid, [report['field_parent'][0]], ['model'])
147 model_name = self.pool.get('ir.model').read(cr, uid, [report['model_id'][0]], ['model'], context=context)[0]['model']
# Helper functions referenced by name from the per-field condition
# operator configuration (fc<i>_op).
150 fct['id'] = lambda x : x
151 fct['gety'] = lambda x: x.split('-')[0]
152 fct['in'] = lambda x: x.split(',')
# Walk the field-child chain of each report column, resolving field
# names and collecting (fct, operand, operator, value) conditions.
159 field_child = f['field_child'+str(i)]
162 self.pool.get('ir.model.fields').read(cr, uid, [field_child[0]], ['name'], context=context)[0]['name']
164 if f['fc'+str(i)+'_operande']:
166 cond_op = f['fc'+str(i)+'_op']
# An operator of the form 'fct_name,op' selects both a helper
# function and the comparison operator.
167 if len(f['fc'+str(i)+'_op'].split(',')) == 2:
168 cond_op = f['fc'+str(i)+'_op'].split(',')[1]
169 fct_name = f['fc'+str(i)+'_op'].split(',')[0]
170 cond.append((fct[fct_name], f['fc'+str(i)+'_operande'][1], cond_op, f['fc'+str(i)+'_condition']))
173 new_fields.append(row)
174 new_cond.append(cond)
175 objs = self.pool.get(model_name).browse(cr, uid, ids)
187 if report['field_parent']:
# Hierarchical report: recursively collect each record's rows
# followed by its children (via the configured parent field).
189 def build_tree(obj, level, depth):
190 res = self._row_get(cr, uid,[obj], new_fields, new_cond)
192 new_obj = eval('obj.'+report['field_parent'][1],{'obj': obj})
193 if not isinstance(new_obj, list) :
196 if not isinstance(o, browse_null):
197 res += build_tree(o, level, depth+1)
201 results += build_tree(obj, level, 0)
203 results = self._row_get(cr, uid,objs, new_fields, new_cond, group_by=groupby)
# Aggregation operators applied per column over each group's rows.
# NOTE: filter(None, l) drops falsy values, so zeros are excluded
# from sum/avg/count here (visible behavior, kept as-is).
206 'calc_sum': lambda l: reduce(lambda x,y: float(x)+float(y), filter(None, l), 0),
207 'calc_avg': lambda l: reduce(lambda x,y: float(x)+float(y), filter(None, l), 0) / (len(filter(None, l)) or 1.0),
208 'calc_max': lambda l: reduce(lambda x,y: max(x,y), [(i or 0.0) for i in l], 0),
209 'calc_min': lambda l: reduce(lambda x,y: min(x,y), [(i or 0.0) for i in l], 0),
210 'calc_count': lambda l: len(filter(None, l)),
211 'False': lambda l: '\r\n'.join(filter(None, l)),
212 'groupby': lambda l: reduce(lambda x,y: x or y, l)
# Bucket rows by their group_by column; rows with an empty group
# value are attached to the previous group.
220 if not line[groupby] and prev in res_dic:
221 res_dic[prev].append(line)
224 if res_dic.has_key(line[groupby]):
225 res_dic[line[groupby]].append(line)
227 res_dic[line[groupby]] = []
228 res_dic[line[groupby]].append(line)
229 #we use the keys in results since they are ordered, whereas in res_dic.keys() they aren't
230 for key in filter(None, [x[groupby] for x in results]):
232 for col in range(len(fields)):
234 row.append(fct['groupby'](map(lambda x: x[col], res_dic[key])))
236 row.append(fct[str(fields[col]['operation'])](map(lambda x: x[col], res_dic[key])))
240 if report['type']=='table':
241 if report['field_parent']:
242 res = self._create_tree(uid, ids, report, fields, level, results, context)
# Flat table: optional sort on the configured column (numeric
# when possible), optional row limit.
245 for idx in range(len(fields)):
246 if fields[idx]['name'] == report['sortby']:
250 results.sort(lambda x,y : cmp(float(x[sort_idx]),float(y[sort_idx])))
252 results.sort(lambda x,y : cmp(x[sort_idx],y[sort_idx]))
254 results = results[:int(report['limitt'])]
255 res = self._create_table(uid, ids, report, fields, None, results, context)
256 elif report['type'] in ('pie','bar', 'line'):
# Charts: first column stays the label, the rest are coerced
# to float.
261 for j in range(len(r)):
262 if j == 0 and not r[j]:
264 elif j == 0 and r[j]:
269 row.append(float(r[j]))
273 if report['type']=='pie':
274 res = self._create_pie(cr,uid, ids, report, fields, results2, context)
275 elif report['type']=='bar':
276 res = self._create_bars(cr,uid, ids, report, fields, results2, context)
277 elif report['type']=='line':
278 res = self._create_lines(cr,uid, ids, report, fields, results2, context)
279 return (self.obj.get(), 'pdf')
281 def _create_tree(self, uid, ids, report, fields, level, results, context):
# Render a hierarchical (tree) table: build a <report> XML document,
# transform it with custom_new.xsl into RML, and hand the RML to the
# standard render.rml renderer.
# NOTE(review): this excerpt omits some original lines (gaps in the
# embedded line numbers); comments describe only the visible code.
282 pageSize=common.pageSize.get(report['print_format'], [210.0,297.0])
283 if report['print_orientation']=='landscape':
284 pageSize=[pageSize[1],pageSize[0]]
286 new_doc = etree.Element('report')
288 config = etree.SubElement(new_doc, 'config')
# Helper: append a <name>text</name> node under <config>.
290 def _append_node(name, text):
291 n = etree.SubElement(config, name)
294 _append_node('date', time.strftime('%d/%m/%Y'))
295 _append_node('PageFormat', '%s' % report['print_format'])
296 _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
# 2.8346 converts millimetres to PostScript points (72 / 25.4).
297 _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
298 _append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
# Distribute the width left over (page minus 30mm margins minus the
# explicitly sized columns) evenly across the auto-width columns.
300 length = pageSize[0]-30-reduce(lambda x,y:x+(y['width'] or 0), fields, 0)
303 if not f['width']: count+=1
306 f['width']=round((float(length)/count)-0.5)
308 _append_node('tableSize', '%s' % ','.join(map(lambda x: '%.2fmm' % (x['width'],), fields)))
309 _append_node('report-header', '%s' % (report['title'],))
310 _append_node('report-footer', '%s' % (report['footer'],))
312 header = etree.SubElement(new_doc, 'header')
314 field = etree.SubElement(header, 'field')
315 field.text = f['name']
317 lines = etree.SubElement(new_doc, 'lines')
321 node_line = etree.SubElement(lines, 'row')
323 for f in range(len(fields)):
324 col = etree.SubElement(node_line, 'col')
# Indent tree levels by 3mm per depth step.
326 col.attrib.update(para='yes',
328 space=str(3*shift)+'mm')
330 col.text = prefix+str(line[f]) or ''
# XSLT: project XML report description -> RML.
335 transform = etree.XSLT(
336 etree.parse(os.path.join(tools.config['root_path'],
337 'addons/base/report/custom_new.xsl')))
338 rml = etree.tostring(transform(new_doc))
340 self.obj = render.rml(rml)
345 def _create_lines(self, cr, uid, ids, report, fields, results, context):
# Render a line chart with pychart into an in-memory PDF; the first
# column of `results` is the abscissa label, remaining columns are
# one data series each. The PDF is wrapped in external_pdf.
# NOTE(review): this excerpt omits some original lines (gaps in the
# embedded line numbers); comments describe only the visible code.
346 pool = pooler.get_pool(cr.dbname)
347 pdf_string = cStringIO.StringIO()
348 can = canvas.init(fname=pdf_string, format='pdf')
# '/16/H' is pychart inline markup: 16pt Helvetica title.
350 can.show(80,380,'/16/H'+report['title'])
352 ar = area.T(size=(350,350),
353 #x_coord = category_coord.T(['2005-09-01','2005-10-22'],0),
354 x_axis = axis.X(label = fields[0]['name'], format="/a-30{}%s"),
355 y_axis = axis.Y(label = ', '.join(map(lambda x : x['name'], fields[1:]))))
# Label formatters for 'YYYY-MM-DD' strings, keyed by the report
# frequency (Day / Month / Year).
358 process_date['D'] = lambda x : reduce(lambda xx,yy : xx+'-'+yy,x.split('-')[1:3])
359 process_date['M'] = lambda x : x.split('-')[1]
360 process_date['Y'] = lambda x : x.split('-')[0]
363 order_date['D'] = lambda x : time.mktime((2005,int(x.split('-')[0]), int(x.split('-')[1]),0,0,0,0,0,0))
364 order_date['M'] = lambda x : x
365 order_date['Y'] = lambda x : x
# Pick the deepest configured field in the child chain to learn the
# column's type; date columns get the frequency-specific formatter.
374 field_id = (f['field_child3'] and f['field_child3'][0]) or (f['field_child2'] and f['field_child2'][0]) or (f['field_child1'] and f['field_child1'][0]) or (f['field_child0'] and f['field_child0'][0])
376 type = pool.get('ir.model.fields').read(cr, uid, [field_id],['ttype'])
377 if type[0]['ttype'] == 'date':
379 fct[idx] = process_date[report['frequency']]
381 fct[idx] = lambda x : x
383 fct[idx] = lambda x : x
386 # plots are usually displayed year by year
387 # so we do so if the first field is a date
391 key = process_date['Y'](r[date_idx])
392 if not data_by_year.has_key(key):
393 data_by_year[key] = []
394 for i in range(len(r)):
396 data_by_year[key].append(r)
# Non-date abscissa: one single bucket with all rows.
398 data_by_year[''] = results
401 nb_bar = len(data_by_year)*(len(fields)-1)
402 colors = map(lambda x:line_style.T(color=x), misc.choice_colors(nb_bar))
404 for line in data_by_year.keys():
406 # sum data and save it in a list. An item for a fields
407 for d in data_by_year[line]:
408 for idx in range(len(fields)-1):
409 fields_bar.append({})
410 if fields_bar[idx].has_key(d[0]):
411 fields_bar[idx][d[0]] += d[idx+1]
413 fields_bar[idx][d[0]] = d[idx+1]
414 for idx in range(len(fields)-1):
416 for k in fields_bar[idx].keys():
418 data[k] += fields_bar[idx][k]
420 data[k] = fields_bar[idx][k]
425 # cumulate if necessary
427 data_cum.append([k, float(data[k])+float(prev)])
428 if fields[idx+1]['cumulate']:
431 plot = line_plot.T(label=fields[idx+1]['name']+' '+str(line), data = data_cum, line_style=colors[idx0*(len(fields)-1)+idx])
433 abscissa.update(fields_bar[idx])
436 abscissa = map(lambda x : [x, None], abscissa)
437 ar.x_coord = category_coord.T(abscissa,0)
441 self.obj = external_pdf(pdf_string.getvalue())
448 def _create_bars(self, cr, uid, ids, report, fields, results, context):
# Render a bar chart with pychart into an in-memory PDF; mirrors
# _create_lines but uses clustered bar_plot.T series with plain fill
# colors instead of line styles.
# NOTE(review): this excerpt omits some original lines (gaps in the
# embedded line numbers); comments describe only the visible code.
449 pool = pooler.get_pool(cr.dbname)
450 pdf_string = cStringIO.StringIO()
451 can = canvas.init(fname=pdf_string, format='pdf')
# '/16/H' is pychart inline markup: 16pt Helvetica title.
453 can.show(80,380,'/16/H'+report['title'])
# Label formatters for 'YYYY-MM-DD' strings, keyed by frequency.
456 process_date['D'] = lambda x : reduce(lambda xx,yy : xx+'-'+yy,x.split('-')[1:3])
457 process_date['M'] = lambda x : x.split('-')[1]
458 process_date['Y'] = lambda x : x.split('-')[0]
461 order_date['D'] = lambda x : time.mktime((2005,int(x.split('-')[0]), int(x.split('-')[1]),0,0,0,0,0,0))
462 order_date['M'] = lambda x : x
463 order_date['Y'] = lambda x : x
465 ar = area.T(size=(350,350),
466 x_axis = axis.X(label = fields[0]['name'], format="/a-30{}%s"),
467 y_axis = axis.Y(label = ', '.join(map(lambda x : x['name'], fields[1:]))))
# Deepest configured child field determines the column type; date
# columns get the frequency-specific formatter.
473 field_id = (f['field_child3'] and f['field_child3'][0]) or (f['field_child2'] and f['field_child2'][0]) or (f['field_child1'] and f['field_child1'][0]) or (f['field_child0'] and f['field_child0'][0])
475 type = pool.get('ir.model.fields').read(cr, uid, [field_id],['ttype'])
476 if type[0]['ttype'] == 'date':
478 fct[idx] = process_date[report['frequency']]
480 fct[idx] = lambda x : x
482 fct[idx] = lambda x : x
485 # plot are usually displayed year by year
486 # so we do so if the first field is a date
490 key = process_date['Y'](r[date_idx])
491 if not data_by_year.has_key(key):
492 data_by_year[key] = []
493 for i in range(len(r)):
495 data_by_year[key].append(r)
# Non-date abscissa: one single bucket with all rows.
497 data_by_year[''] = results
500 nb_bar = len(data_by_year)*(len(fields)-1)
501 colors = map(lambda x:fill_style.Plain(bgcolor=x), misc.choice_colors(nb_bar))
504 for line in data_by_year.keys():
506 # sum data and save it in a list. An item for a fields
507 for d in data_by_year[line]:
508 for idx in range(len(fields)-1):
509 fields_bar.append({})
510 if fields_bar[idx].has_key(d[0]):
511 fields_bar[idx][d[0]] += d[idx+1]
513 fields_bar[idx][d[0]] = d[idx+1]
514 for idx in range(len(fields)-1):
516 for k in fields_bar[idx].keys():
518 data[k] += fields_bar[idx][k]
520 data[k] = fields_bar[idx][k]
525 # cumulate if necessary
527 data_cum.append([k, float(data[k])+float(prev)])
528 if fields[idx+1]['cumulate']:
# cluster=(position, total) places each series side by side
# within a bar group.
532 plot = bar_plot.T(label=fields[idx+1]['name']+' '+str(line), data = data_cum, cluster=(idx0*(len(fields)-1)+idx,nb_bar), fill_style=colors[idx0*(len(fields)-1)+idx])
534 abscissa.update(fields_bar[idx])
536 abscissa = map(lambda x : [x, None], abscissa)
538 ar.x_coord = category_coord.T(abscissa,0)
542 self.obj = external_pdf(pdf_string.getvalue())
547 def _create_pie(self, cr, uid, ids, report, fields, results, context):
# Render a pie chart with pychart into an in-memory PDF; each row of
# `results` is a (label, value) pair. Raises except_osv when all
# values sum to zero, since an empty pie cannot be drawn.
# NOTE(review): this excerpt omits some original lines (gaps in the
# embedded line numbers); comments describe only the visible code.
548 pdf_string = cStringIO.StringIO()
549 can = canvas.init(fname=pdf_string, format='pdf')
550 ar = area.T(size=(350,350), legend=legend.T(),
551 x_grid_style = None, y_grid_style = None)
552 colors = map(lambda x:fill_style.Plain(bgcolor=x), misc.choice_colors(len(results)))
# Guard: a pie chart needs at least one non-zero slice.
554 if reduce(lambda x,y : x+y, map(lambda x : x[1],results)) == 0.0:
555 raise except_osv(_('Error'), _("The sum of the data (2nd field) is null.\nWe can't draw a pie chart !"))
557 plot = pie_plot.T(data=results, arc_offsets=[0,10,0,10],
558 shadow = (2, -2, fill_style.gray50),
560 arrow_style = arrow.a3,
565 self.obj = external_pdf(pdf_string.getvalue())
570 def _create_table(self, uid, ids, report, fields, tree, results, context):
# Render a flat table: same XML -> custom_new.xsl -> RML pipeline as
# _create_tree, but columns are emitted with tree='no' and no
# per-depth indentation.
# NOTE(review): this excerpt omits some original lines (gaps in the
# embedded line numbers); comments describe only the visible code.
571 pageSize=common.pageSize.get(report['print_format'], [210.0,297.0])
572 if report['print_orientation']=='landscape':
573 pageSize=[pageSize[1],pageSize[0]]
575 new_doc = etree.Element('report')
576 config = etree.SubElement(new_doc, 'config')
# Helper: append a <name>text</name> node under <config>.
578 def _append_node(name, text):
579 n = etree.SubElement(config, name)
582 _append_node('date', time.strftime('%d/%m/%Y'))
583 _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
584 _append_node('PageFormat', '%s' % report['print_format'])
# 2.8346 converts millimetres to PostScript points (72 / 25.4).
585 _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
586 _append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
# Spread the remaining width (page minus 30mm margins minus the
# explicitly sized columns) evenly over the auto-width columns.
588 length = pageSize[0]-30-reduce(lambda x,y:x+(y['width'] or 0), fields, 0)
591 if not f['width']: count+=1
594 f['width']=round((float(length)/count)-0.5)
596 _append_node('tableSize', '%s' % ','.join(map(lambda x: '%.2fmm' % (x['width'],), fields)))
597 _append_node('report-header', '%s' % (report['title'],))
598 _append_node('report-footer', '%s' % (report['footer'],))
600 header = etree.SubElement(new_doc, 'header')
602 field = etree.SubElement(header, 'field')
603 field.text = f['name']
605 lines = etree.SubElement(new_doc, 'lines')
607 node_line = etree.SubElement(lines, 'row')
608 for f in range(len(fields)):
609 col = etree.SubElement(node_line, 'col', tree='no')
611 col.text = line[f] or ''
# XSLT: project XML report description -> RML.
615 transform = etree.XSLT(
616 etree.parse(os.path.join(tools.config['root_path'],
617 'addons/base/report/custom_new.xsl')))
618 rml = etree.tostring(transform(new_doc))
620 self.obj = render.rml(rml)
# Instantiate the engine under the service name 'report.custom';
# registration presumably happens in report_int.__init__ (which
# receives the name) — confirm against the report_int implementation.
623 report_custom('report.custom')
626 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: