fp.close()
return data
-
class Reports(View):
    """Controller serving generated reports as file downloads at /web/report."""
    _cp_path = "/web/report"
    POLLING_DELAY = 0.25  # seconds between report_get() polls
    # report format -> Content-Type for the download response
    TYPES_MAPPING = {
        'doc': 'application/vnd.ms-word',
        'html': 'text/html',
        'odt': 'application/vnd.oasis.opendocument.text',
        'pdf': 'application/pdf',
        'sxw': 'application/vnd.sun.xml.writer',
        'xls': 'application/vnd.ms-excel',
    }

    @openerpweb.httprequest
    def index(self, req, action, token):
        """Run the report described by the JSON string *action* and stream it back.

        :param req: HTTP request wrapper carrying the OpenERP session
        :param action: JSON-encoded report action (report_name, context, ...)
        :param token: opaque integer echoed back in the ``fileToken`` cookie
            so the client can tell when the download has started
        :return: an attachment response with the rendered report
        """
        action = simplejson.loads(action)

        report_srv = req.session.proxy("report")
        context = req.session.eval_context(
            openerpweb.nonliterals.CompoundContext(
                req.context or {}, action["context"]))

        report_data = {"id": context["active_id"], "model": context["active_model"]}
        if 'report_type' in action:
            report_data['report_type'] = action['report_type']
        report_id = report_srv.report(
            req.session._db, req.session._uid, req.session._password,
            action["report_name"], context["active_ids"],
            report_data, context)

        # Report generation is asynchronous on the server: poll until the
        # "state" flag signals completion.
        report_struct = None
        while True:
            report_struct = report_srv.report_get(
                req.session._db, req.session._uid, req.session._password, report_id)
            if report_struct["state"]:
                break

            time.sleep(self.POLLING_DELAY)

        report = base64.b64decode(report_struct['result'])
        if report_struct.get('code') == 'zlib':
            report = zlib.decompress(report)
        # BUGFIX: the fallback must be the full media type
        # 'application/octet-stream', not the bare subtype 'octet-stream'.
        report_mimetype = self.TYPES_MAPPING.get(
            report_struct['format'], 'application/octet-stream')
        return req.make_response(report,
            headers=[
                ('Content-Disposition', 'attachment; filename="%s.%s"' % (action['report_name'], report_struct['format'])),
                ('Content-Type', report_mimetype),
                ('Content-Length', len(report))],
            cookies={'fileToken': int(token)})

+class Import(View):
+ _cp_path = "/web/import"
+
+ def fields_get(self, req, model):
+ Model = req.session.model(model)
+ fields = Model.fields_get(False, req.session.eval_context(req.context))
+ return fields
+
+ @openerpweb.httprequest
+ def detect_data(self, req, csvfile, csvsep, csvdel, csvcode, csvskip, **params):
+
+ _fields = {}
+ _fields_invert = {}
+ req_field = []
+ error = None
+ fields = req.session.model(params.get('model')).fields_get(False, req.session.eval_context(req.context))
+ fields.update({'id': {'string': 'ID'}, '.id': {'string': 'Database ID'}})
+
+ for field in fields:
+ value = fields[field]
+ if value.get('required'):
+ req_field.append(field)
+
+ def model_populate(fields, prefix_node='', prefix=None, prefix_value='', level=2):
+ def str_comp(x,y):
+ if x<y: return 1
+ elif x>y: return -1
+ else: return 0
+
+ fields_order = fields.keys()
+ fields_order.sort(lambda x,y: str_comp(fields[x].get('string', ''), fields[y].get('string', '')))
+ for field in fields_order:
+ if (fields[field].get('type','') not in ('reference',))\
+ and (not fields[field].get('readonly')\
+ or not dict(fields[field].get('states', {}).get(
+ 'draft', [('readonly', True)])).get('readonly',True)):
+
+ st_name = prefix_value+fields[field]['string'] or field
+ _fields[prefix_node+field] = st_name
+ _fields_invert[st_name] = prefix_node+field
+
+ if fields[field].get('type')=='one2many' and level>0:
+ fields2 = self.fields_get(req, fields[field]['relation'])
+ model_populate(fields2, prefix_node+field+'/', None, st_name+'/', level-1)
+
+ if fields[field].get('relation',False) and level>0:
+ model_populate({'/id': {'type': 'char', 'string': 'ID'}, '.id': {'type': 'char', 'string': 'Database ID'}},
+ prefix_node+field, None, st_name+'/', level-1)
+ fields.update({'id':{'string':'ID'},'.id':{'string':'Database ID'}})
+ model_populate(fields)
+ all_fields = fields.keys()
+ all_fields.sort()
+
+ try:
+ data = csv.reader(csvfile, quotechar=str(csvdel), delimiter=str(csvsep))
+ except:
+ error={'message': 'error opening .CSV file. Input Error.'}
+ return simplejson.dumps({'error':error})
+
+ records = []
+ count = 0
+ header_fields = []
+ word=''
+
+ try:
+ for rec in itertools.islice(data,0,4):
+ records.append(rec)
+
+ headers = itertools.islice(records,1)
+ line = headers.next()
+
+ for word in line:
+ word = str(word.decode(csvcode))
+ if word in _fields:
+ header_fields.append((word, _fields[word]))
+ elif word in _fields_invert.keys():
+ header_fields.append((_fields_invert[word], word))
+ else:
+ count = count + 1
+ header_fields.append((word, word))
+
+ if len(line) == count:
+ error = {'message':"File has not any column header."}
+ except:
+ error = {'message':('Error processing the first line of the file. Field "%s" is unknown') % (word,)}
+
+ if error:
+ csvfile.seek(0)
+ error=dict(error, preview=csvfile.read(200))
+ return simplejson.dumps({'error':error})
+
+ return simplejson.dumps({'records':records[1:],'header':header_fields,'all_fields':all_fields,'req_field':req_field})
+
+ @openerpweb.httprequest
+ def import_data(self, req, csvfile, csvsep, csvdel, csvcode, csvskip, **params):
+
+ _fields = {}
+ _fields_invert = {}
+ prefix_node=''
+ prefix_value = ''
+
+ context = req.session.eval_context(req.context)
+ modle_obj = req.session.model(params.get('model'))
+ res = None
+
+ limit = 0
+ data = []
+
+ if not (csvdel and len(csvdel) == 1):
+ error={'message': "The CSV delimiter must be a single character"}
+ return simplejson.dumps({'error':error})
+
+ try:
+ data_record = csv.reader(csvfile, quotechar=str(csvdel), delimiter=str(csvsep))
+ for rec in itertools.islice(data_record,0,None):
+ data.append(rec)
+
+ headers = itertools.islice(data,1)
+ fields = headers.next()
+
+ except csv.Error, e:
+ error={'message': str(e),'title': 'File Format Error'}
+ return simplejson.dumps({'error':error})
+
+ datas = []
+ ctx = context
+
+ if not isinstance(fields, list):
+ fields = [fields]
+
+ flds = modle_obj.fields_get(False, req.session.eval_context(req.context))
+ flds.update({'id':{'string':'ID'},'.id':{'string':'Database ID'}})
+ fields_order = flds.keys()
+ for field in fields_order:
+ st_name = prefix_value+flds[field]['string'] or field
+ _fields[prefix_node+field] = st_name
+ _fields_invert[st_name] = prefix_node+field
+
+ unmatch_field = []
+ for fld in fields:
+ if ((fld not in _fields) and (fld not in _fields_invert)):
+ unmatch_field.append(fld)
+
+ if unmatch_field:
+ error = {'message':("You cannot import the fields '%s',because we cannot auto-detect it." % (unmatch_field))}
+ return simplejson.dumps({'error':error})
+
+ for line in data[1:]:
+ try:
+ datas.append(map(lambda x:x.decode(csvcode).encode('utf-8'), line))
+ except:
+ datas.append(map(lambda x:x.decode('latin').encode('utf-8'), line))
+
+ # If the file contains nothing,
+ if not datas:
+ error = {'message': 'The file is empty !', 'title': 'Importation !'}
+ return simplejson.dumps({'error':error})
+
+ #Inverting the header into column names
+ try:
+ res = modle_obj.import_data(fields, datas, 'init', '', False, ctx)
+ except xmlrpclib.Fault, e:
+ error = {"message":e.faultCode}
+ return simplejson.dumps({'error':error})
+
+ if res[0]>=0:
+ success={'message':'Imported %d objects' % (res[0],)}
+ return simplejson.dumps({'success':success})
+
+ d = ''
+ for key,val in res[1].items():
+ d+= ('%s: %s' % (str(key),str(val)))
+ msg = 'Error trying to import this record:%s. ErrorMessage:%s %s' % (d,res[2],res[3])
+ error = {'message':str(msg), 'title':'ImportationError'}
- return simplejson.dumps({'error':error})
++ return simplejson.dumps({'error':error})