1 # -*- encoding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 ##############################################################################
29 from psycopg2 import Binary
32 from tools.translate import _
33 from tools import config
34 from tools.safe_eval import safe_eval as eval
36 from osv import osv, fields
37 from osv.orm import except_orm
42 from content_index import content_index
46 d = [random.choice(string.ascii_letters) for x in xrange(10) ]
# Unsupported WebDAV Commands:
# An object that represents a URI
#   path: the uri of the object
#   content: the Content it belongs to (_print.pdf)
#   type: content or collection
#       content: object = res.partner
#       collection: object = directory, object2 = res.partner
#       file: object = ir.attachment
# root: if we are at the first directory of a resource
# Characters that are not allowed in FTP/WebDAV node names, mapped to the
# replacement token used when record names are exposed as directory names.
# '/' maps to '__' (it doubles as the path separator); every other special
# character maps to the process-stable str(hash(char)) placeholder.
INVALID_CHARS = dict((bad, str(hash(bad))) for bad in '*|\\:"<>?')
INVALID_CHARS['/'] = '__'
class node_class(object):
    """One node of the virtual filesystem served over FTP/WebDAV.

    A node wraps either the database root (type 'database'), a
    ``document.directory`` record (type 'collection'), an ``ir.attachment``
    (type 'file'), or a virtual content such as a generated report
    (type 'content').

    NOTE(review): this excerpt is truncated — several original source
    lines are missing between the visible ones; gap comments below mark
    the holes and the indentation has been reconstructed.
    """
    def __init__(self, cr, uid, path, object, object2=False, context=None, content=False, type='collection', root=False):
        # NOTE(review): the assignments of cr/uid/path/object/type/root
        # are missing from this excerpt; only the lines below survive.
        self.object2 = object2
        self.context = context
        if self.context is None:
            # NOTE(review): 'if' body missing — presumably ``self.context = {}``.
        self.content = content

    def _file_get(self, nodename=False):
        """Build the list of file/content child nodes of this node.

        Returns ir.attachment records wrapped as 'file' nodes plus the
        directory's configured virtual contents, optionally filtered by
        *nodename*.
        NOTE(review): initialisation of 'where'/'res2' and the if/else
        keywords structuring the domain construction are missing.
        """
        pool = pooler.get_pool(self.cr.dbname)
        fobj = pool.get('ir.attachment')
        # branch: node is bound to a resource record (self.object2)
        where.append( ('res_model','=',self.object2._name) )
        where.append( ('res_id','=',self.object2.id) )
        # branch: plain directory — files attached directly to it
        where.append( ('parent_id','=',self.object.id) )
        where.append( ('res_id','=',False) )
        # optional name filter (rec_name of ir.attachment)
        where.append( (fobj._rec_name,'=',nodename) )
        for content in self.object.content_ids:
            if self.object2 or not content.include_name:
                if content.include_name:
                    # prefix the virtual file with the record name, with the
                    # model's short name stripped off the front
                    content_name = self.object2.name
                    obj = pool.get(self.object.ressource_type_id.model)
                    name_for = obj._name.split('.')[-1]
                    if content_name and content_name.find(name_for) == 0 :
                        content_name = content_name.replace(name_for,'')
                    test_nodename = content_name + (content.suffix or '') + (content.extension or '')
                # NOTE(review): 'else:' keyword missing from this excerpt.
                    test_nodename = (content.suffix or '') + (content.extension or '')
                # NOTE(review): str.find() returns -1 (truthy) when absent —
                # this test is wrong whenever '/' is not at index 0.
                if test_nodename.find('/'):
                    test_nodename=test_nodename.replace('/', '_')
                path = self.path+'/'+test_nodename
                n = node_class(self.cr, self.uid,path, self.object2, False, context=self.context, content=content, type='content', root=False)
                # NOTE(review): res2.append(n) / the 'if nodename' guard
                # lines are missing from this excerpt.
                if nodename == test_nodename:
                    n = node_class(self.cr, self.uid, path, self.object2, False, context=self.context, content=content, type='content', root=False)
        ids = fobj.search(self.cr, self.uid, where+[ ('parent_id','=',self.object and self.object.id or False) ])
        if self.object and self.root and (self.object.type=='ressource'):
            # at the root of a resource directory, also pick up orphans
            ids += fobj.search(self.cr, self.uid, where+[ ('parent_id','=',False) ])
        res = fobj.browse(self.cr, self.uid, ids, context=self.context)
        # eval of 'x.<rec_name>' is a dynamic attribute read; note that
        # 'eval' is aliased to safe_eval at import time.
        return map(lambda x: node_class(self.cr, self.uid, self.path+'/'+eval('x.'+fobj._rec_name, {'x' : x}), x, False, context=self.context, type='file', root=False), res) + res2

    def get_translation(self,value,lang):
        """Return *value* translated into *lang*.

        NOTE(review): body truncated in this excerpt — only the TODO
        remains; presumably returns the value unchanged for now."""
        #TODO : get the translated term

    def directory_list_for_child(self,nodename,parent=False):
        """Browse the document.directory children of this node.

        NOTE(review): 'where' initialisation, several if/else keywords
        and the final 'return res' are missing from this excerpt."""
        pool = pooler.get_pool(self.cr.dbname)
        nodename = self.get_translation(nodename, self.context['lang'])
        where.append(('name','=',nodename))
        if (self.object and self.object.type=='directory') or not self.object2:
            where.append(('parent_id','=',self.object and self.object.id or False))
        # NOTE(review): original 'else' branch — root-level lookup.
        where.append(('parent_id','=',False))
        # restrict template directories to the matching parent model
        where.append(('ressource_parent_type_id','=',self.object.ressource_type_id.id))
        where.append(('ressource_parent_type_id','=',False))
        ids = pool.get('document.directory').search(self.cr, self.uid, where+[('ressource_id','=',0)])
        ids += pool.get('document.directory').search(self.cr, self.uid, where+[('ressource_id','=',self.object2.id)])
        res = pool.get('document.directory').browse(self.cr, self.uid, ids, self.context)

    def _child_get(self, nodename=False):
        """Build the list of collection child nodes: sub-directories and,
        for 'ressource' directories, one pseudo-directory per record."""
        if self.type not in ('collection','database'):
            # NOTE(review): 'return []' missing from this excerpt.
        res = self.directory_list_for_child(nodename)
        result= map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name, x, x.type=='directory' and self.object2 or False, context=self.context, root=self.root), res)
        if self.type=='database':
            # at the database root, parentless files show up directly
            pool = pooler.get_pool(self.cr.dbname)
            fobj = pool.get('ir.attachment')
            vargs = [('parent_id','=',False),('res_id','=',False)]
            vargs.append((fobj._rec_name,'=',nodename))
            file_ids=fobj.search(self.cr,self.uid,vargs)
            res = fobj.browse(self.cr, self.uid, file_ids, context=self.context)
            result +=map(lambda x: node_class(self.cr, self.uid, self.path+'/'+eval('x.'+fobj._rec_name, {'x' : x}), x, False, context=self.context, type='file', root=self.root), res)
        if self.type=='collection' and self.object.type=="ressource":
            # dynamic directory: one child per record of the mapped model,
            # optionally filtered by the configured domain expression
            where = self.object.domain and eval(self.object.domain, {'active_id':self.root, 'uid':self.uid}) or []
            pool = pooler.get_pool(self.cr.dbname)
            obj = pool.get(self.object.ressource_type_id.model)
            _dirname_field = obj._rec_name
            if len(obj.fields_get(self.cr, self.uid, ['dirname'])):
                # a 'dirname' field, when present, overrides _rec_name
                _dirname_field = 'dirname'
            name_for = obj._name.split('.')[-1]
            if nodename and nodename.find(name_for) == 0 :
                # '<model><id>' style name: resolve directly by record id
                id = int(nodename.replace(name_for,''))
                where.append(('id','=',id))
            # NOTE(review): an 'elif nodename:' keyword is missing in this
            # excerpt; the .find() truthiness tests below are also suspect
            # (str.find returns -1, which is truthy, when absent).
            if nodename.find('__') :
                nodename=nodename.replace('__','/')
            for invalid in INVALID_CHARS:
                # undo the escaping applied when record names were exported
                if nodename.find(INVALID_CHARS[invalid]) :
                    nodename=nodename.replace(INVALID_CHARS[invalid],invalid)
            nodename = self.get_translation(nodename, self.context['lang'])
            where.append((_dirname_field,'=',nodename))
            if self.object.ressource_tree:
                # tree structure: only direct children of the current record
                if obj._parent_name in obj.fields_get(self.cr,self.uid):
                    where.append((obj._parent_name,'=',self.object2 and self.object2.id or False))
                ids = obj.search(self.cr, self.uid, where)
                res = obj.browse(self.cr, self.uid, ids,self.context)
                result+= map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name.replace('/','__'), self.object, x, context=self.context, root=x.id), res)
                # NOTE(review): a 'return result' for this branch is missing.
            # NOTE(review): 'else:' branch keyword missing from this excerpt.
            ids = obj.search(self.cr, self.uid, where)
            res = obj.browse(self.cr, self.uid, ids,self.context)
            # NOTE(review): the 'for r in res:' loop header is missing here.
            if len(obj.fields_get(self.cr, self.uid, [_dirname_field])):
                r.name = eval('r.'+_dirname_field, {'r' : r})
            # NOTE(review): 'else:'/fallback guard missing — name records
            # without a display name as '<model><id>'.
            r.name = name_for + '%d'%r.id
            for invalid in INVALID_CHARS:
                # escape characters not allowed in FTP/WebDAV names
                if r.name.find(invalid) :
                    r.name = r.name.replace(invalid,INVALID_CHARS[invalid])
            result2 = map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name.replace('/','__'), self.object, x, context=self.context, root=x.id), res)
            if self.object.ressource_tree:
                # NOTE(review): remainder of this branch (merging result2
                # into result and the final return) missing from excerpt.

    # NOTE(review): the 'def children(self):' header is missing from this
    # excerpt; the line below is its body.
        return self._child_get() + self._file_get()

    def child(self, name):
        """Return the child node called *name*: directories first, then
        files/contents.

        NOTE(review): the intermediate 'if res: return res[0]' lines are
        missing from this excerpt."""
        res = self._child_get(name)
        res = self._file_get(name)

    # NOTE(review): the enclosing 'def path_get(self):' header is missing
    # from this excerpt; the line below is a fragment of its body.
        if self.path[0]=='/':
class document_directory(osv.osv):
    """A directory of the document management system.

    Static directories ('directory') simply hold attachments; resource
    directories ('ressource') expose the records of a mapped model as
    pseudo-directories over FTP/WebDAV.

    NOTE(review): this excerpt is truncated — the '_columns = {' / '}'
    delimiters and several statements are missing; indentation has been
    reconstructed and gaps are flagged below.
    """
    _name = 'document.directory'
    _description = 'Document directory'
    # NOTE(review): '_columns = {' opening line missing from this excerpt.
        'name': fields.char('Name', size=64, required=True, select=1),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'file_type': fields.char('Content Type', size=32),
        'domain': fields.char('Domain', size=128, help="Use a domain if you want to apply an automatic filter on visible resources."),
        'user_id': fields.many2one('res.users', 'Owner'),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        'parent_id': fields.many2one('document.directory', 'Parent Item'),
        'child_ids': fields.one2many('document.directory', 'parent_id', 'Children'),
        'file_ids': fields.one2many('ir.attachment', 'parent_id', 'Files'),
        'content_ids': fields.one2many('document.directory.content', 'directory_id', 'Virtual Files'),
        'type': fields.selection([('directory','Static Directory'),('ressource','Other Resources')], 'Type', required=True),
        'ressource_type_id': fields.many2one('ir.model', 'Directories Mapped to Objects',
            help="Select an object here and Open ERP will create a mapping for each of these " \
                "objects, using the given domain, when browsing through FTP."),
        'ressource_parent_type_id': fields.many2one('ir.model', 'Parent Model',
            help="If you put an object here, this directory template will appear bellow all of these objects. " \
                "Don't put a parent directory if you select a parent model."),
        'ressource_id': fields.integer('Resource ID'),
        'ressource_tree': fields.boolean('Tree Structure',
            help="Check this if you want to use the same tree structure as the object selected in the system."),
    # NOTE(review): closing '}' of _columns and '_defaults = {' missing here.
        'user_id': lambda self,cr,uid,ctx: uid,
        'domain': lambda self,cr,uid,ctx: '[]',
        'type': lambda *args: 'directory',
        'ressource_id': lambda *a: 0
    # NOTE(review): closing '}' and '_sql_constraints = [' missing here.
        ('dirname_uniq', 'unique (name,parent_id,ressource_id,ressource_parent_type_id)', 'The directory name must be unique !')

    def get_resource_path(self,cr,uid,dir_id,res_model,res_id):
        """Return the ftp:// URL of the directory mapped to
        (*res_model*, *res_id*)."""
        # Used by the process module.
        # TODO: needs testing/improvement when the resource directory has a
        # parent resource (linked resource).
        def _parent(dir_id,path):
            # accumulate ancestor directory names into *path* (recursive)
            parent=self.browse(cr,uid,dir_id)
            if parent.parent_id and not parent.ressource_parent_type_id:
                _parent(parent.parent_id.id,path)
                path.append(parent.name)
            # NOTE(review): 'else:' branch keyword missing from this excerpt.
            path.append(parent.name)

        # NOTE(review): 'path = []' initialisation missing from this excerpt.
        directory=self.browse(cr,uid,dir_id)
        model_ids=self.pool.get('ir.model').search(cr,uid,[('model','=',res_model)])
        # NOTE(review): guard lines and the _parent(...) call missing here.
        path.append(self.pool.get(directory.ressource_type_id.model).browse(cr,uid,res_id).name)
        user=self.pool.get('res.users').browse(cr,uid,uid)
        # WARNING: embeds the user's clear-text password in the returned URL.
        return "ftp://%s:%s@localhost:%s/%s/%s"%(user.login,user.password,config.get('ftp_server_port',8021),cr.dbname,'/'.join(path))

    def _check_recursion(self, cr, uid, ids):
        """Constraint helper: detect cycles in the parent_id chain.

        NOTE(review): the level counter, while-loop and return statements
        are missing from this excerpt."""
        cr.execute('SELECT DISTINCT parent_id FROM document_directory '\
                'WHERE id in %s', (tuple(ids),))
        ids = filter(None, map(lambda x:x[0], cr.fetchall()))

    # NOTE(review): '_constraints = [' opening line missing here.
        (_check_recursion, 'Error! You can not create recursive Directories.', ['parent_id'])

    def __init__(self, *args, **kwargs):
        # NOTE(review): '_cache' initialisation and 'return res' missing.
        res = super(document_directory, self).__init__(*args, **kwargs)

    def onchange_content_id(self, cr, uid, ids, ressource_type_id):
        # NOTE(review): body missing from this excerpt.

    def _get_childs(self, cr, uid, node, nodename=False, context=None):
        """Search document.directory children of *node* (legacy helper).

        NOTE(review): 'where' initialisation and the 'if nodename'/'if
        object' guards are missing; 'self.context' below looks wrong for
        an osv method (no such attribute outside node_class) — confirm."""
            nodename = self.get_translation(nodename, self.context['lang'])
            where.append(('name','=',nodename))
            where.append(('parent_id','=',object.id))
        ids = self.search(cr, uid, where, context)
        return self.browse(cr, uid, ids, context), False

    # NOTE(review): the lines below are the interior of a bare string
    # documenting get_object(); its triple-quote delimiters are missing
    # from this excerpt.
        uri: of the form "Sales Order/SO001"
        object: the object.directory or object.directory.content
        object2: the other object linked (if object.directory.content)
    def get_object(self, cr, uid, uri, context=None):
        """Resolve *uri* (path components) to a node_class instance,
        using a per-instance cache keyed on the textual uri.

        NOTE(review): context initialisation, the uri/turi handling and
        several control-flow lines are missing from this excerpt."""
        #TODO : set user's context_lang in context
        context.update({'lang':False})
        # empty uri: return the database root node
        return node_class(cr, uid, '', False, context=context, type='database')
        # cache lookup — intentionally disabled via 'False and ...'
        if False and (turi in self._cache):
            (path, oo, oo2, context, content,type,root) = self._cache[turi]
            object = self.pool.get(oo[0]).browse(cr, uid, oo[1], context)
            object2 = self.pool.get(oo2[0]).browse(cr, uid, oo2[1], context)
            node = node_class(cr, uid, '/', False, context=context, type='database')
        node = node_class(cr, uid, '/', False, context=context, type='database')
        # walk down the virtual tree one path component at a time
        node = node.child(path)
        # memoise the resolved node; browse records stored as (model, id)
        oo = node.object and (node.object._name, node.object.id) or False
        oo2 = node.object2 and (node.object2._name, node.object2.id) or False
        self._cache[turi] = (node.path, oo, oo2, node.context, node.content,node.type,node.root)
        # NOTE(review): 'return node' missing from this excerpt.

    def get_childs(self, cr, uid, uri, context=None):
        """Return the list of child paths under *uri*.

        NOTE(review): guard lines and the final 'return result' are
        missing from this excerpt."""
        node = self.get_object(cr, uid, uri, context)
        children = node.children()
        result = map(lambda node: node.path_get(), children)
        #childs,object2 = self._get_childs(cr, uid, object, False, context)
        #result = map(lambda x: urlparse.urljoin(path+'/',x.name), childs)

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a directory, suffixing its name with " (copy)".

        NOTE(review): the 'if not default: default = {}' lines are
        missing from this excerpt; as shown, default=None would crash
        on .update()."""
        name = self.read(cr, uid, [id])[0]['name']
        default.update({'name': name+ " (copy)"})
        return super(document_directory,self).copy(cr,uid,id,default,context)

    def _check_duplication(self, cr, uid,vals,ids=[],op='create'):
        """Return False when another directory with the same (name,
        parent, resource) key already exists, True otherwise.

        NOTE(review): mutable default 'ids=[]'; the 'if op==...' guards,
        several 'if not ...' fill-ins and the return statements are
        missing from this excerpt."""
        name=vals.get('name',False)
        parent_id=vals.get('parent_id',False)
        ressource_parent_type_id=vals.get('ressource_parent_type_id',False)
        ressource_id=vals.get('ressource_id',0)
        # write path: fill unspecified keys from each existing record
        for directory in self.browse(cr,uid,ids):
            parent_id=directory.parent_id and directory.parent_id.id or False
            if not ressource_parent_type_id:
                ressource_parent_type_id=directory.ressource_parent_type_id and directory.ressource_parent_type_id.id or False
            ressource_id=directory.ressource_id and directory.ressource_id or 0
            res=self.search(cr,uid,[('id','<>',directory.id),('name','=',name),('parent_id','=',parent_id),('ressource_parent_type_id','=',ressource_parent_type_id),('ressource_id','=',ressource_id)])
        # create path: any record with the same key is a duplicate
        res=self.search(cr,uid,[('name','=',name),('parent_id','=',parent_id),('ressource_parent_type_id','=',ressource_parent_type_id),('ressource_id','=',ressource_id)])

    def write(self, cr, uid, ids, vals, context=None):
        """Override: enforce directory-name uniqueness before writing."""
        if not self._check_duplication(cr,uid,vals,ids,op='write'):
            raise osv.except_osv(_('ValidateError'), _('Directory name must be unique!'))
        return super(document_directory,self).write(cr,uid,ids,vals,context=context)

    def create(self, cr, uid, vals, context=None):
        """Override: enforce uniqueness and forbid '/', '@', '$', '#'
        in directory names."""
        if not self._check_duplication(cr,uid,vals):
            raise osv.except_osv(_('ValidateError'), _('Directory name must be unique!'))
        # '.find(c)+1' is truthy unless find returns -1 (char absent)
        if vals.get('name',False) and (vals.get('name').find('/')+1 or vals.get('name').find('@')+1 or vals.get('name').find('$')+1 or vals.get('name').find('#')+1) :
            raise osv.except_osv(_('ValidateError'), _('Directory name contains special characters!'))
        return super(document_directory,self).create(cr, uid, vals, context)
class document_directory_node(osv.osv):
    """Extend process.node with a link to a document directory.

    NOTE(review): restored from a truncated excerpt — the '_columns = {'
    and '}' delimiter lines had been dropped, leaving the dict entry
    dangling; this is the only change besides documentation.
    """
    _inherit = 'process.node'
    _columns = {
        # Directory attached to this process node; the reference is
        # cleared (not cascaded) when the directory is deleted.
        'directory_id': fields.many2one('document.directory', 'Document directory', ondelete="set null"),
    }
document_directory_node()
class document_directory_content_type(osv.osv):
    """Type (file extension) that a virtual directory content can produce.

    NOTE(review): restored from a truncated excerpt — the '_columns'/'_defaults'
    dict delimiter lines had been dropped; this is the only change besides
    documentation.
    """
    _name = 'document.directory.content.type'
    _description = 'Directory Content Type'
    _columns = {
        'name': fields.char('Content Type', size=64, required=True),
        # Extension code, e.g. '.pdf'; used as the selection key of
        # document.directory.content.extension.
        'code': fields.char('Extension', size=4),
        'active': fields.boolean('Active'),
    }
    _defaults = {
        'active': lambda *args: 1
    }
document_directory_content_type()
class document_directory_content(osv.osv):
    """A 'virtual file' inside a directory — typically a report rendered
    on the fly for the record the directory is mapped to.

    NOTE(review): this excerpt is truncated; dict delimiters and some
    return statements are missing, indentation reconstructed.
    """
    _name = 'document.directory.content'
    _description = 'Directory Content'

    def _extension_get(self, cr, uid, context=None):
        """Selection helper: (code, name) pairs of the active content
        types.  NOTE(review): 'return cr.fetchall()' missing from this
        excerpt."""
        cr.execute('select code,name from document_directory_content_type where active')

    # NOTE(review): '_columns = {' opening line missing from this excerpt.
        'name': fields.char('Content Name', size=64, required=True),
        'sequence': fields.integer('Sequence', size=16),
        'suffix': fields.char('Suffix', size=16),
        'report_id': fields.many2one('ir.actions.report.xml', 'Report'),
        'extension': fields.selection(_extension_get, 'Document Type', required=True, size=4),
        'include_name': fields.boolean('Include Record Name', help="Check this field if you want that the name of the file start by the record name."),
        'directory_id': fields.many2one('document.directory', 'Directory'),
    # NOTE(review): closing '}' and '_defaults = {' missing from this excerpt.
        'extension': lambda *args: '.pdf',
        'sequence': lambda *args: 1,
        'include_name': lambda *args: 1,

    def process_write_pdf(self, cr, uid, node, context=None):
        """Writing to a generated PDF is not supported.
        NOTE(review): body missing — presumably 'return False'."""

    def process_read_pdf(self, cr, uid, node, context=None):
        """Render the configured report for node.object and return the
        PDF wrapped in a file-like object.
        NOTE(review): the trailing lines (naming/return of 's') are
        missing from this excerpt."""
        report = self.pool.get('ir.actions.report.xml').browse(cr, uid, node.content.report_id.id)
        srv = netsvc.LocalService('report.'+report.report_name)
        pdf,pdftype = srv.create(cr, uid, [node.object.id], {}, {})
        s = StringIO.StringIO(pdf)
document_directory_content()
class ir_action_report_xml(osv.osv):
    """Add a searchable 'model_id' function field to ir.actions.report.xml.

    NOTE(review): this excerpt is truncated; several statements are
    missing and indentation has been reconstructed.
    """
    _name="ir.actions.report.xml"
    _inherit ="ir.actions.report.xml"

    def _model_get(self, cr, uid, ids, name, arg, context):
        """Function-field getter: map each report id to the id of the
        ir.model whose 'model' string matches the report's model."""
        # NOTE(review): 'res = {}' initialisation missing from excerpt.
        model_pool = self.pool.get('ir.model')
        for data in self.read(cr,uid,ids,['model']):
            model = data.get('model',False)
            # NOTE(review): 'if model:' guard missing from this excerpt.
            model_id =model_pool.search(cr,uid,[('model','=',model)])
            # NOTE(review): 'if model_id:' guard missing from this excerpt.
            res[data.get('id')] = model_id[0]
            # NOTE(review): 'else:' fallback — no matching ir.model found.
            res[data.get('id')] = False
        # NOTE(review): 'return res' missing from this excerpt.

    def _model_search(self, cr, uid, obj, name, args, context):
        """Function-field search: translate a model_id criterion into a
        domain on report ids.
        NOTE(review): extraction of 'model_id' from *args* and the
        'if not report_id:' guard are missing from this excerpt."""
        model = self.pool.get('ir.model').read(cr,uid,[model_id])[0]['model']
        report_id = self.search(cr,uid,[('model','=',model)])
        # no report for this model: domain that matches nothing
        return [('id','=','0')]
        return [('id','in',report_id)]

    # NOTE(review): '_columns = {' opening line missing from this excerpt.
        'model_id' : fields.function(_model_get,fnct_search=_model_search,method=True,string='Model Id'),
ir_action_report_xml()
def create_directory(path):
    """Create a new randomly-named sub-directory under *path* and return
    the generated directory name (not the full path).

    NOTE(review): restored from a truncated excerpt — as shown, the
    function computed the joined path but never created the directory on
    disk nor returned anything; os.makedirs + the return were re-added.
    """
    dir_name = random_name()
    path = os.path.join(path, dir_name)
    os.makedirs(path)
    return dir_name
class document_file(osv.osv):
    """ir.attachment extended with directory placement, filesystem
    storage of payloads and full-text indexing.

    NOTE(review): this excerpt is truncated — dict delimiters, try/except
    frames and several statements are missing; indentation has been
    reconstructed and gaps are flagged below.
    """
    _inherit = 'ir.attachment'
    _rec_name = 'datas_fname'

    def _get_filestore(self, cr):
        """Absolute path of this database's filestore directory."""
        return os.path.join(tools.config['root_path'], 'filestore', cr.dbname)

    def _data_get(self, cr, uid, ids, name, arg, context):
        """Function-field getter for 'datas': read each stored file from
        the filestore and return it base64-encoded, keyed by id.
        NOTE(review): 'result = {}' initialisation, the 'if r:' guard
        and 'return result' are missing from this excerpt."""
        cr.execute('SELECT id, store_fname, link FROM ir_attachment '\
                'WHERE id IN %s', (tuple(ids),))
        for id,r,l in cr.fetchall():
            value = file(os.path.join(self._get_filestore(cr), r), 'rb').read()
            result[id] = base64.encodestring(value)
#            if context.get('bin_size', False):
#                result[id] = tools.human_size(result[id])

    # This code can be improved
    def _data_set(self, cr, uid, id, name, value, args=None, context=None):
        """Function-field setter for 'datas': store the decoded payload
        in the filestore and record its location and size via SQL.

        NOTE(review): try/except frames, the 'if not value:' branch
        structure, 'flag' initialisation, fp.write/close and the return
        statements are missing from this excerpt."""
        # empty value: remove the stored file and clear store_fname
        filename = self.browse(cr, uid, id, context).store_fname
        os.unlink(os.path.join(self._get_filestore(cr), filename))
        cr.execute('update ir_attachment set store_fname=NULL WHERE id=%s', (id,) )
        #if (not context) or context.get('store_method','fs')=='fs':
        path = self._get_filestore(cr)
        if not os.path.isdir(path):
            # NOTE(review): 'try: os.makedirs(path)' missing before this.
            raise except_orm(_('Permission Denied !'), _('You do not permissions to write on the server side.'))
        # This can be improved: pick (or create) a sub-directory holding
        # fewer than 4000 files, to keep directory listings fast.
        for dirs in os.listdir(path):
            if os.path.isdir(os.path.join(path,dirs)) and len(os.listdir(os.path.join(path,dirs)))<4000:
                # NOTE(review): 'flag = dirs' assignment missing here.
        flag = flag or create_directory(path)
        filename = random_name()
        fname = os.path.join(path, flag, filename)
        fp = file(fname,'wb')
        v = base64.decodestring(value)
        # NOTE(review): fp.write(v); fp.close() missing from this excerpt.
        filesize = os.stat(fname).st_size
        # file_size records the decoded payload length, not filesize
        cr.execute('update ir_attachment set store_fname=%s,store_method=%s,file_size=%s where id=%s', (os.path.join(flag,filename),'fs',len(v),id))
        # NOTE(review): the 'except Exception,e:' frame around the line
        # below is missing from this excerpt.
        raise except_orm(_('Error!'), str(e))

    # NOTE(review): '_columns = {' opening line missing from this excerpt.
        'user_id': fields.many2one('res.users', 'Owner', select=1),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        'parent_id': fields.many2one('document.directory', 'Directory', select=1),
        'file_size': fields.integer('File Size', required=True),
        'file_type': fields.char('Content Type', size=32),
        'index_content': fields.text('Indexed Content'),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'store_method': fields.selection([('db','Database'),('fs','Filesystem'),('link','Link')], "Storing Method"),
        'datas': fields.function(_data_get,method=True,fnct_inv=_data_set,string='File Content',type="binary"),
        'store_fname': fields.char('Stored Filename', size=200),
        'res_model': fields.char('Attached Model', size=64), #res_model
        'res_id': fields.integer('Attached ID'), #res_id
        'partner_id':fields.many2one('res.partner', 'Partner', select=1),
        'title': fields.char('Resource Title',size=64),
    # NOTE(review): closing '}' and '_defaults = {' missing from this excerpt.
        'user_id': lambda self,cr,uid,ctx:uid,
        'file_size': lambda self,cr,uid,ctx:0,
        'store_method': lambda *args: 'db'
    # NOTE(review): closing '}' and '_sql_constraints = [' missing here.
        ('filename_uniq', 'unique (name,parent_id,res_id,res_model)', 'The file name must be unique !')

    def _check_duplication(self, cr, uid,vals,ids=[],op='create'):
        """Return False when another attachment with the same (name,
        directory, attached record) key exists, True otherwise.

        NOTE(review): mutable default 'ids=[]'; the 'if op==...' guards,
        'if not ...' fill-ins and the return statements are missing from
        this excerpt."""
        name=vals.get('name',False)
        parent_id=vals.get('parent_id',False)
        res_model=vals.get('res_model',False)
        res_id=vals.get('res_id',0)
        # write path: fill unspecified keys from each existing record
        for file in self.browse(cr,uid,ids):
            parent_id=file.parent_id and file.parent_id.id or False
            res_model=file.res_model and file.res_model or False
            res_id=file.res_id and file.res_id or 0
            res=self.search(cr,uid,[('id','<>',file.id),('name','=',name),('parent_id','=',parent_id),('res_model','=',res_model),('res_id','=',res_id)])
        # create path: any record with the same key is a duplicate
        res=self.search(cr,uid,[('name','=',name),('parent_id','=',parent_id),('res_id','=',res_id),('res_model','=',res_model)])

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate an attachment, suffixing its name with " (copy)".

        NOTE(review): the 'if not default: default = {}' lines are
        missing from this excerpt; as shown, default=None would crash
        on .update()."""
        name = self.read(cr, uid, [id])[0]['name']
        default.update({'name': name+ " (copy)"})
        return super(document_file,self).copy(cr,uid,id,default,context)

    def write(self, cr, uid, ids, vals, context=None):
        """Override: uniqueness check, then re-index the content when
        the payload changes.
        NOTE(review): guards around the search result, the "'datas' in
        vals" test, the closing of the index-update call and the final
        'return result' are missing from this excerpt."""
        res=self.search(cr,uid,[('id','in',ids)])
        if not self._check_duplication(cr,uid,vals,ids,'write'):
            raise except_orm(_('ValidateError'), _('File name must be unique!'))
        result = super(document_file,self).write(cr,uid,ids,vals,context=context)
        for f in self.browse(cr, uid, ids, context=context):
            #if 'datas' not in vals:
            #    vals['datas']=f.datas
            res = content_index(base64.decodestring(vals['datas']), f.datas_fname, f.file_type or None)
            super(document_file,self).write(cr, uid, ids, {

    def create(self, cr, uid, vals, context=None):
        """Override: derive title/partner from the attached record,
        compute file_size, enforce uniqueness, then index the content.
        NOTE(review): context initialisation, several guards, else
        keywords and the final 'return result' are missing from this
        excerpt."""
        vals['title']=vals['name']
        vals['parent_id'] = context.get('parent_id',False) or vals.get('parent_id',False)
        if not vals.get('res_id', False) and context.get('default_res_id',False):
            vals['res_id']=context.get('default_res_id',False)
        if not vals.get('res_model', False) and context.get('default_res_model',False):
            vals['res_model']=context.get('default_res_model',False)
        if vals.get('res_id', False) and vals.get('res_model',False):
            # try to link the attachment to a partner derived from the record
            obj_model=self.pool.get(vals['res_model'])
            result = obj_model.read(cr, uid, [vals['res_id']], context=context)
            # NOTE(review): 'if len(result): obj = result[0]' missing here.
            if obj.get('name',False):
                vals['title'] = (obj.get('name',''))[:60]
            if obj_model._name=='res.partner':
                vals['partner_id']=obj['id']
            elif obj.get('address_id',False):
                # address_id may be a plain id or a (id, name) pair
                if isinstance(obj['address_id'],tuple) or isinstance(obj['address_id'],list):
                    address_id=obj['address_id'][0]
                # NOTE(review): 'else:' keyword missing from this excerpt.
                    address_id=obj['address_id']
                address=self.pool.get('res.partner.address').read(cr,uid,[address_id],context=context)
                # NOTE(review): guard on address/partner_id missing here.
                vals['partner_id']=address[0]['partner_id'][0] or False
            elif obj.get('partner_id',False):
                if isinstance(obj['partner_id'],tuple) or isinstance(obj['partner_id'],list):
                    vals['partner_id']=obj['partner_id'][0]
                # NOTE(review): 'else:' keyword missing from this excerpt.
                    vals['partner_id']=obj['partner_id']
        if vals.get('link',False) :
            # fetch the linked content to compute its size
            datas=base64.encodestring(urllib.urlopen(vals['link']).read())
        # NOTE(review): 'else:' keyword missing from this excerpt.
            datas = vals.get('datas',False)
        vals['file_size']= datas and len(datas) or 0
        if not self._check_duplication(cr,uid,vals):
            raise except_orm(_('ValidateError'), _('File name must be unique!'))
        result = super(document_file,self).create(cr, uid, vals, context)
        # NOTE(review): try/guard lines missing around the indexing below.
        res = content_index(base64.decodestring(datas), vals['datas_fname'], vals.get('content_type', None))
        super(document_file,self).write(cr, uid, [result], {
            'index_content' : res,
        # NOTE(review): closing of this call and 'return result' missing.

    def unlink(self,cr, uid, ids, context=None):
        """Override: remove filesystem-stored payloads before deleting
        the records.
        NOTE(review): the store_method guard / try-except around
        os.unlink is missing from this excerpt."""
        for f in self.browse(cr, uid, ids, context):
            #if f.store_method=='fs':
            os.unlink(os.path.join(self._get_filestore(cr), f.store_fname))
        return super(document_file, self).unlink(cr, uid, ids, context)
class document_configuration_wizard(osv.osv_memory):
    """One-shot configuration wizard: maps the demo directories to the
    sale/product/analytic models (when installed) and points the FTP
    browse action at the detected host.

    NOTE(review): this excerpt is truncated — dict delimiters, try/except
    frames and several statements are missing; indentation has been
    reconstructed and gaps flagged below.
    """
    _name='document.configuration.wizard'
    _rec_name = 'Auto Directory configuration'
    # NOTE(review): '_columns = {' opening line missing from this excerpt.
        'host': fields.char('Server Address', size=64, help="Put here the server address or IP. " \
                "Keep localhost if you don't know what to write.", required=True)

    def detect_ip_addr(self, cr, uid, context=None):
        """Best-effort detection of a non-loopback IP address of this
        host; falls back to 'localhost' on any failure."""
        def _detect_ip_addr(self, cr, uid, context=None):
            from array import array
            from struct import pack, unpack
            # NOTE(review): the socket/fcntl imports, the surrounding try
            # and the 'nbytes' initialisation are missing from this excerpt.
            if not fcntl: # not UNIX:
                host = socket.gethostname()
                ip_addr = socket.gethostbyname(host)
            # NOTE(review): 'else:' branch keyword missing from this excerpt.
            # get all interfaces:
                s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                names = array('B', '\0' * nbytes)
                # ioctl 0x8912 = SIOCGIFCONF: list configured interfaces
                outbytes = unpack('iL', fcntl.ioctl( s.fileno(), 0x8912, pack('iL', nbytes, names.buffer_info()[0])))[0]
                namestr = names.tostring()
                ifaces = [namestr[i:i+32].split('\0', 1)[0] for i in range(0, outbytes, 32)]
                for ifname in [iface for iface in ifaces if iface != 'lo']:
                    # ioctl 0x8915 = SIOCGIFADDR: address of the interface
                    ip_addr = socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, pack('256s', ifname[:15]))[20:24])
            # NOTE(review): 'return ip_addr' missing from this excerpt.

        # NOTE(review): the try/except frame around the call below is
        # missing; the 'localhost' assignment is the except fallback.
        ip_addr = _detect_ip_addr(self, cr, uid, context)
        ip_addr = 'localhost'

    # NOTE(review): '_defaults = {' opening line missing from this excerpt.
        'host': detect_ip_addr,

    def action_cancel(self,cr,uid,ids,conect=None):
        """Skip configuration: jump back to the configuration wizard.
        NOTE(review): 'return {' and the remaining dict entries are
        missing from this excerpt."""
            'res_model': 'ir.actions.configuration.wizard',
            'type': 'ir.actions.act_window',

    def action_config(self, cr, uid, ids, context=None):
        """Apply the configuration: map demo directories to sale,
        product and analytic models, attach report contents, and point
        the FTP browse URL at the chosen host.
        NOTE(review): many statements are missing from this excerpt;
        gaps flagged inline."""
        conf = self.browse(cr, uid, ids[0], context)
        obj=self.pool.get('document.directory')
        objid=self.pool.get('ir.model.data')
        if self.pool.get('sale.order'):
            # "All Sales Order" demo directory → sale.order records
            id = objid._get_id(cr, uid, 'document', 'dir_sale_order_all')
            id = objid.browse(cr, uid, id, context=context).res_id
            mid = self.pool.get('ir.model').search(cr, uid, [('model','=','sale.order')])
            obj.write(cr, uid, [id], {
                'ressource_type_id': mid[0],
            # NOTE(review): closing of this write() call missing here.
            aid = objid._get_id(cr, uid, 'sale', 'report_sale_order')
            aid = objid.browse(cr, uid, aid, context=context).res_id
            self.pool.get('document.directory.content').create(cr, uid, {
                'name': "Print Order",
            # NOTE(review): remaining keys / closing of create() missing.
            # quotations directory, restricted to draft orders
            id = objid._get_id(cr, uid, 'document', 'dir_sale_order_quote')
            id = objid.browse(cr, uid, id, context=context).res_id
            obj.write(cr, uid, [id], {
                'ressource_type_id': mid[0],
                'domain': "[('state','=','draft')]",
            # NOTE(review): closing of this write() call missing here.
        if self.pool.get('product.product'):
            # Products demo directory → product.product records
            id = objid._get_id(cr, uid, 'document', 'dir_product')
            id = objid.browse(cr, uid, id, context=context).res_id
            mid = self.pool.get('ir.model').search(cr, uid, [('model','=','product.product')])
            obj.write(cr, uid, [id], {
                'ressource_type_id': mid[0],
            # NOTE(review): closing of this write() call missing here.
        if self.pool.get('stock.location'):
            # attach the stock forecast report as a virtual content
            aid = objid._get_id(cr, uid, 'stock', 'report_product_history')
            aid = objid.browse(cr, uid, aid, context=context).res_id
            self.pool.get('document.directory.content').create(cr, uid, {
                'name': "Product Stock",
                'suffix': "_stock_forecast",
            # NOTE(review): remaining keys / closing of create() missing.
        if self.pool.get('account.analytic.account'):
            # Projects demo directory → analytic accounts (tree structure)
            id = objid._get_id(cr, uid, 'document', 'dir_project')
            id = objid.browse(cr, uid, id, context=context).res_id
            mid = self.pool.get('ir.model').search(cr, uid, [('model','=','account.analytic.account')])
            obj.write(cr, uid, [id], {
                'ressource_type_id': mid[0],
            # NOTE(review): closing of this write() call missing here.
        # point the "Browse Files" URL action at the configured host
        aid = objid._get_id(cr, uid, 'document', 'action_document_browse')
        aid = objid.browse(cr, uid, aid, context=context).res_id
        self.pool.get('ir.actions.url').write(cr, uid, [aid], {'url': 'ftp://'+(conf.host or 'localhost')+':8021/'})
        # NOTE(review): 'return {' and the remaining keys missing below.
            'res_model': 'ir.actions.configuration.wizard',
            'type': 'ir.actions.act_window',
document_configuration_wizard()