1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
24 from osv import osv, fields
25 from osv.orm import except_orm
31 from content_index import content_index
37 from psycopg2 import Binary
38 from tools import config
40 from tools.translate import _
# NOTE(review): fragment of a random-name helper -- draws 10 random ASCII
# letters; the enclosing ``def`` and the join/return are not visible in this
# excerpt, so this line is kept as-is.
d = [random.choice(string.ascii_letters) for x in xrange(10) ]
# Unsupported WebDAV Commands:
# An object that represents a URI
# path: the URI of the object
# content: the Content it belongs to (_print.pdf)
# type: content or collection
# content: object = res.partner
# collection: object = directory, object2 = res.partner
# file: object = ir.attachment
# root: if we are at the first directory of a resource
# Characters that cannot appear in generated node names, mapped to safe
# replacement tokens.  '/' is mapped to '__' (translated back when resolving
# paths); every other character is replaced by str(hash(char)).
# NOTE(review): str(hash(c)) is stable within one Python 2 process but is not
# guaranteed identical across interpreters -- confirm these names are never
# persisted and compared across processes.
INVALID_CHARS={'*':str(hash('*')), '|':str(hash('|')) , "\\":str(hash("\\")), '/':'__', ':':str(hash(':')), '"':str(hash('"')), '<':str(hash('<')) , '>':str(hash('>')) , '?':str(hash('?'))}
# A node_class instance represents a single URI of the virtual document tree
# exposed over FTP/WebDAV: a static directory, a mapped resource, a virtual
# content or a plain file (see the legend in the comment block above).
class node_class(object):
    def __init__(self, cr, uid, path, object, object2=False, context={}, content=False, type='collection', root=False):
        # NOTE(review): mutable default ``context={}`` is shared between
        # calls; safe only while callers never mutate it -- confirm.
        self.object2 = object2    # secondary record (e.g. the mapped resource)
        self.context = context
        self.content = content
        # NOTE(review): assignments of cr/uid/path/object/type/root are not
        # visible in this excerpt.
    def _file_get(self, nodename=False):
        """Return the child file nodes of this node: real ir.attachment
        records plus virtual contents declared on the directory.  When
        ``nodename`` is given, restrict the result to that single name.
        NOTE(review): several lines (initialisation of ``where``/``res2``,
        branch keywords) are missing from this excerpt."""
        pool = pooler.get_pool(self.cr.dbname)
        fobj = pool.get('ir.attachment')
        # restrict attachments to the mapped resource, if any
        where.append( ('res_model','=',self.object2._name) )
        where.append( ('res_id','=',self.object2.id) )
        where.append( ('parent_id','=',self.object.id) )
        where.append( ('res_id','=',False) )
        where.append( (fobj._rec_name,'=',nodename) )
        # build virtual nodes for each directory content definition
        for content in self.object.content_ids:
            if self.object2 or not content.include_name:
                if content.include_name:
                    content_name = self.object2.name
                    obj = pool.get(self.object.ressource_type_id.model)
                    name_for = obj._name.split('.')[-1]
                    # strip the model short-name prefix from the record name
                    if content_name and content_name.find(name_for) == 0 :
                        content_name = content_name.replace(name_for,'')
                    test_nodename = content_name + (content.suffix or '') + (content.extension or '')
                    # NOTE(review): the ``else:`` separating the next line is
                    # not visible in this excerpt.
                    test_nodename = (content.suffix or '') + (content.extension or '')
                # NOTE(review): ``find('/')`` returns -1 (truthy) when absent,
                # so this condition is true unless '/' is at index 0 -- the
                # replace is harmless, but the test looks unintended.
                if test_nodename.find('/'):
                    test_nodename=test_nodename.replace('/', '_')
                path = self.path+'/'+test_nodename
                n = node_class(self.cr, self.uid,path, self.object2, False, context=self.context, content=content, type='content', root=False)
                if nodename == test_nodename:
                    n = node_class(self.cr, self.uid, path, self.object2, False, context=self.context, content=content, type='content', root=False)
        # real attachments directly below this directory
        ids = fobj.search(self.cr, self.uid, where+[ ('parent_id','=',self.object and self.object.id or False) ])
        # at the root of a resource directory also include unparented files
        if self.object and self.root and (self.object.type=='ressource'):
            ids += fobj.search(self.cr, self.uid, where+[ ('parent_id','=',False) ])
        res = fobj.browse(self.cr, self.uid, ids, context=self.context)
        return map(lambda x: node_class(self.cr, self.uid, self.path+'/'+eval('x.'+fobj._rec_name), x, False, context=self.context, type='file', root=False), res) + res2
    def get_translation(self,value,lang):
        """Translate ``value`` into ``lang``.  NOTE(review): the body is not
        visible in this excerpt; presumably it currently returns ``value``
        unchanged (see the TODO below)."""
        #TODO : to get translation term
    def directory_list_for_child(self,nodename,parent=False):
        """Browse the document.directory records that are children of this
        node, optionally filtered by ``nodename``.  NOTE(review): the
        ``where`` initialisation, several branch keywords and the ``return``
        are missing from this excerpt."""
        pool = pooler.get_pool(self.cr.dbname)
        nodename = self.get_translation(nodename, self.context['lang'])
        where.append(('name','=',nodename))
        if (self.object and self.object.type=='directory') or not self.object2:
            where.append(('parent_id','=',self.object and self.object.id or False))
        where.append(('parent_id','=',False))
        # template directories attached to this node's resource model
        where.append(('ressource_parent_type_id','=',self.object.ressource_type_id.id))
        where.append(('ressource_parent_type_id','=',False))
        # generic directories (ressource_id 0) ...
        ids = pool.get('document.directory').search(self.cr, self.uid, where+[('ressource_id','=',0)])
        # ... plus ones bound to this specific resource record
        ids += pool.get('document.directory').search(self.cr, self.uid, where+[('ressource_id','=',self.object2.id)])
        res = pool.get('document.directory').browse(self.cr, self.uid, ids, self.context)
    def _child_get(self, nodename=False):
        """Return the child collection nodes of this node: sub-directories,
        top-level files (database root) and, for resource directories, one
        node per mapped record.  NOTE(review): many lines (branch bodies,
        ``else`` keywords, loop headers, the final return) are missing from
        this excerpt."""
        if self.type not in ('collection','database'):
        res = self.directory_list_for_child(nodename)
        result= map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name, x, x.type=='directory' and self.object2 or False, context=self.context, root=self.root), res)
        if self.type=='database':
            # at the database root, loose attachments appear as files
            pool = pooler.get_pool(self.cr.dbname)
            fobj = pool.get('ir.attachment')
            vargs = [('parent_id','=',False),('res_id','=',False)]
            vargs.append((fobj._rec_name,'=',nodename))
            file_ids=fobj.search(self.cr,self.uid,vargs)
            res = fobj.browse(self.cr, self.uid, file_ids, context=self.context)
            result +=map(lambda x: node_class(self.cr, self.uid, self.path+'/'+eval('x.'+fobj._rec_name), x, False, context=self.context, type='file', root=self.root), res)
        if self.type=='collection' and self.object.type=="ressource":
            # NOTE(review): eval of a DB-stored domain -- trusted admin data,
            # but still an eval; flagging for awareness.
            where = self.object.domain and eval(self.object.domain, {'active_id':self.root, 'uid':self.uid}) or []
            pool = pooler.get_pool(self.cr.dbname)
            obj = pool.get(self.object.ressource_type_id.model)
            # directory name comes from a 'dirname' field when the model has
            # one, otherwise from the record name
            _dirname_field = obj._rec_name
            if len(obj.fields_get(self.cr, self.uid, ['dirname'])):
                _dirname_field = 'dirname'
            name_for = obj._name.split('.')[-1]
            # a nodename like '<model>123' addresses record id 123 directly
            if nodename and nodename.find(name_for) == 0 :
                id = int(nodename.replace(name_for,''))
                where.append(('id','=',id))
            # undo the escaping applied when names were generated
            # NOTE(review): ``find()`` returns -1 (truthy) when the substring
            # is absent, so these conditions are effectively always-true
            # except at index 0; the replaces are no-ops in that case.
            if nodename.find('__') :
                nodename=nodename.replace('__','/')
            for invalid in INVALID_CHARS:
                if nodename.find(INVALID_CHARS[invalid]) :
                    nodename=nodename.replace(INVALID_CHARS[invalid],invalid)
            nodename = self.get_translation(nodename, self.context['lang'])
            where.append((_dirname_field,'=',nodename))
            if self.object.ressource_tree:
                # tree mapping: only direct children of the current record
                if obj._parent_name in obj.fields_get(self.cr,self.uid):
                    where.append((obj._parent_name,'=',self.object2 and self.object2.id or False))
                    ids = obj.search(self.cr, self.uid, where)
                    res = obj.browse(self.cr, self.uid, ids,self.context)
                    result+= map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name.replace('/','__'), self.object, x, context=self.context, root=x.id), res)
            ids = obj.search(self.cr, self.uid, where)
            res = obj.browse(self.cr, self.uid, ids,self.context)
            if len(obj.fields_get(self.cr, self.uid, [_dirname_field])):
                r.name = eval('r.'+_dirname_field)
                r.name = name_for + '%d'%r.id
            # escape characters that are invalid in node names
            for invalid in INVALID_CHARS:
                if r.name.find(invalid) :
                    r.name = r.name.replace(invalid,INVALID_CHARS[invalid])
            result2 = map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name.replace('/','__'), self.object, x, context=self.context, root=x.id), res)
            if self.object.ressource_tree:
        # NOTE(review): the line below appears to be the body of a separate
        # ``children`` method whose ``def`` line is missing from this excerpt.
        return self._child_get() + self._file_get()
    def child(self, name):
        """Return the single child node called ``name``: first try the
        directory/collection children, then the file children.
        NOTE(review): the returns between these lookups are missing from
        this excerpt."""
        res = self._child_get(name)
        res = self._file_get(name)
        # NOTE(review): the line below appears to belong to a separate
        # path-normalisation method (``path_get``?) whose ``def`` line is
        # missing from this excerpt.
        if self.path[0]=='/':
class document_directory(osv.osv):
    _name = 'document.directory'
    _description = 'Document directory'
    # NOTE(review): the ``_columns = {`` opening and closing braces are not
    # visible in this excerpt.
        'name': fields.char('Name', size=64, required=True, select=1),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'file_type': fields.char('Content Type', size=32),
        'domain': fields.char('Domain', size=128, help="Use a domain if you want to apply an automatic filter on visible resources."),
        'user_id': fields.many2one('res.users', 'Owner'),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        'parent_id': fields.many2one('document.directory', 'Parent Item'),
        'child_ids': fields.one2many('document.directory', 'parent_id', 'Children'),
        'file_ids': fields.one2many('ir.attachment', 'parent_id', 'Files'),
        'content_ids': fields.one2many('document.directory.content', 'directory_id', 'Virtual Files'),
        # 'directory' = static folder; 'ressource' = one folder per record of
        # the mapped model below
        'type': fields.selection([('directory','Static Directory'),('ressource','Other Resources')], 'Type', required=True),
        'ressource_type_id': fields.many2one('ir.model', 'Directories Mapped to Objects',
            help="Select an object here and Open ERP will create a mapping for each of these " \
                "objects, using the given domain, when browsing through FTP."),
        'ressource_parent_type_id': fields.many2one('ir.model', 'Parent Model',
            help="If you put an object here, this directory template will appear bellow all of these objects. " \
                "Don't put a parent directory if you select a parent model."),
        'ressource_id': fields.integer('Resource ID'),
        'ressource_tree': fields.boolean('Tree Structure',
            help="Check this if you want to use the same tree structure as the object selected in the system."),
    # default values -- NOTE(review): the ``_defaults = {`` braces are not
    # visible in this excerpt.
        'user_id': lambda self,cr,uid,ctx: uid,
        'domain': lambda self,cr,uid,ctx: '[]',
        'type': lambda *args: 'directory',
        'ressource_id': lambda *a: 0
    # NOTE(review): the ``_sql_constraints = [`` brackets are not visible in
    # this excerpt.
        ('dirname_uniq', 'unique (name,parent_id,ressource_id,ressource_parent_type_id)', 'The directory name must be unique !')
    def get_resource_path(self,cr,uid,dir_id,res_model,res_id):
        """Build the FTP URL pointing at the folder of record ``res_id`` of
        ``res_model`` under directory ``dir_id``.  Used by the process
        module.  NOTE(review): several lines (``path`` initialisation,
        branches around the model lookup) are missing from this excerpt."""
        # this method will be used in process module
        # to be need test and Improvement if resource dir has parent resource (link resource)
        def _parent(dir_id,path):
            # walk up static parents, accumulating directory names
            parent=self.browse(cr,uid,dir_id)
            if parent.parent_id and not parent.ressource_parent_type_id:
                _parent(parent.parent_id.id,path)
                path.append(parent.name)
            path.append(parent.name)
        directory=self.browse(cr,uid,dir_id)
        model_ids=self.pool.get('ir.model').search(cr,uid,[('model','=',res_model)])
        path.append(self.pool.get(directory.ressource_type_id.model).browse(cr,uid,res_id).name)
        user=self.pool.get('res.users').browse(cr,uid,uid)
        # NOTE(review): embeds the user's clear-text password in the returned
        # URL -- acceptable only for the legacy FTP gateway; flagging it.
        return "ftp://%s:%s@localhost:%s/%s/%s"%(user.login,user.password,config.get('ftp_server_port',8021),cr.dbname,'/'.join(path))
    def _check_recursion(self, cr, uid, ids):
        """Constraint: return False when a directory is (indirectly) its own
        parent.  NOTE(review): the loop/termination lines are missing from
        this excerpt; only the parent-walk query is visible."""
        cr.execute('select distinct parent_id from document_directory where id in ('+','.join(map(str, ids))+')')
        ids = filter(None, map(lambda x:x[0], cr.fetchall()))
    # NOTE(review): the ``_constraints = [`` brackets are not visible in this
    # excerpt.
        (_check_recursion, 'Error! You can not create recursive Directories.', ['parent_id'])
    def __init__(self, *args, **kwargs):
        # delegate to the ORM constructor; NOTE(review): the rest of the
        # body (cache setup? ``return res``) is missing from this excerpt
        res = super(document_directory, self).__init__(*args, **kwargs)
    def onchange_content_id(self, cr, uid, ids, ressource_type_id):
        # NOTE(review): body missing from this excerpt
    def _get_childs(self, cr, uid, node, nodename=False, context={}):
        """Return (child directories, False) for ``node``.  NOTE(review):
        the ``where`` initialisation and surrounding branches are missing
        from this excerpt; also ``self.get_translation``/``self.context``
        do not exist on an osv model -- looks like code copied from
        node_class; verify whether this method is actually called."""
        nodename = self.get_translation(nodename, self.context['lang'])
        where.append(('name','=',nodename))
        where.append(('parent_id','=',object.id))
        ids = self.search(cr, uid, where, context)
        return self.browse(cr, uid, ids, context), False
    """
    uri: of the form "Sales Order/SO001"
    object: the object.directory or object.directory.content
    object2: the other object linked (if object.directory.content)
    """
    def get_object(self, cr, uid, uri, context={}):
        """Resolve ``uri`` to a node_class instance, with a per-model cache
        of resolved paths.  NOTE(review): the guard around the early return,
        the cache-hit branch structure and the path-walking loop header are
        missing from this excerpt.  The mutable default ``context={}`` is
        mutated below (``context.update``) -- shared across calls."""
        #TODO : set user's context_lang in context
        context.update({'lang':False})
        # empty uri -> database root node (guard condition elided)
        return node_class(cr, uid, '', False, context=context, type='database')
        # cache lookup is disabled (``if False``) -- dead branch kept as-is
        if False and (turi in self._cache):
            (path, oo, oo2, context, content,type,root) = self._cache[turi]
            object = self.pool.get(oo[0]).browse(cr, uid, oo[1], context)
            object2 = self.pool.get(oo2[0]).browse(cr, uid, oo2[1], context)
            node = node_class(cr, uid, '/', False, context=context, type='database')
        node = node_class(cr, uid, '/', False, context=context, type='database')
        # walk the uri one path component at a time
        node = node.child(path)
        # store a picklable summary of the resolved node in the cache
        oo = node.object and (node.object._name, node.object.id) or False
        oo2 = node.object2 and (node.object2._name, node.object2.id) or False
        self._cache[turi] = (node.path, oo, oo2, node.context, node.content,node.type,node.root)
    def get_childs(self, cr, uid, uri, context={}):
        """Return the list of child paths below ``uri``.  NOTE(review): the
        branches and final return are missing from this excerpt."""
        node = self.get_object(cr, uid, uri, context)
        children = node.children()
        result = map(lambda node: node.path_get(), children)
        #childs,object2 = self._get_childs(cr, uid, object, False, context)
        #result = map(lambda x: urlparse.urljoin(path+'/',x.name), childs)
380 def copy(self, cr, uid, id, default=None, context=None):
383 name = self.read(cr, uid, [id])[0]['name']
384 default.update({'name': name+ " (copy)"})
385 return super(document_directory,self).copy(cr,uid,id,default,context)
    def _check_duplication(self, cr, uid,vals,ids=[],op='create'):
        """Return False when creating/writing ``vals`` would duplicate an
        existing directory name under the same parent / resource mapping.
        NOTE(review): the guards (``if not name`` etc.) and the returns are
        missing from this excerpt; mutable default ``ids=[]`` is read-only
        here but still a shared default."""
        name=vals.get('name',False)
        parent_id=vals.get('parent_id',False)
        ressource_parent_type_id=vals.get('ressource_parent_type_id',False)
        ressource_id=vals.get('ressource_id',0)
        # on write, fall back to each record's current values for any key
        # not present in vals
        for directory in self.browse(cr,uid,ids):
            parent_id=directory.parent_id and directory.parent_id.id or False
            if not ressource_parent_type_id:
                ressource_parent_type_id=directory.ressource_parent_type_id and directory.ressource_parent_type_id.id or False
            ressource_id=directory.ressource_id and directory.ressource_id or 0
            res=self.search(cr,uid,[('id','<>',directory.id),('name','=',name),('parent_id','=',parent_id),('ressource_parent_type_id','=',ressource_parent_type_id),('ressource_id','=',ressource_id)])
        # on create, search without excluding any id
        res=self.search(cr,uid,[('name','=',name),('parent_id','=',parent_id),('ressource_parent_type_id','=',ressource_parent_type_id),('ressource_id','=',ressource_id)])
410 def write(self, cr, uid, ids, vals, context=None):
411 if not self._check_duplication(cr,uid,vals,ids,op='write'):
412 raise osv.except_osv(_('ValidateError'), _('Directory name must be unique!'))
413 return super(document_directory,self).write(cr,uid,ids,vals,context=context)
415 def create(self, cr, uid, vals, context=None):
416 if not self._check_duplication(cr,uid,vals):
417 raise osv.except_osv(_('ValidateError'), _('Directory name must be unique!'))
418 if vals.get('name',False) and (vals.get('name').find('/')+1 or vals.get('name').find('@')+1 or vals.get('name').find('$')+1 or vals.get('name').find('#')+1) :
419 raise osv.except_osv(_('ValidateError'), _('Directory name contains special characters!'))
420 return super(document_directory,self).create(cr, uid, vals, context)
# Link a workflow/process node to a document directory.
class document_directory_node(osv.osv):
    _inherit = 'process.node'
    # NOTE(review): the ``_columns = {`` braces are not visible in this
    # excerpt.
        'directory_id': fields.many2one('document.directory', 'Document directory', ondelete="set null"),
document_directory_node()
# Registry of file types (extension + label) usable for virtual contents.
class document_directory_content_type(osv.osv):
    _name = 'document.directory.content.type'
    _description = 'Directory Content Type'
    # NOTE(review): the ``_columns = {`` / ``_defaults = {`` braces are not
    # visible in this excerpt.
        'name': fields.char('Content Type', size=64, required=True),
        'code': fields.char('Extension', size=4),
        'active': fields.boolean('Active', help="If the active field is set to true, it will allow you to hide the directory content type without removing it."),
        'active': lambda *args: 1
document_directory_content_type()
# A "virtual file" inside a directory, e.g. the printed PDF of the record
# the directory is mapped to.
class document_directory_content(osv.osv):
    _name = 'document.directory.content'
    _description = 'Directory Content'
    def _extension_get(self, cr, uid, context={}):
        """Selection helper: list active (code, name) content types.
        NOTE(review): the fetch/return lines are missing from this
        excerpt."""
        cr.execute('select code,name from document_directory_content_type where active')
    # NOTE(review): the ``_columns = {`` / ``_defaults = {`` braces are not
    # visible in this excerpt.
        'name': fields.char('Content Name', size=64, required=True),
        'sequence': fields.integer('Sequence', size=16),
        'suffix': fields.char('Suffix', size=16),
        'report_id': fields.many2one('ir.actions.report.xml', 'Report'),
        'extension': fields.selection(_extension_get, 'Document Type', required=True, size=4),
        'include_name': fields.boolean('Include Record Name', help="Check this field if you want that the name of the file start by the record name."),
        'directory_id': fields.many2one('document.directory', 'Directory'),
        'extension': lambda *args: '.pdf',
        'sequence': lambda *args: 1,
        'include_name': lambda *args: 1,
    def process_write_pdf(self, cr, uid, node, context={}):
        # NOTE(review): body missing from this excerpt (virtual PDFs are
        # presumably read-only)
    def process_read_pdf(self, cr, uid, node, context={}):
        """Render the configured report for the node's record and expose the
        PDF as a file-like object.  NOTE(review): the return is missing from
        this excerpt."""
        report = self.pool.get('ir.actions.report.xml').browse(cr, uid, node.content.report_id.id)
        srv = netsvc.LocalService('report.'+report.report_name)
        pdf,pdftype = srv.create(cr, uid, [node.object.id], {}, {})
        s = StringIO.StringIO(pdf)
document_directory_content()
# Extend reports with a functional model_id field so a report can be looked
# up by the ir.model it is attached to.
class ir_action_report_xml(osv.osv):
    _name="ir.actions.report.xml"
    _inherit ="ir.actions.report.xml"
    def _model_get(self, cr, uid, ids, name, arg, context):
        """fields.function getter: map each report id to the ir.model id of
        its 'model' field.  NOTE(review): the ``res`` initialisation, branch
        keywords and return are missing from this excerpt."""
        model_pool = self.pool.get('ir.model')
        for data in self.read(cr,uid,ids,['model']):
            model = data.get('model',False)
            model_id =model_pool.search(cr,uid,[('model','=',model)])
            res[data.get('id')] = model_id[0]
            res[data.get('id')] = False
    def _model_search(self, cr, uid, obj, name, args):
        """fields.function search: translate a model_id criterion into a
        domain on report ids.  NOTE(review): the argument unpacking and the
        branch separating the two returns are missing from this excerpt."""
        model = self.pool.get('ir.model').read(cr,uid,[model_id])[0]['model']
        report_id = self.search(cr,uid,[('model','=',model)])
        return [('id','=','0')]
        return [('id','in',report_id)]
    # NOTE(review): the ``_columns = {`` braces are not visible in this
    # excerpt.
        'model_id' : fields.function(_model_get,fnct_search=_model_search,method=True,string='Model Id'),
ir_action_report_xml()
def create_directory(path):
    """Create a randomly-named sub-directory under ``path`` for the
    filestore and (presumably) return its name.  NOTE(review): the mkdir and
    return lines are missing from this excerpt."""
    dir_name = random_name()
    path = os.path.join(path,dir_name)
# Extend ir.attachment with filesystem storage, directory placement and
# content indexing.
class document_file(osv.osv):
    _inherit = 'ir.attachment'
    # display name is the stored filename rather than the attachment name
    _rec_name = 'datas_fname'
521 def _get_filestore(self, cr):
522 return os.path.join(tools.config['root_path'], 'filestore', cr.dbname)
    def _data_get(self, cr, uid, ids, name, arg, context):
        """fields.function getter for 'datas': read file content from the
        filestore (store_fname) and return it base64-encoded per id.
        NOTE(review): the ``result`` initialisation, the branch keywords
        around the filestore read and the return are missing from this
        excerpt."""
        cr.execute('select id,store_fname,link from ir_attachment where id in ('+','.join(map(str, ids))+')')
        for id,r,l in cr.fetchall():
            value = file(os.path.join(self._get_filestore(cr), r), 'rb').read()
            result[id] = base64.encodestring(value)
        # if context.get('bin_size', False):
        #     result[id] = tools.human_size(result[id])
    # This code can be improved
    def _data_set(self, cr, uid, id, name, value, args=None, context={}):
        """fields.function setter for 'datas': remove any previous stored
        file, then write the decoded content into a filestore sub-directory
        (capped at 4000 entries each) and record store_fname/file_size.
        NOTE(review): the try/except scaffolding, the ``flag`` init, the
        write/close calls and several branch keywords are missing from this
        excerpt."""
        # drop the previously stored file, if any
        filename = self.browse(cr, uid, id, context).store_fname
        os.unlink(os.path.join(self._get_filestore(cr), filename))
        cr.execute('update ir_attachment set store_fname=NULL WHERE id=%s', (id,) )
        #if (not context) or context.get('store_method','fs')=='fs':
        path = self._get_filestore(cr)
        if not os.path.isdir(path):
            raise except_orm(_('Permission Denied !'), _('You do not permissions to write on the server side.'))
        # This can be improved
        # pick the first sub-directory that still has room (< 4000 files)
        for dirs in os.listdir(path):
            if os.path.isdir(os.path.join(path,dirs)) and len(os.listdir(os.path.join(path,dirs)))<4000:
        flag = flag or create_directory(path)
        filename = random_name()
        fname = os.path.join(path, flag, filename)
        fp = file(fname,'wb')
        v = base64.decodestring(value)
        filesize = os.stat(fname).st_size
        # NOTE(review): stores len(v) as file_size, not ``filesize`` -- the
        # stat result above appears unused in the visible code.
        cr.execute('update ir_attachment set store_fname=%s,store_method=%s,file_size=%s where id=%s', (os.path.join(flag,filename),'fs',len(v),id))
        raise except_orm(_('Error!'), str(e))
    # NOTE(review): the ``_columns = {`` / ``_defaults = {`` /
    # ``_sql_constraints = [`` delimiters are not visible in this excerpt.
        'user_id': fields.many2one('res.users', 'Owner', select=1),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        'parent_id': fields.many2one('document.directory', 'Directory', select=1),
        'file_size': fields.integer('File Size', required=True),
        'file_type': fields.char('Content Type', size=32),
        # full-text extract used for searching inside attachments
        'index_content': fields.text('Indexed Content'),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'store_method': fields.selection([('db','Database'),('fs','Filesystem'),('link','Link')], "Storing Method"),
        'datas': fields.function(_data_get,method=True,fnct_inv=_data_set,string='File Content',type="binary"),
        'store_fname': fields.char('Stored Filename', size=200),
        'res_model': fields.char('Attached Model', size=64), #res_model
        'res_id': fields.integer('Attached ID'), #res_id
        'partner_id':fields.many2one('res.partner', 'Partner', select=1),
        'title': fields.char('Resource Title',size=64),
        'user_id': lambda self,cr,uid,ctx:uid,
        'file_size': lambda self,cr,uid,ctx:0,
        'store_method': lambda *args: 'db'
        ('filename_uniq', 'unique (name,parent_id,res_id,res_model)', 'The file name must be unique !')
    def _check_duplication(self, cr, uid,vals,ids=[],op='create'):
        """Return False when creating/writing ``vals`` would duplicate an
        existing attachment name in the same directory / attached record.
        NOTE(review): the guards and returns are missing from this excerpt;
        mutable default ``ids=[]`` is read-only here.  The loop variable
        ``file`` shadows the ``file`` builtin used elsewhere in this
        class."""
        name=vals.get('name',False)
        parent_id=vals.get('parent_id',False)
        res_model=vals.get('res_model',False)
        res_id=vals.get('res_id',0)
        # on write, fall back to each record's current values
        for file in self.browse(cr,uid,ids):
            parent_id=file.parent_id and file.parent_id.id or False
            res_model=file.res_model and file.res_model or False
            res_id=file.res_id and file.res_id or 0
            res=self.search(cr,uid,[('id','<>',file.id),('name','=',name),('parent_id','=',parent_id),('res_model','=',res_model),('res_id','=',res_id)])
        # on create, search without excluding any id
        res=self.search(cr,uid,[('name','=',name),('parent_id','=',parent_id),('res_id','=',res_id),('res_model','=',res_model)])
627 def copy(self, cr, uid, id, default=None, context=None):
630 name = self.read(cr, uid, [id])[0]['name']
631 default.update({'name': name+ " (copy)"})
632 return super(document_file,self).copy(cr,uid,id,default,context)
    def write(self, cr, uid, ids, vals, context=None):
        """Update attachments: enforce unique names, then (when new content
        is supplied) refresh the full-text index_content.  NOTE(review):
        guards around the duplication check, the 'datas' presence test, the
        index-write dict tail and the return are missing from this
        excerpt."""
        res=self.search(cr,uid,[('id','in',ids)])
        if not self._check_duplication(cr,uid,vals,ids,'write'):
            raise except_orm(_('ValidateError'), _('File name must be unique!'))
        result = super(document_file,self).write(cr,uid,ids,vals,context=context)
        for f in self.browse(cr, uid, ids, context=context):
            #if 'datas' not in vals:
            #    vals['datas']=f.datas
            # re-extract searchable text from the new content
            res = content_index(base64.decodestring(vals['datas']), f.datas_fname, f.file_type or None)
            super(document_file,self).write(cr, uid, ids, {
    def create(self, cr, uid, vals, context={}):
        """Create an attachment: derive title/partner from the attached
        record, fetch content for link attachments, enforce unique names and
        index the content.  NOTE(review): several branch keywords
        (``else:``, ``if len(result):``, try/except around the URL fetch)
        and the final return are missing from this excerpt; the mutable
        default ``context={}`` is read-only here."""
        vals['title']=vals['name']
        # directory can come from the context (e.g. FTP upload) or vals
        vals['parent_id'] = context.get('parent_id',False) or vals.get('parent_id',False)
        if not vals.get('res_id', False) and context.get('default_res_id',False):
            vals['res_id']=context.get('default_res_id',False)
        if not vals.get('res_model', False) and context.get('default_res_model',False):
            vals['res_model']=context.get('default_res_model',False)
        if vals.get('res_id', False) and vals.get('res_model',False):
            # derive title and partner from the record we are attached to
            obj_model=self.pool.get(vals['res_model'])
            result = obj_model.read(cr, uid, [vals['res_id']], context=context)
            if obj.get('name',False):
                vals['title'] = (obj.get('name',''))[:60]
            if obj_model._name=='res.partner':
                vals['partner_id']=obj['id']
            elif obj.get('address_id',False):
                # address_id may be an (id, name) many2one tuple or a raw id
                if isinstance(obj['address_id'],tuple) or isinstance(obj['address_id'],list):
                    address_id=obj['address_id'][0]
                    address_id=obj['address_id']
                address=self.pool.get('res.partner.address').read(cr,uid,[address_id],context=context)
                vals['partner_id']=address[0]['partner_id'][0] or False
            elif obj.get('partner_id',False):
                if isinstance(obj['partner_id'],tuple) or isinstance(obj['partner_id'],list):
                    vals['partner_id']=obj['partner_id'][0]
                    vals['partner_id']=obj['partner_id']
        if vals.get('link',False) :
            # link attachments: download the target content
            datas=base64.encodestring(urllib.urlopen(vals['link']).read())
            datas = vals.get('datas',False)
        # NOTE(review): size of the base64 payload, not of the decoded file
        vals['file_size']= datas and len(datas) or 0
        if not self._check_duplication(cr,uid,vals):
            raise except_orm(_('ValidateError'), _('File name must be unique!'))
        result = super(document_file,self).create(cr, uid, vals, context)
        # extract searchable text from the new content
        res = content_index(base64.decodestring(datas), vals['datas_fname'], vals.get('content_type', None))
        super(document_file,self).write(cr, uid, [result], {
            'index_content': res,
    def unlink(self,cr, uid, ids, context={}):
        """Delete attachments, removing their filestore files first.
        NOTE(review): the guard/try-except around the unlink is missing from
        this excerpt -- as shown, a missing file would abort the whole
        delete."""
        for f in self.browse(cr, uid, ids, context):
            #if f.store_method=='fs':
            os.unlink(os.path.join(self._get_filestore(cr), f.store_fname))
        return super(document_file, self).unlink(cr, uid, ids, context)
# One-shot configuration wizard: maps the demo directories to installed
# modules and records the FTP server host/port.
class document_configuration_wizard(osv.osv_memory):
    _name='document.configuration.wizard'
    _rec_name = 'Auto Directory configuration'
    # NOTE(review): the ``_columns = {`` braces are not visible in this
    # excerpt.
        'host': fields.char('Server Address', size=64, help="Put here the server address or IP. " \
            "Keep localhost if you don't know what to write.", required=True),
        'suggested_host': fields.char('Suggested Server Address', size=64, readonly=True, help="This is the guessed server address"),
        'port': fields.char('Server Port', size=5, help="Put here the server port. " \
            "Keep 8021 if you don't know what to write.", required=True)
727 def get_ftp_server_address(self, cr, uid, context=None):
728 default_address = config.get('ftp_server_address', None)
729 if default_address is None:
730 default_address = tools.misc.detect_ip_addr()
732 return default_address
734 def get_ftp_server_port(self, cr, uid, context=None):
735 return config.get('ftp_server_port', 8021)
737 def get_suggested_ftp_server_address(self, cr, uid, context=None):
738 return tools.misc.detect_ip_addr()
    # NOTE(review): the ``_defaults = {`` braces are not visible in this
    # excerpt.
        'host': get_ftp_server_address,
        'suggested_host': get_suggested_ftp_server_address,
        'port': get_ftp_server_port,
    def action_cancel(self,cr,uid,ids,conect=None):
        """Skip this step and jump back to the generic configuration wizard.
        NOTE(review): the surrounding dict literal of the returned action is
        missing from this excerpt."""
            'res_model': 'ir.actions.configuration.wizard',
            'type': 'ir.actions.act_window',
    def action_config(self, cr, uid, ids, context=None):
        """Apply the auto-configuration: map the predefined demo directories
        to sale orders, products and analytic accounts when those modules
        are installed, attach report contents, point the document-browse URL
        action at the FTP server and persist host/port in the server config.
        NOTE(review): many closing braces, try/except scaffolding and the
        final return action are missing from this excerpt."""
        conf = self.browse(cr, uid, ids[0], context)
        obj=self.pool.get('document.directory')
        objid=self.pool.get('ir.model.data')
        if self.pool.get('sale.order'):
            # "All Sales Order" directory -> sale.order records
            id = objid._get_id(cr, uid, 'document', 'dir_sale_order_all')
            id = objid.browse(cr, uid, id, context=context).res_id
            mid = self.pool.get('ir.model').search(cr, uid, [('model','=','sale.order')])
            obj.write(cr, uid, [id], {
                'ressource_type_id': mid[0],
            # virtual "Print Order" PDF content
            aid = objid._get_id(cr, uid, 'sale', 'report_sale_order')
            aid = objid.browse(cr, uid, aid, context=context).res_id
            self.pool.get('document.directory.content').create(cr, uid, {
                'name': "Print Order",
            # "Quotations" directory limited to draft orders
            id = objid._get_id(cr, uid, 'document', 'dir_sale_order_quote')
            id = objid.browse(cr, uid, id, context=context).res_id
            obj.write(cr, uid, [id], {
                'ressource_type_id': mid[0],
                'domain': "[('state','=','draft')]",
        if self.pool.get('product.product'):
            # "Products" directory -> product.product records
            id = objid._get_id(cr, uid, 'document', 'dir_product')
            id = objid.browse(cr, uid, id, context=context).res_id
            mid = self.pool.get('ir.model').search(cr, uid, [('model','=','product.product')])
            obj.write(cr, uid, [id], {
                'ressource_type_id': mid[0],
            if self.pool.get('stock.location'):
                # virtual stock-forecast report per product
                aid = objid._get_id(cr, uid, 'stock', 'report_product_history')
                aid = objid.browse(cr, uid, aid, context=context).res_id
                self.pool.get('document.directory.content').create(cr, uid, {
                    'name': "Product Stock",
                    'suffix': "_stock_forecast",
        if self.pool.get('account.analytic.account'):
            # "Projects" directory -> analytic accounts
            id = objid._get_id(cr, uid, 'document', 'dir_project')
            id = objid.browse(cr, uid, id, context=context).res_id
            mid = self.pool.get('ir.model').search(cr, uid, [('model','=','account.analytic.account')])
            obj.write(cr, uid, [id], {
                'ressource_type_id': mid[0],
        # point the "Browse Files" URL action at the FTP server
        aid = objid._get_id(cr, uid, 'document', 'action_document_browse')
        aid = objid.browse(cr, uid, aid, context=context).res_id
        self.pool.get('ir.actions.url').write(cr, uid, [aid], {'url': 'ftp://'+(conf.host or 'localhost')+':'+conf.port+'/'})
        # remember the chosen host/port in the server-side configuration
        config['ftp_server_address'] = conf.host
        config['ftp_server_port'] = conf.port
            'res_model': 'ir.actions.configuration.wizard',
            'type': 'ir.actions.act_window',
document_configuration_wizard()
# Stand-alone wizard to (re)configure just the FTP server address/port.
class document_configuration_ftpserver_wizard(osv.osv_memory):
    _name='document.configuration.ftp_server.wizard'
    _rec_name = 'Configure FTP server address'
    # NOTE(review): the ``_columns = {`` braces are not visible in this
    # excerpt.
        'host': fields.char('Server Address', size=64, help="Put here the server address or IP. " \
            "Keep localhost if you don't know what to write.", required=True),
        'suggested_host': fields.char('Suggested Server Address', size=64, readonly=True, help="This is the guessed server address"),
        'port': fields.char('Server Port', size=5, help="Put here the server port. " \
            "Keep 8021 if you don't know what to write.", required=True)
851 def get_ftp_server_address(self, cr, uid, context=None):
852 default_address = tools.misc.detect_ip_addr()
853 return config.get('ftp_server_address', default_address)
855 def get_ftp_server_port(self, cr, uid, context=None):
856 return config.get('ftp_server_port', '8021')
    def get_suggested_ftp_server_address(self, cr, uid, context=None):
        """Best-guess server address, detected from the host's network
        interfaces."""
        return tools.misc.detect_ip_addr()
    # NOTE(review): the ``_defaults = {`` braces are not visible in this
    # excerpt.
        'host': get_ftp_server_address,
        'suggested_host': get_suggested_ftp_server_address,
        'port': get_ftp_server_port,
    def action_cancel(self,cr,uid,ids,conect=None):
        # NOTE(review): body (returned action dict) missing from this excerpt
    def action_config(self, cr, uid, ids, context=None):
        """Point the document-browse URL action at the chosen FTP host/port
        and persist both in the server configuration.  NOTE(review): the
        final return action is missing from this excerpt."""
        conf = self.browse(cr, uid, ids[0], context)
        obj = self.pool.get('ir.model.data')
        aid = obj._get_id(cr, uid, 'document', 'action_document_browse')
        aid = obj.browse(cr, uid, aid, context=context).res_id
        self.pool.get('ir.actions.url').write(cr, uid, [aid], {'url': 'ftp://'+(conf.host or 'localhost')+':'+conf.port+'/'})
        # remember the chosen host/port in the server-side configuration
        config['ftp_server_address'] = conf.host
        config['ftp_server_port'] = conf.port
document_configuration_ftpserver_wizard()