1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
24 from osv import osv, fields
33 from psycopg2 import Binary
34 #from tools import config
36 from tools.translate import _
# Root directory of the on-disk document filestore: the 'document_path'
# server option when set, otherwise <root_path>/filestore.
# NOTE(review): relies on 'tools' and 'os' being imported above — the import
# block is truncated here; confirm against the full file.
DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config['root_path'], 'filestore'))
class document_file(osv.osv):
    """Document-management extension of ir.attachment.

    Adds directory placement, storage methods (db / filesystem / link) and
    filename-uniqueness checks on top of the stock attachment model.
    """
    _inherit = 'ir.attachment'
    # Records are named after the stored file name instead of 'name'.
    _rec_name = 'datas_fname'
44 def _get_filestore(self, cr):
45 return os.path.join(DMS_ROOT_PATH, cr.dbname)
    def _data_get(self, cr, uid, ids, name, arg, context):
        """Function-field getter for 'datas'.

        Resolves each attachment through the document-node abstraction and
        returns its base64-encoded content — or only the content length when
        the caller sets 'bin_size' in the context.
        """
        fbrl = self.browse(cr, uid, ids, context=context)
        nctx = nodes.get_node_context(cr, uid, context)
        # 'bin_size' asks for the payload size only, not the bytes themselves.
        bin_size = context.get('bin_size', False)
            # One node per browse record; the node knows where the data lives.
            fnode = nodes.node_file(None, None, nctx, fbro)
                data = fnode.get_data(cr, fbro)
                result[fbro.id] = base64.encodestring(data or '')
                result[fbro.id] = fnode.get_data_len(cr, fbro)
    # This code can be improved
    def _data_set(self, cr, uid, id, name, value, arg, context):
        """Function-field setter for 'datas'.

        Decodes the base64 payload and stores it through the document-node
        abstraction, which routes to the configured storage backend.
        """
        fbro = self.browse(cr, uid, id, context=context)
        nctx = nodes.get_node_context(cr, uid, context)
        fnode = nodes.node_file(None, None, nctx, fbro)
        res = fnode.set_data(cr, base64.decodestring(value), fbro)
        # --- ownership / access ---
        'user_id': fields.many2one('res.users', 'Owner', select=1),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        # the directory id now is mandatory. It can still be computed automatically.
        'parent_id': fields.many2one('document.directory', 'Directory', select=1, required=True),
        # --- file metadata ---
        'file_size': fields.integer('File Size', required=True),
        'file_type': fields.char('Content Type', size=64),
        # If ir.attachment contained any data before document is installed, preserve
        # the data, don't drop the column!
        'db_datas': fields.binary('Data', oldname='datas'),
        'index_content': fields.text('Indexed Content'),
        # --- audit columns (maintained by the ORM, read-only in the UI) ---
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        # --- storage ---
        'store_method': fields.selection([('db', 'Database'), ('fs', 'Filesystem'), ('link', 'Link')], "Storing Method"),
        # Computed via _data_get / written via _data_set above.
        # nodrop=True: presumably keeps the legacy DB column alive — confirm
        # against the fields.function implementation.
        'datas': fields.function(_data_get, method=True, fnct_inv=_data_set, string='File Content', type="binary", nodrop=True),
        'store_fname': fields.char('Stored Filename', size=200),
        # --- link to an arbitrary resource ---
        'res_model': fields.char('Attached Model', size=64), #res_model
        'res_id': fields.integer('Attached ID'), #res_id
        'partner_id':fields.many2one('res.partner', 'Partner', select=1),
        'title': fields.char('Resource Title', size=64),
98 def __get_def_directory(self, cr, uid, context=None):
99 dirobj = self.pool.get('document.directory')
100 return dirobj._get_root_directory(cr, uid, context)
        # Defaults: owner = creating user, size starts at 0, database storage,
        # and the root directory when no parent is given.
        'user_id': lambda self, cr, uid, ctx:uid,
        'file_size': lambda self, cr, uid, ctx:0,
        'store_method': lambda *args: 'db',
        'parent_id': __get_def_directory
        # SQL-level uniqueness, enforced in addition to _check_duplication().
        ('filename_uniq', 'unique (name,parent_id,res_id,res_model)', 'The file name must be unique !')
    def _check_duplication(self, cr, uid, vals, ids=[], op='create'):
        """Check (name, parent_id, res_model, res_id) uniqueness for a
        create or write operation.

        NOTE(review): the mutable default 'ids=[]' is a Python anti-pattern;
        it appears to be read-only here, but confirm before relying on it.
        """
        name = vals.get('name', False)
        parent_id = vals.get('parent_id', False)
        res_model = vals.get('res_model', False)
        res_id = vals.get('res_id', 0)
            for file in self.browse(cr, uid, ids):
                    # Values absent from 'vals' fall back to the current record.
                    parent_id = file.parent_id and file.parent_id.id or False
                    res_model = file.res_model and file.res_model or False
                    res_id = file.res_id and file.res_id or 0
                res = self.search(cr, uid, [('id', '<>', file.id), ('name', '=', name), ('parent_id', '=', parent_id), ('res_model', '=', res_model), ('res_id', '=', res_id)])
            res = self.search(cr, uid, [('name', '=', name), ('parent_id', '=', parent_id), ('res_id', '=', res_id), ('res_model', '=', res_model)])
135 def copy(self, cr, uid, id, default=None, context=None):
138 if 'name' not in default:
139 name = self.read(cr, uid, [id])[0]['name']
140 default.update({'name': name + " (copy)"})
141 return super(document_file, self).copy(cr, uid, id, default, context)
143 def write(self, cr, uid, ids, vals, context=None):
144 if not isinstance(ids, list):
146 res = self.search(cr, uid, [('id', 'in', ids)])
149 if not self._check_duplication(cr, uid, vals, ids, 'write'):
150 raise osv.except_osv(_('ValidateError'), _('File name must be unique!'))
151 result = super(document_file, self).write(cr, uid, ids, vals, context=context)
    def create(self, cr, uid, vals, context=None):
        """Create an attachment inside the DMS.

        Fills defaults from the context (parent directory, attached
        resource), derives title/partner from the attached record, fetches
        the payload of 'link' attachments over the network, and enforces
        name uniqueness.
        """
        vals['title'] = vals['name']
        # Parent directory: context wins, then vals, then the root directory.
        vals['parent_id'] = context.get('parent_id', False) or vals.get('parent_id', False)
        if not vals['parent_id']:
            vals['parent_id'] = self.pool.get('document.directory')._get_root_directory(cr,uid, context)
        if not vals.get('res_id', False) and context.get('default_res_id', False):
            vals['res_id'] = context.get('default_res_id', False)
        if not vals.get('res_model', False) and context.get('default_res_model', False):
            vals['res_model'] = context.get('default_res_model', False)
        if vals.get('res_id', False) and vals.get('res_model', False):
            # Read the record the attachment points at to enrich our values.
            obj_model = self.pool.get(vals['res_model'])
            result = obj_model.read(cr, uid, [vals['res_id']], context=context)
                if obj.get('name', False):
                    # Borrow the resource name as the attachment title (max 60 chars).
                    vals['title'] = (obj.get('name', ''))[:60]
                # Resolve a partner: directly, via an address, or via partner_id.
                if obj_model._name == 'res.partner':
                    vals['partner_id'] = obj['id']
                elif obj.get('address_id', False):
                    # read() may return a (id, name) pair or a bare id.
                    if isinstance(obj['address_id'], tuple) or isinstance(obj['address_id'], list):
                        address_id = obj['address_id'][0]
                        address_id = obj['address_id']
                    address = self.pool.get('res.partner.address').read(cr, uid, [address_id], context=context)
                        vals['partner_id'] = address[0]['partner_id'][0] or False
                elif obj.get('partner_id', False):
                    if isinstance(obj['partner_id'], tuple) or isinstance(obj['partner_id'], list):
                        vals['partner_id'] = obj['partner_id'][0]
                        vals['partner_id'] = obj['partner_id']
        if vals.get('link', False) :
            # NOTE(review): synchronous network fetch at create time — blocks
            # the transaction while the URL is downloaded.
            datas = base64.encodestring(urllib.urlopen(vals['link']).read())
            datas = vals.get('datas', False)
        vals['file_size'] = datas and len(datas) or 0
        if not self._check_duplication(cr, uid, vals):
            raise osv.except_osv(_('ValidateError'), _('File name must be unique!'))
        result = super(document_file, self).create(cr, uid, vals, context)
    def unlink(self, cr, uid, ids, context={}):
        """Delete attachments and their physically stored files.

        NOTE(review): mutable default 'context={}' is a shared-dict
        anti-pattern — should be 'context=None'. Also 'unres' is never
        assigned in the visible lines (presumably it collects the 'r'
        results from prepare_unlink) — confirm against the full source.
        """
        stor = self.pool.get('document.storage')
        # We have to do the unlink in 2 stages: prepare a list of actual
        # files to be unlinked, update the db (safer to do first, can be
        # rolled back) and then unlink the files. The list wouldn't exist
        # after we discard the objects
        for f in self.browse(cr, uid, ids, context):
            # TODO: update the node cache
            r = stor.prepare_unlink(cr, uid, f.parent_id.storage_id, f)
        res = super(document_file, self).unlink(cr, uid, ids, context)
        stor.do_unlink(cr, uid, unres)