#
##############################################################################
-# import base64
-# import StringIO
-from osv import osv, fields
-from osv.orm import except_orm
# import urlparse
import pooler
from tools.safe_eval import safe_eval
import errno
-import os
+# import os
import time
+import logging
+
+from StringIO import StringIO
#
# An object that represent an uri
# root: if we are at the first directory of a ressource
#
+logger = logging.getLogger('doc2.nodes')
+
+def _str2time(cre):
+ """ Convert a string with time representation (from db) into time (float)
+
+ Note: a place to fix if datetime is used in db.
+ """
+ if not cre:
+ return time.time()
+ frac = 0.0
+ if isinstance(cre, basestring) and '.' in cre:
+ fdot = cre.find('.')
+ frac = float(cre[fdot:])
+ cre = cre[:fdot]
+ return time.mktime(time.strptime(cre,'%Y-%m-%d %H:%M:%S')) + frac
+
def get_node_context(cr, uid, context):
return node_context(cr, uid, context)
""" This is the root node, representing access to some particular
context """
cached_roots = {}
+ node_file_class = None
def __init__(self, cr, uid, context=None):
self.dbname = cr.dbname
self.uid = uid
self.context = context
self._dirobj = pooler.get_pool(cr.dbname).get('document.directory')
+ self.node_file_class = node_file
+ self.extra_ctx = {} # Extra keys for context, that do _not_ trigger inequality
assert self._dirobj
+ self._dirobj._prepare_context(cr, uid, self, context=context)
self.rootdir = False #self._dirobj._get_root_directory(cr,uid,context)
def __eq__(self, other):
"""Create (or locate) a node for a directory
@param dbro a browse object of document.directory
"""
- fullpath = self._dirobj.get_full_path(cr, self.uid, dbro.id, self.context)
- if dbro.type == 'directory':
- return node_dir(fullpath, None ,self, dbro)
- elif dbro.type == 'ressource':
- assert dbro.ressource_parent_type_id == False
- return node_res_dir(fullpath, None, self, dbro)
- else:
- raise ValueError("dir node for %s type", dbro.type)
+
+ fullpath = dbro.get_full_path(context=self.context)
+ klass = dbro.get_node_class(dbro, context=self.context)
+ return klass(fullpath, None ,self, dbro)
def get_file_node(self, cr, fbro):
""" Create or locate a node for a static file
if fbro.parent_id:
parent = self.get_dir_node(cr, fbro.parent_id)
- return node_file(fbro.name,parent,self,fbro)
+ return self.node_file_class(fbro.name, parent, self, fbro)
class node_descriptor(object):
Nodes have attributes which contain usual file properties
"""
our_type = 'baseclass'
+ DAV_PROPS = None
+ DAV_M_NS = None
def __init__(self, path, parent, context):
assert isinstance(context,node_context)
return False
def get_data(self,cr):
- raise IOError(errno.EINVAL, 'no data for %s' % self.type)
+ raise TypeError('no data for %s'% self.type)
def open_data(self, cr, mode):
""" Open a node_descriptor object for this node.
For this class, there is no data, so no implementation. Each
child class that has data should override this.
"""
- raise IOError(errno.EINVAL, 'no data for %s' % self.type)
+ raise TypeError('no data for %s' % self.type)
def _get_storage(self,cr):
raise RuntimeError("no storage for base class")
see. http://tools.ietf.org/html/rfc2616#section-13.3.3 """
return self._get_ttag(cr) + ':' + self._get_wtag(cr)
- def _get_wtag(self,cr):
+ def _get_wtag(self, cr):
""" Return the modification time as a unique, compact string """
- if self.write_date:
- wtime = time.mktime(time.strptime(self.write_date,'%Y-%m-%d %H:%M:%S'))
- else: wtime = time.time()
- return str(wtime)
+ return str(_str2time(self.write_date))
def _get_ttag(self,cr):
""" Get a unique tag for this type/id of object.
Must be overriden, so that each node is uniquely identified.
"""
print "node_class.get_ttag()",self
- raise RuntimeError("get_etag stub()")
+ raise NotImplementedError("get_etag stub()")
def get_dav_props(self, cr):
""" If this class has special behaviour for GroupDAV etc, export
its capabilities """
- return {}
+ # This fn is placed here rather than WebDAV, because we want the
+ # baseclass methods to apply to all node subclasses
+ return self.DAV_PROPS or {}
def match_dav_eprop(self, cr, match, ns, prop):
res = self.get_dav_eprop(cr, ns, prop)
return False
def get_dav_eprop(self, cr, ns, prop):
+        if not self.DAV_M_NS:
+            return None
+
+        if ns in self.DAV_M_NS:
+            prefix = self.DAV_M_NS[ns]
+        else:
+            logger.debug('No namespace: %s ("%s")',ns, prop)
+            return None
+
+ mname = prefix + "_" + prop.replace('-','_')
+
+ if not hasattr(self, mname):
+ return None
+
+        try:
+            m = getattr(self, mname)
+            r = m(cr)
+            return r
+        except AttributeError:
+            logger.debug('Property %s not supported', prop, exc_info=True)
        return None
+ def get_dav_resourcetype(self, cr):
+ """ Get the DAV resource type.
+
+ Is here because some nodes may exhibit special behaviour, like
+ CalDAV/GroupDAV collections
+ """
+ raise NotImplementedError
+
def move_to(self, cr, ndir_node, new_name=False, fil_obj=None, ndir_obj=None, in_write=False):
""" Move this node to a new parent directory.
@param ndir_node the collection that this node should be moved under
Move operations, as instructed from APIs (eg. request from DAV) could
use this function.
"""
- raise NotImplementedError
+ raise NotImplementedError(repr(self))
+
+ def create_child(self, cr, path, data=None):
+ """ Create a regular file under this node
+ """
+ raise NotImplementedError(repr(self))
+
+ def create_child_collection(self, cr, objname):
+ """ Create a child collection (directory) under self
+ """
+ raise NotImplementedError(repr(self))
def rm(self, cr):
- raise RuntimeError("Not Implemented")
+ raise NotImplementedError(repr(self))
def rmcol(self, cr):
- raise RuntimeError("Not Implemented")
+ raise NotImplementedError(repr(self))
def get_domain(self, cr, filters):
+ # TODO Document
return []
def check_perms(self, perms):
if not domain:
domain = []
- where2 = where + domain + [('type', '=', 'directory')]
+ where2 = where + domain + ['|', ('type', '=', 'directory'), \
+ '&', ('type', '=', 'ressource'), ('ressource_parent_type_id','=',False)]
ids = dirobj.search(cr, uid, where2, context=ctx)
res = []
for dirr in dirobj.browse(cr, uid, ids, context=ctx):
- res.append(node_dir(dirr.name, self, self.context,dirr))
-
- where2 = where + domain + [('type', '=', 'ressource'), ('ressource_parent_type_id','=',False)]
- ids = dirobj.search(cr, uid, where2, context=ctx)
- for dirr in dirobj.browse(cr, uid, ids, context=ctx):
- res.append(node_res_dir(dirr.name, self, self.context, dirr))
+ klass = dirr.get_node_class(dirr, context=ctx)
+ res.append(klass(dirr.name, self, self.context,dirr))
fil_obj = dirobj.pool.get('ir.attachment')
ids = fil_obj.search(cr, uid, where, context=ctx)
if ids:
for fil in fil_obj.browse(cr, uid, ids, context=ctx):
- res.append(node_file(fil.name, self, self.context, fil))
+ klass = self.context.node_file_class
+ res.append(klass(fil.name, self, self.context, fil))
return res
def _file_get(self,cr, nodename=False):
for dfld in dirr.dctx_ids:
try:
self.dctx['dctx_' + dfld.field] = safe_eval(dfld.expr,dc2)
-            except Exception,e:
+            except Exception:
                print "Cannot eval %s" % dfld.expr
-                print e
                pass
return dirobj.create(cr, uid, val)
- def create_child(self, cr, path, data):
+ def create_child(self, cr, path, data=None):
""" API function to create a child file object and node
Return the node_* created
"""
fnode.set_data(cr, data, fil)
return fnode
- def get_etag(self, cr):
- """ Get a tag, unique per object + modification.
-
- see. http://tools.ietf.org/html/rfc2616#section-13.3.3 """
- return self._get_ttag(cr) + ':' + self._get_wtag(cr)
-
- def _get_wtag(self, cr):
- """ Return the modification time as a unique, compact string """
- if self.write_date:
- wtime = time.mktime(time.strptime(self.write_date, '%Y-%m-%d %H:%M:%S'))
- else: wtime = time.time()
- return str(wtime)
-
def _get_ttag(self,cr):
return 'dir-%d' % self.dir_id
if (not self.parent) and ndir_node:
if not dbro.parent_id:
raise IOError(errno.EPERM, "Cannot move the root directory!")
- self.parent = self.context.get_dir_node(cr, dbro.parent_id.id)
+ self.parent = self.context.get_dir_node(cr, dbro.parent_id)
assert self.parent
-
- # TODO: test if parent is writable.
if self.parent != ndir_node:
logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
ret = {}
if new_name and (new_name != dbro.name):
+ if ndir_node.child(cr, new_name):
+ raise IOError(errno.EEXIST, "Destination path already exists")
ret['name'] = new_name
del dbro
if not in_write:
# We have to update the data ourselves
if ret:
- dir_obj.write(cr, self.context.uid, [self.dir_id,], ret, self.context.context)
+ ctx = self.context.context.copy()
+ ctx['__from_node'] = True
+ dir_obj.write(cr, self.context.uid, [self.dir_id,], ret, ctx)
ret = True
return ret
class node_res_dir(node_class):
- """ A special sibling to node_dir, which does only contain dynamically
+ """ A folder containing dynamic folders
+ A special sibling to node_dir, which does only contain dynamically
created folders foreach resource in the foreign model.
All folders should be of type node_res_obj and merely behave like
node_dirs (with limited domain).
"""
our_type = 'collection'
+ res_obj_class = None
def __init__(self, path, parent, context, dirr, dctx=None ):
super(node_res_dir,self).__init__(path, parent, context)
self.dir_id = dirr.id
self.uidperms = dirr.get_dir_permissions()
self.res_model = dirr.ressource_type_id and dirr.ressource_type_id.model or False
self.resm_id = dirr.ressource_id
+ self.res_find_all = dirr.resource_find_all
self.namefield = dirr.resource_field.name or 'name'
self.displayname = dirr.name
# Important: the domain is evaluated using the *parent* dctx!
continue
# Yes! we can't do better but skip nameless records.
- res.append(node_res_obj(name, self.dir_id, self, self.context, self.res_model, bo))
+ res.append(self.res_obj_class(name, self.dir_id, self, self.context, self.res_model, bo))
return res
def _get_ttag(self,cr):
return 'rdir-%d' % self.dir_id
class node_res_obj(node_class):
- """ A special sibling to node_dir, which does only contain dynamically
+ """ A dynamically created folder.
+ A special sibling to node_dir, which does only contain dynamically
created folders foreach resource in the foreign model.
All folders should be of type node_res_obj and merely behave like
node_dirs (with limited domain).
self.write_date = parent.write_date
self.content_length = 0
self.unixperms = 040750
- self.uidperms = parent.uidperms & 0x15
+ self.uidperms = parent.uidperms & 15
self.uuser = parent.uuser
self.ugroup = parent.ugroup
self.res_model = res_model
self.domain = parent.domain
self.displayname = path
self.dctx_dict = parent.dctx_dict
+ self.res_find_all = parent.res_find_all
if res_bo:
self.res_id = res_bo.id
- dc2 = self.context.context
+ dc2 = self.context.context.copy()
dc2.update(self.dctx)
dc2['res_model'] = res_model
dc2['res_id'] = res_bo.id
for fld,expr in self.dctx_dict.items():
try:
self.dctx[fld] = safe_eval(expr, dc2)
-            except Exception,e:
+            except Exception:
                print "Cannot eval %s for %s" % (expr, fld)
-                print e
                pass
return False
if self.domain != other.domain:
return False
+ if self.res_find_all != other.res_find_all:
+ return False
if self.dctx != other.dctx:
return False
return self.dir_id == other.dir_id
return res
def get_dav_props(self, cr):
+ # Deprecated! (but document_ics must be cleaned, first)
res = {}
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
return res
def get_dav_eprop(self, cr, ns, prop):
+ # Deprecated!
        if ns != 'http://groupdav.org/' or prop != 'resourcetype':
-            print "Who asked for %s:%s?" % (ns, prop)
+            logger.warning("Who asked for %s:%s?", ns, prop)
             return None
- res = {}
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
ctx = self.context.context.copy()
where = [('directory_id','=',self.dir_id) ]
ids = cntobj.search(cr,uid,where,context=ctx)
for content in cntobj.browse(cr, uid, ids, context=ctx):
+ # TODO: remove relic of GroupDAV
if content.extension == '.ics': # FIXME: call the content class!
return ('vevent-collection','http://groupdav.org/')
return None
res_name = getattr(bo, namefield)
if not res_name:
continue
- res.append(node_res_obj(res_name, self.dir_id, self, self.context, self.res_model, res_bo = bo))
+ # TODO Revise
+ klass = directory.get_node_class(directory, dynamic=True, context=ctx)
+ res.append(klass(res_name, self.dir_id, self, self.context, self.res_model, res_bo = bo))
where2 = where + [('parent_id','=',self.dir_id) ]
ids = dirobj.search(cr, uid, where2, context=ctx)
for dirr in dirobj.browse(cr, uid, ids, context=ctx):
if dirr.type == 'directory':
- res.append(node_res_obj(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = None, res_id = self.res_id))
+ klass = dirr.get_node_class(dirr, dynamic=True, context=ctx)
+ res.append(klass(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = None, res_id = self.res_id))
elif dirr.type == 'ressource':
# child resources can be controlled by properly set dctx
- res.append(node_res_dir(dirr.name,self,self.context, dirr, {'active_id': self.res_id}))
-
-
-
+ klass = dirr.get_node_class(dirr, context=ctx)
+ res.append(klass(dirr.name,self,self.context, dirr, {'active_id': self.res_id}))
fil_obj = dirobj.pool.get('ir.attachment')
+ if self.res_find_all:
+ where2 = where
where3 = where2 + [('res_model', '=', self.res_model), ('res_id','=',self.res_id)]
# print "where clause for dir_obj", where2
ids = fil_obj.search(cr, uid, where3, context=ctx)
if ids:
for fil in fil_obj.browse(cr, uid, ids, context=ctx):
- res.append(node_file(fil.name, self, self.context, fil))
+ klass = self.context.node_file_class
+ res.append(klass(fil.name, self, self.context, fil))
# Get Child Ressource Directories
dirids = dirids + dirobj.search(cr,uid, where5)
for dirr in dirobj.browse(cr, uid, dirids, context=ctx):
if dirr.type == 'directory' and not dirr.parent_id:
- res.append(node_res_obj(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = None, res_id = self.res_id))
+ klass = dirr.get_node_class(dirr, dynamic=True, context=ctx)
+ res.append(klass(dirr.name, dirr.id, self, self.context, self.res_model, res_bo = None, res_id = self.res_id))
if dirr.type == 'ressource':
- res.append(node_res_dir(dirr.name, self, self.context, dirr, {'active_id': self.res_id}))
+ klass = dirr.get_node_class(dirr, context=ctx)
+ res.append(klass(dirr.name, self, self.context, dirr, {'active_id': self.res_id}))
return res
def create_child_collection(self, cr, objname):
return dirobj.create(cr, uid, val)
- def create_child(self, cr, path, data):
+ def create_child(self, cr, path, data=None):
""" API function to create a child file object and node
Return the node_* created
"""
val = {
'name': path,
'datas_fname': path,
- 'parent_id': self.dir_id,
'res_model': self.res_model,
'res_id': self.res_id,
# Datas are not set here
}
+ if not self.res_find_all:
+ val['parent_id'] = self.dir_id
fil_id = fil_obj.create(cr, uid, val, context=ctx)
fil = fil_obj.browse(cr, uid, fil_id, context=ctx)
- fnode = node_file(path, self, self.context, fil)
+ klass = self.context.node_file_class
+ fnode = klass(path, self, self.context, fil)
if data is not None:
fnode.set_data(cr, data, fil)
return fnode
def _get_ttag(self,cr):
return 'rodir-%d-%d' % (self.dir_id, self.res_id)
+node_res_dir.res_obj_class = node_res_obj
+
class node_file(node_class):
our_type = 'file'
def __init__(self, path, parent, context, fil):
return 'file-%d' % self.file_id
def move_to(self, cr, ndir_node, new_name=False, fil_obj=None, ndir_obj=None, in_write=False):
- if ndir_node.context != self.context:
+ if ndir_node and ndir_node.context != self.context:
raise NotImplementedError("Cannot move files between contexts")
if (not self.check_perms(8)) and ndir_node.check_perms(2):
if (not self.parent):
# there *must* be a parent node for this one
- self.parent = self.context.get_dir_node(cr, dbro.parent_id.id)
+ self.parent = self.context.get_dir_node(cr, dbro.parent_id)
assert self.parent
ret = {}
- if self.parent != ndir_node:
+ if ndir_node and self.parent != ndir_node:
if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)):
logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move files between dynamic folders')
if not in_write:
# We have to update the data ourselves
if ret:
- doc_obj.write(cr, self.context.uid, [self.file_id,], ret, self.context.context)
+ ctx = self.context.context.copy()
+ ctx['__from_node'] = True
+ doc_obj.write(cr, self.context.uid, [self.file_id,], ret, ctx )
ret = True
return ret
self.dctx.update(dctx)
self.act_id = act_id
- def open(self, cr, mode=False):
- raise DeprecationWarning()
-
def fill_fields(self, cr, dctx = None):
""" Try to read the object and fill missing fields, like mimetype,
dates etc.
self.content_length = len(data)
return data
+ def open_data(self, cr, mode):
+ if mode.endswith('b'):
+ mode = mode[:-1]
+ if mode in ('r', 'w'):
+ cperms = mode[:1]
+ elif mode in ('r+', 'w+'):
+ cperms = 'rw'
+ else:
+ raise IOError(errno.EINVAL, "Cannot open at mode %s" % mode)
+
+ if not self.check_perms(cperms):
+ raise IOError(errno.EPERM, "Permission denied")
+
+ ctx = self.context.context.copy()
+ ctx.update(self.dctx)
+
+ return nodefd_content(self, cr, mode, ctx)
+
def get_data_len(self, cr, fil_obj = None):
+ # FIXME : here, we actually generate the content twice!!
+ # we should have cached the generated content, but it is
+ # not advisable to do keep it in memory, until we have a cache
+ # expiration logic.
if not self.content_length:
self.get_data(cr,fil_obj)
return self.content_length
def _get_ttag(self,cr):
return 'cnt-%d%s' % (self.cnt_id,(self.act_id and ('-' + str(self.act_id))) or '')
+
+ def get_dav_resourcetype(self, cr):
+ return ''
+
+class nodefd_content(StringIO, node_descriptor):
+ """ A descriptor to content nodes
+ """
+ def __init__(self, parent, cr, mode, ctx):
+ node_descriptor.__init__(self, parent)
+ self._context=ctx
+
+ if mode in ('r', 'r+'):
+ cntobj = parent.context._dirobj.pool.get('document.directory.content')
+ data = cntobj.process_read(cr, parent.context.uid, parent, ctx)
+ if data:
+ parent.content_length = len(data)
+ StringIO.__init__(self, data)
+ elif mode in ('w', 'w+'):
+ StringIO.__init__(self, None)
+ # at write, we start at 0 (= overwrite), but have the original
+ # data available, in case of a seek()
+ elif mode == 'a':
+ StringIO.__init__(self, None)
+ else:
+ logging.getLogger('document.content').error("Incorrect mode %s specified", mode)
+ raise IOError(errno.EINVAL, "Invalid file mode")
+ self.mode = mode
+
+ def close(self):
+ # we now open a *separate* cursor, to update the data.
+ # FIXME: this may be improved, for concurrency handling
+ if self.mode == 'r':
+ StringIO.close(self)
+ return
+
+ par = self._get_parent()
+ uid = par.context.uid
+ cr = pooler.get_db(par.context.dbname).cursor()
+ try:
+ if self.mode in ('w', 'w+', 'r+'):
+ data = self.getvalue()
+ cntobj = par.context._dirobj.pool.get('document.directory.content')
+ cntobj.process_write(cr, uid, par, data, par.context.context)
+ elif self.mode == 'a':
+ raise NotImplementedError
+ cr.commit()
+ except Exception:
+ logging.getLogger('document.content').exception('Cannot update db content #%d for close:', par.cnt_id)
+ raise
+ finally:
+ cr.close()
+ StringIO.close(self)
+
+#eof
\ No newline at end of file