[MERGE] project: copy attachments when delegating a task
[odoo/odoo.git] / addons / project / project.py
index 9cb367e..0046dea 100644 (file)
@@ -114,6 +114,7 @@ class project(osv.osv):
             if task.project_id: result[task.project_id.id] = True
         return result.keys()
 
+    # dead code
     def _get_project_work(self, cr, uid, ids, context=None):
         result = {}
         for work in self.pool.get('project.task.work').browse(cr, uid, ids, context=context):
@@ -360,7 +361,7 @@ def Project():
     working_days = %s
     resource = %s
 """       % (
-            project.id, 
+            project.id,
             project.date_start, working_days,
             '|'.join(['User_'+str(x) for x in puids])
         )
@@ -424,21 +425,55 @@ class task(osv.osv):
     _log_create = True
     _date_name = "date_start"
 
-    def _read_group_type_id(self, cr, uid, ids, domain, context=None):
-        context = context or {}
+
+    def _resolve_project_id_from_context(self, cr, uid, context=None):
+        """Return ID of project based on the value of 'project_id'
+           context key, or None if it cannot be resolved to a single project.
+        """
+        if context is None: context = {}
+        if type(context.get('project_id')) in (int, long):
+            project_id = context['project_id']
+            return project_id
+        if isinstance(context.get('project_id'), basestring):
+            project_name = context['project_id']
+            project_ids = self.pool.get('project.project').name_search(cr, uid, name=project_name)
+            if len(project_ids) == 1:
+                return project_ids[0][0]
+
+    def _read_group_type_id(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None):
         stage_obj = self.pool.get('project.task.type')
-        stage_ids = stage_obj.search(cr, uid, ['|',('id','in',ids)] + [('project_default','=',1)], context=context)
-        return stage_obj.name_get(cr, uid, stage_ids, context=context)
+        project_id = self._resolve_project_id_from_context(cr, uid, context=context)
+        order = stage_obj._order
+        access_rights_uid = access_rights_uid or uid
+        if read_group_order == 'type_id desc':
+            # lame way to allow reverting search, should just work in the trivial case
+            order = '%s desc' % order
+        if project_id:
+            domain = ['|', ('id','in',ids), ('project_ids','in',project_id)]
+        else:
+            domain = ['|', ('id','in',ids), ('project_default','=',1)]
+        stage_ids = stage_obj._search(cr, uid, domain, order=order, access_rights_uid=access_rights_uid, context=context)
+        result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context)
+        # restore order of the search
+        result.sort(lambda x,y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
+        return result
 
-    def _read_group_user_id(self, cr, uid, ids, domain, context={}):
-        context = context or {}
-        if type(context.get('project_id', None)) not in (int, long):
-            return None
-        proj = self.pool.get('project.project').browse(cr, uid, context['project_id'], context=context)
-        ids += map(lambda x: x.id, proj.members)
-        stage_obj = self.pool.get('res.users')
-        stage_ids = stage_obj.search(cr, uid, [('id','in',ids)], context=context)
-        return stage_obj.name_get(cr, uid, ids, context=context)
+    def _read_group_user_id(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None):
+        res_users = self.pool.get('res.users')
+        project_id = self._resolve_project_id_from_context(cr, uid, context=context)
+        access_rights_uid = access_rights_uid or uid
+        if project_id:
+            ids += self.pool.get('project.project').read(cr, access_rights_uid, project_id, ['members'], context=context)['members']
+            order = res_users._order
+            # lame way to allow reverting search, should just work in the trivial case
+            if read_group_order == 'user_id desc':
+                order = '%s desc' % order
+            # de-duplicate and apply search order
+            ids = res_users._search(cr, uid, [('id','in',ids)], order=order, access_rights_uid=access_rights_uid, context=context)
+        result = res_users.name_get(cr, access_rights_uid, ids, context=context)
+        # restore order of the search
+        result.sort(lambda x,y: cmp(ids.index(x[0]), ids.index(y[0])))
+        return result
 
     _group_by_full = {
         'type_id': _read_group_type_id,
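
A minimal sketch of how the two _read_group_* helpers above are meant to be used: they back the _group_by_full hooks that read_group() consults when folding kanban columns, so grouping tasks by stage with a project in context should also return that project's empty stages. The shell-style names (pool, cr, uid) and the project id 42 are assumptions, not part of the patch.

    # Run from an OpenERP shell/test where pool, cr and uid are available.
    task_obj = pool.get('project.task')
    ctx = {'project_id': 42}  # hypothetical project id
    groups = task_obj.read_group(cr, uid, [('project_id', '=', 42)],
                                 ['remaining_hours'], ['type_id'], context=ctx)
    # Each group carries its stage, even when no task currently sits in it.
    for group in groups:
        print group['type_id']
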
@@ -560,7 +595,12 @@ class task(osv.osv):
         'state': fields.selection([('draft', 'New'),('open', 'In Progress'),('pending', 'Pending'), ('done', 'Done'), ('cancelled', 'Cancelled')], 'State', readonly=True, required=True,
                                   help='If the task is created the state is \'Draft\'.\n If the task is started, the state becomes \'In Progress\'.\n If review is needed the task is in \'Pending\' state.\
                                   \n If the task is over, the state is set to \'Done\'.'),
-        'kanban_state': fields.selection([('blocked', 'Blocked'),('normal', 'Normal'),('done', 'Done')], 'Kanban State', readonly=True, required=False),
+        'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready To Pull')], 'Kanban State',
+                                         help="A task's kanban state indicates special situations affecting it:\n"
+                                              " * Normal is the default situation\n"
+                                              " * Blocked indicates something is preventing the progress of this task\n"
+                                              " * Ready To Pull indicates the task is ready to be pulled to the next stage",
+                                         readonly=True, required=False),
         'create_date': fields.datetime('Create Date', readonly=True,select=True),
         'date_start': fields.datetime('Starting Date',select=True),
         'date_end': fields.datetime('Ending Date',select=True),
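
The relabelled 'done' state ("Ready To Pull") only changes presentation; the stored selection values remain 'normal', 'blocked' and 'done'. A hedged sketch of driving them (task_ids, pool, cr and uid are assumed to be in scope; set_kanban_state_blocked is the button handler visible further down in this diff):

    task_obj = pool.get('project.task')
    # Flag tasks as blocked through the dedicated button handler...
    task_obj.set_kanban_state_blocked(cr, uid, task_ids)
    # ...or write the raw selection value directly.
    task_obj.write(cr, uid, task_ids, {'kanban_state': 'done'})
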
@@ -840,14 +880,24 @@ class task(osv.osv):
         self.write(cr, uid, ids, {'state': 'draft'}, context=context)
         return True
 
+
+    def _delegate_task_attachments(self, cr, uid, task_id, delegated_task_id, context=None):
+        attachment = self.pool.get('ir.attachment')
+        attachment_ids = attachment.search(cr, uid, [('res_model', '=', self._name), ('res_id', '=', task_id)], context=context)
+        new_attachment_ids = []
+        for attachment_id in attachment_ids:
+            new_attachment_ids.append(attachment.copy(cr, uid, attachment_id, default={'res_id': delegated_task_id}, context=context))
+        return new_attachment_ids
+
+
     def do_delegate(self, cr, uid, ids, delegate_data={}, context=None):
         """
         Delegate tasks to other users.
         """
         assert delegate_data['user_id'], _("Delegated User should be specified")
-        delegrated_tasks = {}
+        delegated_tasks = {}
         for task in self.browse(cr, uid, ids, context=context):
-            delegrated_task_id = self.copy(cr, uid, task.id, {
+            delegated_task_id = self.copy(cr, uid, task.id, {
                 'name': delegate_data['name'],
                 'project_id': delegate_data['project_id'] and delegate_data['project_id'][0] or False,
                 'user_id': delegate_data['user_id'] and delegate_data['user_id'][0] or False,
@@ -858,6 +908,7 @@ class task(osv.osv):
                 'child_ids': [],
                 'work_ids': []
             }, context=context)
+            self._delegate_task_attachments(cr, uid, task.id, delegated_task_id, context=context)
             newname = delegate_data['prefix'] or ''
             task.write({
                 'remaining_hours': delegate_data['planned_hours_me'],
@@ -871,8 +922,8 @@ class task(osv.osv):
             
             message = _("The task '%s' has been delegated to %s.") % (delegate_data['name'], delegate_data['user_id'][1])
             self.log(cr, uid, task.id, message)
-            delegrated_tasks[task.id] = delegrated_task_id
-        return delegrated_tasks
+            delegated_tasks[task.id] = delegated_task_id
+        return delegated_tasks
 
     def do_pending(self, cr, uid, ids, context={}):
         self.write(cr, uid, ids, {'state': 'pending'}, context=context)
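
The attachment copy is the point of this merge: do_delegate() now calls _delegate_task_attachments() so the delegated copy of a task keeps the ir.attachment records of the original. A minimal sketch of the helper on its own (task_id and delegated_task_id are placeholder ids; pool, cr and uid are assumed):

    task_obj = pool.get('project.task')
    attachment_obj = pool.get('ir.attachment')
    # Duplicate every attachment of the original task onto the delegated one;
    # the helper returns the ids of the freshly copied ir.attachment records.
    new_ids = task_obj._delegate_task_attachments(cr, uid, task_id,
                                                  delegated_task_id)
    assert attachment_obj.search(cr, uid, [('res_model', '=', 'project.task'),
                                           ('res_id', '=', delegated_task_id)])
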
@@ -881,21 +932,24 @@ class task(osv.osv):
             self.log(cr, uid, id, message)
         return True
 
-    def set_remaining_time_1(self, cr, uid, ids, context=None):
-        self.write(cr, uid, ids, {'remaining_hours': 1.0}, context=context)
+    def set_remaining_time(self, cr, uid, ids, remaining_time=1.0, context=None):
+        for task in self.browse(cr, uid, ids, context=context):
+            if (task.state=='draft') or (task.planned_hours==0.0):
+                self.write(cr, uid, [task.id], {'planned_hours': remaining_time}, context=context)
+        self.write(cr, uid, ids, {'remaining_hours': remaining_time}, context=context)
         return True
 
+    def set_remaining_time_1(self, cr, uid, ids, context=None):
+        return self.set_remaining_time(cr, uid, ids, 1.0, context)
+
     def set_remaining_time_2(self, cr, uid, ids, context=None):
-        self.write(cr, uid, ids, {'remaining_hours': 2.0}, context=context)
-        return True
+        return self.set_remaining_time(cr, uid, ids, 2.0, context)
 
     def set_remaining_time_5(self, cr, uid, ids, context=None):
-        self.write(cr, uid, ids, {'remaining_hours': 5.0}, context=context)
-        return True
+        return self.set_remaining_time(cr, uid, ids, 5.0, context)
 
     def set_remaining_time_10(self, cr, uid, ids, context=None):
-        self.write(cr, uid, ids, {'remaining_hours': 10.0}, context=context)
-        return True
+        return self.set_remaining_time(cr, uid, ids, 10.0, context)
 
     def set_kanban_state_blocked(self, cr, uid, ids, context=None):
         self.write(cr, uid, ids, {'kanban_state': 'blocked'}, context=context)
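
The four hard-coded set_remaining_time_N buttons now delegate to a single parametrised helper, which also initialises planned_hours on draft tasks (or tasks planned at 0.0). A sketch under the same shell assumptions as above:

    task_obj = pool.get('project.task')
    # Any value works now, not just the 1/2/5/10 hour shortcuts.
    task_obj.set_remaining_time(cr, uid, task_ids, remaining_time=8.0)
    # The legacy buttons are thin wrappers around the same helper.
    task_obj.set_remaining_time_2(cr, uid, task_ids)
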
@@ -935,6 +989,43 @@ class task(osv.osv):
     def prev_type(self, cr, uid, ids, *args):
         return self._change_type(cr, uid, ids, False, *args)
 
+    def _store_history(self, cr, uid, ids, context=None):
+        for task in self.browse(cr, uid, ids, context=context):
+            self.pool.get('project.task.history').create(cr, uid, {
+                'task_id': task.id,
+                'remaining_hours': task.remaining_hours,
+                'planned_hours': task.planned_hours,
+                'kanban_state': task.kanban_state,
+                'type_id': task.type_id.id,
+                'state': task.state,
+                'user_id': task.user_id.id
+            }, context=context)
+        return True
+
+    def create(self, cr, uid, vals, context=None):
+        result = super(task, self).create(cr, uid, vals, context=context)
+        self._store_history(cr, uid, [result], context=context)
+        return result
+
+    # Overridden to reset the kanban_state to normal whenever
+    # the stage (type_id) of the task changes.
+    def write(self, cr, uid, ids, vals, context=None):
+        if isinstance(ids, (int, long)):
+            ids = [ids]
+        if vals and not 'kanban_state' in vals and 'type_id' in vals:
+            new_stage = vals.get('type_id')
+            vals_reset_kstate = dict(vals, kanban_state='normal')
+            for t in self.browse(cr, uid, ids, context=context):
+                write_vals = vals_reset_kstate if t.type_id.id != new_stage else vals
+                super(task,self).write(cr, uid, [t.id], write_vals, context=context)
+            result = True
+        else:
+            result = super(task,self).write(cr, uid, ids, vals, context=context)
+        if ('type_id' in vals) or ('remaining_hours' in vals) or ('user_id' in vals) or ('state' in vals) or ('kanban_state' in vals):
+            self._store_history(cr, uid, ids, context=context)
+        return result
+
     def unlink(self, cr, uid, ids, context=None):
         if context == None:
             context = {}
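
Together, the create() and write() overrides above store a project.task.history row whenever a field relevant to the cumulative flow chart changes, and write() resets kanban_state to 'normal' when a task moves to a different stage (unless kanban_state is written explicitly). A sketch with task_id and new_stage_id as placeholder ids:

    task_obj = pool.get('project.task')
    history_obj = pool.get('project.task.history')
    # Moving the task to another stage: kanban_state drops back to 'normal'
    # and a new history row is recorded for the task.
    task_obj.write(cr, uid, [task_id], {'type_id': new_stage_id})
    history_ids = history_obj.search(cr, uid, [('task_id', '=', task_id)],
                                     order='id desc')
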
@@ -1021,7 +1112,7 @@ class account_analytic_account(osv.osv):
         if vals.get('child_ids', False) and context.get('analytic_project_copy', False):
             vals['child_ids'] = []
         return super(account_analytic_account, self).create(cr, uid, vals, context=context)
-    
+
     def unlink(self, cr, uid, ids, *args, **kwargs):
         project_obj = self.pool.get('project.project')
         analytic_ids = project_obj.search(cr, uid, [('analytic_account_id','in',ids)])
@@ -1031,4 +1122,92 @@ class account_analytic_account(osv.osv):
 
 account_analytic_account()
 
-# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
+#
+# Tasks History, used for cumulative flow charts (Lean/Agile)
+#
+
+class project_task_history(osv.osv):
+    _name = 'project.task.history'
+    _description = 'History of Tasks'
+    _rec_name = 'task_id'
+    _log_access = False
+    def _get_date(self, cr, uid, ids, name, arg, context=None):
+        result = {}
+        for history in self.browse(cr, uid, ids, context=context):
+            if history.state in ('done','cancelled'):
+                result[history.id] = history.date
+                continue
+            cr.execute('''select
+                    date
+                from
+                    project_task_history
+                where
+                    task_id=%s and
+                    id>%s
+                order by id limit 1''', (history.task_id.id, history.id))
+            res = cr.fetchone()
+            result[history.id] = res and res[0] or False
+        return result
+
+    def _get_related_date(self, cr, uid, ids, context=None):
+        result = []
+        for history in self.browse(cr, uid, ids, context=context):
+            cr.execute('''select
+                    id
+                from 
+                    project_task_history
+                where
+                    task_id=%s and
+                    id<%s
+                order by id desc limit 1''', (history.task_id.id, history.id))
+            res = cr.fetchone()
+            if res:
+                result.append(res[0])
+        return result
+
+    _columns = {
+        'task_id': fields.many2one('project.task', 'Task', ondelete='cascade', required=True, select=True),
+        'type_id': fields.many2one('project.task.type', 'Stage'),
+        'state': fields.selection([('draft', 'New'),('open', 'In Progress'),('pending', 'Pending'), ('done', 'Done'), ('cancelled', 'Cancelled')], 'State'),
+        'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready To Pull')], 'Kanban State', required=False),
+        'date': fields.date('Date', select=True),
+        'end_date': fields.function(_get_date, string='End Date', type="date", store={
+            'project.task.history': (_get_related_date, None, 20)
+        }),
+        'remaining_hours': fields.float('Remaining Time', digits=(16,2)),
+        'planned_hours': fields.float('Planned Time', digits=(16,2)),
+        'user_id': fields.many2one('res.users', 'Responsible'),
+    }
+    _defaults = {
+        'date': lambda s,c,u,ctx: time.strftime('%Y-%m-%d')
+    }
+project_task_history()
+
+class project_task_history_cumulative(osv.osv):
+    _name = 'project.task.history.cumulative'
+    _table = 'project_task_history_cumulative'
+    _inherit = 'project.task.history'
+    _auto = False
+    _columns = {
+        'end_date': fields.date('End Date'),
+        'project_id': fields.related('task_id', 'project_id', string='Project', type='many2one', relation='project.project')
+    }
+    def init(self, cr):
+        cr.execute(""" CREATE OR REPLACE VIEW project_task_history_cumulative AS (
+            SELECT
+                history.date::varchar||'-'||history.history_id::varchar as id,
+                history.date as end_date,
+                *
+            FROM (
+                SELECT
+                    id as history_id,
+                    date+generate_series(0, CAST((coalesce(end_date,DATE 'tomorrow')::date - date)AS integer)-1) as date,
+                    task_id, type_id, user_id, kanban_state, state,
+                    remaining_hours, planned_hours
+                FROM
+                    project_task_history
+            ) as history
+        )
+        """)
+project_task_history_cumulative()
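
The project_task_history_cumulative view expands each history row into one row per day between its date and end_date (via generate_series), which is the shape a cumulative flow diagram needs. A hedged sketch of consuming it (project_id is a placeholder id; pool, cr and uid are assumed):

    cumulative_obj = pool.get('project.task.history.cumulative')
    # One aggregated point per stage; group by 'date' instead to build the
    # day-by-day series behind the cumulative flow chart.
    groups = cumulative_obj.read_group(cr, uid,
                                       [('project_id', '=', project_id)],
                                       ['remaining_hours', 'planned_hours'],
                                       ['type_id'])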
+