data['gray'] = False
data['url'] = node.help_url
+ # check whether directory_id inherited from document2 is available
+ if 'directory_id' in node and node.directory_id:
+ data['directory'] = "ftp://localhost:8021" #TODO: implement document_directory.get_url
+
+ # get associated workflow
+ if data['model']:
+ wkf_ids = self.pool.get('workflow').search(cr, uid, [('osv', '=', data['model'])])
+ data['workflow'] = (wkf_ids or False) and wkf_ids[0]
+
if node.menu_id:
data['menu'] = {'name': node.menu_id.complete_name, 'id': node.menu_id.id}
rel = refobj[n]
if rel and isinstance(rel, list) :
rel = rel[0]
- _id = (rel or False) and rel.id
- _model = node['model']
- update_relatives(r, _id, _model)
+ if isinstance(rel, basestring):
+ print "XXX: ", rel
+ try: # XXXXXXXXXXXXXXXXXXX
+ _id = (rel or False) and rel.id
+ _model = node['model']
+ update_relatives(r, _id, _model)
+ except:
+ pass
for nid, node in nodes.items():
- if node['active'] or node['model'] == res_model:
+ if not node['gray'] and (node['active'] or node['model'] == res_model):
update_relatives(nid, res_id, res_model)
break
return dict(title=title, perm=perm, notes=notes, nodes=nodes, transitions=transitions)
+    def copy(self, cr, uid, id, default=None, context=None):
+        """ Deep copy the entire process graph.
+
+        Copies every node of the process, rewires each transition onto the
+        freshly copied source/target nodes, and finally copies the process
+        record itself pointing at the new node set.
+
+        :param default: field overrides for the copied process record
+        :param context: optional context dict
+        :return: id of the newly created process record
+        """
+        # `context`/`default` default to None rather than `{}`: a mutable
+        # default argument is shared across calls and can leak state.
+        if context is None:
+            context = {}
+        if not default:
+            default = {}
+
+        pool = pooler.get_pool(cr.dbname)
+        process = pool.get('process.process').browse(cr, uid, [id], context)[0]
+
+        nodes = {}
+        transitions = {}
+
+        # First copy all nodes, and map new node ids to the originals so the
+        # transitions can be re-pointed at the copies below.
+        for node in process.node_ids:
+            for t in node.transition_in:
+                tr = transitions.setdefault(t.id, {})
+                tr['target'] = node.id
+            for t in node.transition_out:
+                tr = transitions.setdefault(t.id, {})
+                tr['source'] = node.id
+            nodes[node.id] = pool.get('process.node').copy(cr, uid, node.id, context=context)
+
+        # Then copy the transitions, rewiring them onto the new nodes.
+        # NOTE(review): this assumes every transition has both endpoints
+        # inside this process; a transition whose other endpoint lives in a
+        # different process would raise KeyError here — confirm with callers.
+        for tid, tr in transitions.items():
+            vals = {
+                'source_node_id': nodes[tr['source']],
+                'target_node_id': nodes[tr['target']],
+            }
+            pool.get('process.transition').copy(cr, uid, tid, default=vals, context=context)
+
+        # And finally copy the process itself with the new node set
+        # ((6, 0, ids) replaces the whole one2many/many2many relation).
+        default.update({
+            'active': True,
+            'node_ids': [(6, 0, nodes.values())]
+        })
+        return super(process_process, self).copy(cr, uid, id, default, context)
+
process_process()
class process_node(osv.osv):
'model_states': lambda *args: False,
'flow_start': lambda *args: False,
}
+
+    def copy(self, cr, uid, id, default=None, context=None):
+        """ Copy a process node without its transitions.
+
+        Transitions reference both endpoints, so they are duplicated
+        separately by process_process.copy and must not be cloned here;
+        the relation fields are therefore reset to empty lists.
+        """
+        # Avoid the shared-mutable-default pitfall of `context={}`.
+        if context is None:
+            context = {}
+        if not default:
+            default = {}
+        default.update({
+            'transition_in': [],
+            'transition_out': []
+        })
+        return super(process_node, self).copy(cr, uid, id, default, context)
+
process_node()
class process_node_condition(osv.osv):
_defaults = {
'state': lambda *args: 'dummy',
}
+
+    def copy(self, cr, uid, id, default=None, context=None):
+        """ Copy a transition action, carrying over its `state` field.
+
+        NOTE(review): presumably this explicit override is needed because
+        the generic copy would not preserve `state` — confirm. Also note
+        this method browses `process.transition.action` and supers
+        `process_transition_action`, so it must live inside that class
+        (the visible diff context shows `process_node_condition` — verify
+        the hunk placement).
+        """
+        # Avoid the shared-mutable-default pitfall of `context={}`.
+        if context is None:
+            context = {}
+        if not default:
+            default = {}
+
+        state = self.pool.get('process.transition.action').browse(cr, uid, [id], context)[0].state
+        if state:
+            default['state'] = state
+
+        return super(process_transition_action, self).copy(cr, uid, id, default, context)
+
process_transition_action()