_defaults = {
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c),
- 'lot_input_id': _default_lot_input_stock_id,
- 'lot_stock_id': _default_lot_input_stock_id,
- 'lot_output_id': _default_lot_output_id,
+ 'lot_stock_id': _default_stock_id,
+ 'reception_steps': 'one_step',
+ 'delivery_steps': 'ship_only',
}
+ _sql_constraints = [
+ ('warehouse_name_uniq', 'unique(name, company_id)', 'The name of the warehouse must be unique per company!'),
+ ('warehouse_code_uniq', 'unique(code, company_id)', 'The code of the warehouse must be unique per company!'),
+ ]
+ def _get_partner_locations(self, cr, uid, ids, context=None):
+ ''' returns a tuple made of the browse record of customer location and the browse record of supplier location'''
+ data_obj = self.pool.get('ir.model.data')
+ location_obj = self.pool.get('stock.location')
+ try:
+ customer_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_customers')[1]
+ supplier_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_suppliers')[1]
+ except Exception:
+ customer_loc = location_obj.search(cr, uid, [('usage', '=', 'customer')], context=context)
+ customer_loc = customer_loc and customer_loc[0] or False
+ supplier_loc = location_obj.search(cr, uid, [('usage', '=', 'supplier')], context=context)
+ supplier_loc = supplier_loc and supplier_loc[0] or False
+ if not (customer_loc and supplier_loc):
+ raise osv.except_osv(_('Error!'), _('Can\'t find any customer or supplier location.'))
+ return location_obj.browse(cr, uid, [customer_loc, supplier_loc], context=context)
+
+ def switch_location(self, cr, uid, ids, warehouse, new_reception_step=False, new_delivery_step=False, context=None):
+ location_obj = self.pool.get('stock.location')
-#----------------------------------------------------------
-# "Empty" Classes that are used to vary from the original stock.picking (that are dedicated to the internal pickings)
-# in order to offer a different usability with different views, labels, available reports/wizards...
-#----------------------------------------------------------
-class stock_picking_in(osv.osv):
- _name = "stock.picking.in"
- _inherit = "stock.picking"
- _table = "stock_picking"
- _description = "Incoming Shipments"
-
- def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
- return self.pool.get('stock.picking').search(cr, user, args, offset, limit, order, context, count)
-
- def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
- return self.pool.get('stock.picking').read(cr, uid, ids, fields=fields, context=context, load=load)
-
- def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
- return self.pool['stock.picking'].read_group(cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby)
-
- def check_access_rights(self, cr, uid, operation, raise_exception=True):
- #override in order to redirect the check of acces rights on the stock.picking object
- return self.pool.get('stock.picking').check_access_rights(cr, uid, operation, raise_exception=raise_exception)
-
- def check_access_rule(self, cr, uid, ids, operation, context=None):
- #override in order to redirect the check of acces rules on the stock.picking object
- return self.pool.get('stock.picking').check_access_rule(cr, uid, ids, operation, context=context)
-
- def create_workflow(self, cr, uid, ids, context=None):
- # overridden in order to trigger the workflow of stock.picking at the end of create,
- # write and unlink operation instead of its own workflow (which is not existing)
- return self.pool.get('stock.picking').create_workflow(cr, uid, ids, context=context)
-
- def delete_workflow(self, cr, uid, ids, context=None):
- # overridden in order to trigger the workflow of stock.picking at the end of create,
- # write and unlink operation instead of its own workflow (which is not existing)
- return self.pool.get('stock.picking').delete_workflow(cr, uid, ids, context=context)
-
- def step_workflow(self, cr, uid, ids, context=None):
- # overridden in order to trigger the workflow of stock.picking at the end of create,
- # write and unlink operation instead of its own workflow (which is not existing)
- return self.pool.get('stock.picking').step_workflow(cr, uid, ids, context=context)
-
- def signal_workflow(self, cr, uid, ids, signal, context=None):
- # overridden in order to fire the workflow signal on given stock.picking workflow instance
- # instead of its own workflow (which is not existing)
- return self.pool.get('stock.picking').signal_workflow(cr, uid, ids, signal, context=context)
-
- def message_post(self, *args, **kwargs):
- """Post the message on stock.picking to be able to see it in the form view when using the chatter"""
- return self.pool.get('stock.picking').message_post(*args, **kwargs)
-
- def message_subscribe(self, *args, **kwargs):
- """Send the subscribe action on stock.picking model as it uses _name in request"""
- return self.pool.get('stock.picking').message_subscribe(*args, **kwargs)
-
- def message_unsubscribe(self, *args, **kwargs):
- """Send the unsubscribe action on stock.picking model to match with subscribe"""
- return self.pool.get('stock.picking').message_unsubscribe(*args, **kwargs)
-
- def default_get(self, cr, uid, fields_list, context=None):
- # merge defaults from stock.picking with possible defaults defined on stock.picking.in
- defaults = self.pool['stock.picking'].default_get(cr, uid, fields_list, context=context)
- in_defaults = super(stock_picking_in, self).default_get(cr, uid, fields_list, context=context)
- defaults.update(in_defaults)
- return defaults
+ new_reception_step = new_reception_step or warehouse.reception_steps
+ new_delivery_step = new_delivery_step or warehouse.delivery_steps
+ if warehouse.reception_steps != new_reception_step:
+ location_obj.write(cr, uid, [warehouse.wh_input_stock_loc_id.id, warehouse.wh_qc_stock_loc_id.id], {'active': False}, context=context)
+ if new_reception_step != 'one_step':
+ location_obj.write(cr, uid, warehouse.wh_input_stock_loc_id.id, {'active': True}, context=context)
+ if new_reception_step == 'three_steps':
+ location_obj.write(cr, uid, warehouse.wh_qc_stock_loc_id.id, {'active': True}, context=context)
+
+ if warehouse.delivery_steps != new_delivery_step:
+ location_obj.write(cr, uid, [warehouse.wh_output_stock_loc_id.id, warehouse.wh_pack_stock_loc_id.id], {'active': False}, context=context)
+ if new_delivery_step != 'ship_only':
+ location_obj.write(cr, uid, warehouse.wh_output_stock_loc_id.id, {'active': True}, context=context)
+ if new_delivery_step == 'pick_pack_ship':
+ location_obj.write(cr, uid, warehouse.wh_pack_stock_loc_id.id, {'active': True}, context=context)
+ return True
+
+ def _get_reception_delivery_route(self, cr, uid, warehouse, route_name, context=None):
+ return {
+ 'name': self._format_routename(cr, uid, warehouse, route_name, context=context),
+ 'product_categ_selectable': True,
+ 'product_selectable': False,
+ 'sequence': 10,
+ }
+
+ def _get_supply_pull_rules(self, cr, uid, supplied_warehouse, values, new_route_id, context=None):
+ pull_rules_list = []
+ for from_loc, dest_loc, pick_type_id, warehouse in values:
+ pull_rules_list.append({
+ 'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
+ 'location_src_id': from_loc.id,
+ 'location_id': dest_loc.id,
+ 'route_id': new_route_id,
+ 'action': 'move',
+ 'picking_type_id': pick_type_id,
+ 'procure_method': warehouse.lot_stock_id.id != from_loc.id and 'make_to_order' or 'make_to_stock', # first part of the resupply route is MTS
+ 'warehouse_id': supplied_warehouse.id,
+ 'propagate_warehouse_id': warehouse.id,
+ })
+ return pull_rules_list
+
+ def _get_push_pull_rules(self, cr, uid, warehouse, active, values, new_route_id, context=None):
+ first_rule = True
+ push_rules_list = []
+ pull_rules_list = []
+ for from_loc, dest_loc, pick_type_id in values:
+ push_rules_list.append({
+ 'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
+ 'location_from_id': from_loc.id,
+ 'location_dest_id': dest_loc.id,
+ 'route_id': new_route_id,
+ 'auto': 'manual',
+ 'picking_type_id': pick_type_id,
+ 'active': active,
+ 'warehouse_id': warehouse.id,
+ })
+ pull_rules_list.append({
+ 'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
+ 'location_src_id': from_loc.id,
+ 'location_id': dest_loc.id,
+ 'route_id': new_route_id,
+ 'action': 'move',
+ 'picking_type_id': pick_type_id,
+ 'procure_method': first_rule is True and 'make_to_stock' or 'make_to_order',
+ 'active': active,
+ 'warehouse_id': warehouse.id,
+ })
+ first_rule = False
+ return push_rules_list, pull_rules_list
+
+ def _get_mto_route(self, cr, uid, context=None):
+ route_obj = self.pool.get('stock.location.route')
+ data_obj = self.pool.get('ir.model.data')
+ try:
+ mto_route_id = data_obj.get_object_reference(cr, uid, 'stock', 'route_warehouse0_mto')[1]
+ except Exception:
+ mto_route_id = route_obj.search(cr, uid, [('name', 'like', _('Make To Order'))], context=context)
+ mto_route_id = mto_route_id and mto_route_id[0] or False
+ if not mto_route_id:
+ raise osv.except_osv(_('Error!'), _('Can\'t find any generic Make To Order route.'))
+ return mto_route_id
+
+ def _check_remove_mto_resupply_rules(self, cr, uid, warehouse, context=None):
+ """ Removes the pull rules that resupply other warehouses from this warehouse's stock location (rules targeting transit locations), as they are no longer needed """
+ pull_obj = self.pool.get('procurement.rule')
+ mto_route_id = self._get_mto_route(cr, uid, context=context)
+ rules = pull_obj.search(cr, uid, ['&', ('location_src_id', '=', warehouse.lot_stock_id.id), ('location_id.usage', '=', 'transit')], context=context)
+ pull_obj.unlink(cr, uid, rules, context=context)
+
+ def _get_mto_pull_rule(self, cr, uid, warehouse, values, context=None):
+ mto_route_id = self._get_mto_route(cr, uid, context=context)
+ from_loc, dest_loc, pick_type_id = values[0]
+ return {
+ 'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context) + _(' MTO'),
+ 'location_src_id': from_loc.id,
+ 'location_id': dest_loc.id,
+ 'route_id': mto_route_id,
+ 'action': 'move',
+ 'picking_type_id': pick_type_id,
+ 'procure_method': 'make_to_order',
+ 'active': True,
+ 'warehouse_id': warehouse.id,
+ }
+
+ def _get_crossdock_route(self, cr, uid, warehouse, route_name, context=None):
+ return {
+ 'name': self._format_routename(cr, uid, warehouse, route_name, context=context),
+ 'warehouse_selectable': False,
+ 'product_selectable': True,
+ 'product_categ_selectable': True,
+ 'active': warehouse.delivery_steps != 'ship_only' and warehouse.reception_steps != 'one_step',
+ 'sequence': 20,
+ }
+
+ def create_routes(self, cr, uid, ids, warehouse, context=None):
+ wh_route_ids = []
+ route_obj = self.pool.get('stock.location.route')
+ pull_obj = self.pool.get('procurement.rule')
+ push_obj = self.pool.get('stock.location.path')
+ routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context)
+ #create reception route and rules
+ route_name, values = routes_dict[warehouse.reception_steps]
+ route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context)
+ reception_route_id = route_obj.create(cr, uid, route_vals, context=context)
+ wh_route_ids.append((4, reception_route_id))
+ push_rules_list, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, reception_route_id, context=context)
+ #create the push/pull rules
+ for push_rule in push_rules_list:
+ push_obj.create(cr, uid, vals=push_rule, context=context)
+ for pull_rule in pull_rules_list:
+ #all pull rules in reception route are mto, because we don't want to wait for the scheduler to trigger an orderpoint on input location
+ pull_rule['procure_method'] = 'make_to_order'
+ pull_obj.create(cr, uid, vals=pull_rule, context=context)
+
+ #create MTS route and pull rules for delivery and a specific route MTO to be set on the product
+ route_name, values = routes_dict[warehouse.delivery_steps]
+ route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context)
+ #create the route and its pull rules
+ delivery_route_id = route_obj.create(cr, uid, route_vals, context=context)
+ wh_route_ids.append((4, delivery_route_id))
+ dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, delivery_route_id, context=context)
+ for pull_rule in pull_rules_list:
+ pull_obj.create(cr, uid, vals=pull_rule, context=context)
+ #create MTO pull rule and link it to the generic MTO route
+ mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, values, context=context)
+ mto_pull_id = pull_obj.create(cr, uid, mto_pull_vals, context=context)
+
+ #create a route for cross dock operations, that can be set on products and product categories
+ route_name, values = routes_dict['crossdock']
+ crossdock_route_vals = self._get_crossdock_route(cr, uid, warehouse, route_name, context=context)
+ crossdock_route_id = route_obj.create(cr, uid, vals=crossdock_route_vals, context=context)
+ wh_route_ids.append((4, crossdock_route_id))
+ dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, warehouse.delivery_steps != 'ship_only' and warehouse.reception_steps != 'one_step', values, crossdock_route_id, context=context)
+ for pull_rule in pull_rules_list:
+ # Fixed cross-dock is logically mto
+ pull_rule['procure_method'] = 'make_to_order'
+ pull_obj.create(cr, uid, vals=pull_rule, context=context)
+
+ #create route selectable on the product to resupply the warehouse from another one
+ self._create_resupply_routes(cr, uid, warehouse, warehouse.resupply_wh_ids, warehouse.default_resupply_wh_id, context=context)
+
+ #return routes and mto pull rule to store on the warehouse
+ return {
+ 'route_ids': wh_route_ids,
+ 'mto_pull_id': mto_pull_id,
+ 'reception_route_id': reception_route_id,
+ 'delivery_route_id': delivery_route_id,
+ 'crossdock_route_id': crossdock_route_id,
+ }
+
+ def change_route(self, cr, uid, ids, warehouse, new_reception_step=False, new_delivery_step=False, context=None):
+ picking_type_obj = self.pool.get('stock.picking.type')
+ pull_obj = self.pool.get('procurement.rule')
+ push_obj = self.pool.get('stock.location.path')
+ route_obj = self.pool.get('stock.location.route')
+ new_reception_step = new_reception_step or warehouse.reception_steps
+ new_delivery_step = new_delivery_step or warehouse.delivery_steps
+
+ #change the default source and destination location and (de)activate picking types
+ input_loc = warehouse.wh_input_stock_loc_id
+ if new_reception_step == 'one_step':
+ input_loc = warehouse.lot_stock_id
+ output_loc = warehouse.wh_output_stock_loc_id
+ if new_delivery_step == 'ship_only':
+ output_loc = warehouse.lot_stock_id
+ picking_type_obj.write(cr, uid, warehouse.in_type_id.id, {'default_location_dest_id': input_loc.id}, context=context)
+ picking_type_obj.write(cr, uid, warehouse.out_type_id.id, {'default_location_src_id': output_loc.id}, context=context)
+ picking_type_obj.write(cr, uid, warehouse.pick_type_id.id, {'active': new_delivery_step != 'ship_only'}, context=context)
+ picking_type_obj.write(cr, uid, warehouse.pack_type_id.id, {'active': new_delivery_step == 'pick_pack_ship'}, context=context)
+
+ routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context)
+ #update delivery route and rules: unlink the existing rules of the warehouse delivery route and recreate it
+ pull_obj.unlink(cr, uid, [pu.id for pu in warehouse.delivery_route_id.pull_ids], context=context)
+ route_name, values = routes_dict[new_delivery_step]
+ route_obj.write(cr, uid, warehouse.delivery_route_id.id, {'name': self._format_routename(cr, uid, warehouse, route_name, context=context)}, context=context)
+ dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, warehouse.delivery_route_id.id, context=context)
+ #create the pull rules
+ for pull_rule in pull_rules_list:
+ pull_obj.create(cr, uid, vals=pull_rule, context=context)
+
+ #update reception route and rules: unlink the existing rules of the warehouse reception route and recreate it
+ pull_obj.unlink(cr, uid, [pu.id for pu in warehouse.reception_route_id.pull_ids], context=context)
+ push_obj.unlink(cr, uid, [pu.id for pu in warehouse.reception_route_id.push_ids], context=context)
+ route_name, values = routes_dict[new_reception_step]
+ route_obj.write(cr, uid, warehouse.reception_route_id.id, {'name': self._format_routename(cr, uid, warehouse, route_name, context=context)}, context=context)
+ push_rules_list, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, warehouse.reception_route_id.id, context=context)
+ #create the push/pull rules
+ for push_rule in push_rules_list:
+ push_obj.create(cr, uid, vals=push_rule, context=context)
+ for pull_rule in pull_rules_list:
+ #all pull rules in reception route are mto, because we don't want to wait for the scheduler to trigger an orderpoint on input location
+ pull_rule['procure_method'] = 'make_to_order'
+ pull_obj.create(cr, uid, vals=pull_rule, context=context)
+
+ route_obj.write(cr, uid, warehouse.crossdock_route_id.id, {'active': new_reception_step != 'one_step' and new_delivery_step != 'ship_only'}, context=context)
+
+ #change MTO rule
+ dummy, values = routes_dict[new_delivery_step]
+ mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, values, context=context)
+ pull_obj.write(cr, uid, warehouse.mto_pull_id.id, mto_pull_vals, context=context)
+ return True
+
+ def create(self, cr, uid, vals, context=None):
+ if context is None:
+ context = {}
+ if vals is None:
+ vals = {}
+ data_obj = self.pool.get('ir.model.data')
+ seq_obj = self.pool.get('ir.sequence')
+ picking_type_obj = self.pool.get('stock.picking.type')
+ location_obj = self.pool.get('stock.location')
+
+ #create view location for warehouse
+ wh_loc_id = location_obj.create(cr, uid, {
+ 'name': vals.get('code'),
+ 'usage': 'view',
+ 'location_id': data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_locations')[1]
+ }, context=context)
+ vals['view_location_id'] = wh_loc_id
+ #create all location
+ def_values = self.default_get(cr, uid, {'reception_steps', 'delivery_steps'})
+ reception_steps = vals.get('reception_steps', def_values['reception_steps'])
+ delivery_steps = vals.get('delivery_steps', def_values['delivery_steps'])
+ context_with_inactive = context.copy()
+ context_with_inactive['active_test'] = False
+ sub_locations = [
+ {'name': _('Stock'), 'active': True, 'field': 'lot_stock_id'},
+ {'name': _('Input'), 'active': reception_steps != 'one_step', 'field': 'wh_input_stock_loc_id'},
+ {'name': _('Quality Control'), 'active': reception_steps == 'three_steps', 'field': 'wh_qc_stock_loc_id'},
+ {'name': _('Output'), 'active': delivery_steps != 'ship_only', 'field': 'wh_output_stock_loc_id'},
+ {'name': _('Packing Zone'), 'active': delivery_steps == 'pick_pack_ship', 'field': 'wh_pack_stock_loc_id'},
+ ]
+ for values in sub_locations:
+ location_id = location_obj.create(cr, uid, {
+ 'name': values['name'],
+ 'usage': 'internal',
+ 'location_id': wh_loc_id,
+ 'active': values['active'],
+ }, context=context_with_inactive)
+ vals[values['field']] = location_id
+
+ #create new sequences
+ in_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': vals.get('name', '') + _(' Sequence in'), 'prefix': vals.get('code', '') + '/IN/', 'padding': 5}, context=context)
+ out_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': vals.get('name', '') + _(' Sequence out'), 'prefix': vals.get('code', '') + '/OUT/', 'padding': 5}, context=context)
+ pack_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': vals.get('name', '') + _(' Sequence packing'), 'prefix': vals.get('code', '') + '/PACK/', 'padding': 5}, context=context)
+ pick_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': vals.get('name', '') + _(' Sequence picking'), 'prefix': vals.get('code', '') + '/PICK/', 'padding': 5}, context=context)
+ int_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': vals.get('name', '') + _(' Sequence internal'), 'prefix': vals.get('code', '') + '/INT/', 'padding': 5}, context=context)
+
+ #create WH
+ new_id = super(stock_warehouse, self).create(cr, uid, vals=vals, context=context)
+
+ warehouse = self.browse(cr, uid, new_id, context=context)
+ wh_stock_loc = warehouse.lot_stock_id
+ wh_input_stock_loc = warehouse.wh_input_stock_loc_id
+ wh_output_stock_loc = warehouse.wh_output_stock_loc_id
+ wh_pack_stock_loc = warehouse.wh_pack_stock_loc_id
+
+ #fetch customer and supplier locations, for references
+ customer_loc, supplier_loc = self._get_partner_locations(cr, uid, new_id, context=context)
+
+ #create in, out, internal picking types for warehouse
+ input_loc = wh_input_stock_loc
+ if warehouse.reception_steps == 'one_step':
+ input_loc = wh_stock_loc
+ output_loc = wh_output_stock_loc
+ if warehouse.delivery_steps == 'ship_only':
+ output_loc = wh_stock_loc
+
+ #choose the next available color for the picking types of this warehouse
+ color = 0
+ available_colors = [c%9 for c in range(3, 12)] # put flashy colors first
+ all_used_colors = self.pool.get('stock.picking.type').search_read(cr, uid, [('warehouse_id', '!=', False), ('color', '!=', False)], ['color'], order='color')
+ #don't use sets to preserve the list order
+ for x in all_used_colors:
+ if x['color'] in available_colors:
+ available_colors.remove(x['color'])
+ if available_colors:
+ color = available_colors[0]
+
+ #order the picking types with a sequence allowing to have the following suit for each warehouse: reception, internal, pick, pack, ship.
+ max_sequence = self.pool.get('stock.picking.type').search_read(cr, uid, [], ['sequence'], order='sequence desc')
+ max_sequence = max_sequence and max_sequence[0]['sequence'] or 0
+
+ in_type_id = picking_type_obj.create(cr, uid, vals={
+ 'name': _('Receptions'),
+ 'warehouse_id': new_id,
+ 'code': 'incoming',
+ 'sequence_id': in_seq_id,
+ 'default_location_src_id': supplier_loc.id,
+ 'default_location_dest_id': input_loc.id,
+ 'sequence': max_sequence + 1,
+ 'color': color}, context=context)
+ out_type_id = picking_type_obj.create(cr, uid, vals={
+ 'name': _('Delivery Orders'),
+ 'warehouse_id': new_id,
+ 'code': 'outgoing',
+ 'sequence_id': out_seq_id,
+ 'return_picking_type_id': in_type_id,
+ 'default_location_src_id': output_loc.id,
+ 'default_location_dest_id': customer_loc.id,
+ 'sequence': max_sequence + 4,
+ 'color': color}, context=context)
+ picking_type_obj.write(cr, uid, [in_type_id], {'return_picking_type_id': out_type_id}, context=context)
+ int_type_id = picking_type_obj.create(cr, uid, vals={
+ 'name': _('Internal Transfers'),
+ 'warehouse_id': new_id,
+ 'code': 'internal',
+ 'sequence_id': int_seq_id,
+ 'default_location_src_id': wh_stock_loc.id,
+ 'default_location_dest_id': wh_stock_loc.id,
+ 'active': True,
+ 'sequence': max_sequence + 2,
+ 'color': color}, context=context)
+ pack_type_id = picking_type_obj.create(cr, uid, vals={
+ 'name': _('Pack'),
+ 'warehouse_id': new_id,
+ 'code': 'internal',
+ 'sequence_id': pack_seq_id,
+ 'default_location_src_id': wh_pack_stock_loc.id,
+ 'default_location_dest_id': output_loc.id,
+ 'active': delivery_steps == 'pick_pack_ship',
+ 'sequence': max_sequence + 3,
+ 'color': color}, context=context)
+ pick_type_id = picking_type_obj.create(cr, uid, vals={
+ 'name': _('Pick'),
+ 'warehouse_id': new_id,
+ 'code': 'internal',
+ 'sequence_id': pick_seq_id,
+ 'default_location_src_id': wh_stock_loc.id,
+ 'default_location_dest_id': wh_pack_stock_loc.id,
+ 'active': delivery_steps != 'ship_only',
+ 'sequence': max_sequence + 2,
+ 'color': color}, context=context)
+
+ #write picking types on WH
+ vals = {
+ 'in_type_id': in_type_id,
+ 'out_type_id': out_type_id,
+ 'pack_type_id': pack_type_id,
+ 'pick_type_id': pick_type_id,
+ 'int_type_id': int_type_id,
+ }
+ super(stock_warehouse, self).write(cr, uid, new_id, vals=vals, context=context)
+ warehouse.refresh()
+
+ #create routes and push/pull rules
+ new_objects_dict = self.create_routes(cr, uid, new_id, warehouse, context=context)
+ self.write(cr, uid, warehouse.id, new_objects_dict, context=context)
+ return new_id
+
+ def _format_rulename(self, cr, uid, obj, from_loc, dest_loc, context=None):
+ return obj.code + ': ' + from_loc.name + ' -> ' + dest_loc.name
+
+ def _format_routename(self, cr, uid, obj, name, context=None):
+ return obj.name + ': ' + name
+
+ def get_routes_dict(self, cr, uid, ids, warehouse, context=None):
+ #fetch customer and supplier locations, for references
+ customer_loc, supplier_loc = self._get_partner_locations(cr, uid, ids, context=context)
+
+ return {
+ 'one_step': (_('Reception in 1 step'), []),
+ 'two_steps': (_('Reception in 2 steps'), [(warehouse.wh_input_stock_loc_id, warehouse.lot_stock_id, warehouse.int_type_id.id)]),
+ 'three_steps': (_('Reception in 3 steps'), [(warehouse.wh_input_stock_loc_id, warehouse.wh_qc_stock_loc_id, warehouse.int_type_id.id), (warehouse.wh_qc_stock_loc_id, warehouse.lot_stock_id, warehouse.int_type_id.id)]),
+ 'crossdock': (_('Cross-Dock'), [(warehouse.wh_input_stock_loc_id, warehouse.wh_output_stock_loc_id, warehouse.int_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]),
+ 'ship_only': (_('Ship Only'), [(warehouse.lot_stock_id, customer_loc, warehouse.out_type_id.id)]),
+ 'pick_ship': (_('Pick + Ship'), [(warehouse.lot_stock_id, warehouse.wh_output_stock_loc_id, warehouse.pick_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]),
+ 'pick_pack_ship': (_('Pick + Pack + Ship'), [(warehouse.lot_stock_id, warehouse.wh_pack_stock_loc_id, warehouse.pick_type_id.id), (warehouse.wh_pack_stock_loc_id, warehouse.wh_output_stock_loc_id, warehouse.pack_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]),
+ }
+
+ def _handle_renaming(self, cr, uid, warehouse, name, code, context=None):
+ location_obj = self.pool.get('stock.location')
+ route_obj = self.pool.get('stock.location.route')
+ pull_obj = self.pool.get('procurement.rule')
+ push_obj = self.pool.get('stock.location.path')
+ #rename location
+ location_id = warehouse.lot_stock_id.location_id.id
+ location_obj.write(cr, uid, location_id, {'name': code}, context=context)
+ #rename route and push-pull rules
+ for route in warehouse.route_ids:
+ route_obj.write(cr, uid, route.id, {'name': route.name.replace(warehouse.name, name, 1)}, context=context)
+ for pull in route.pull_ids:
+ pull_obj.write(cr, uid, pull.id, {'name': pull.name.replace(warehouse.name, name, 1)}, context=context)
+ for push in route.push_ids:
+ push_obj.write(cr, uid, push.id, {'name': push.name.replace(warehouse.name, name, 1)}, context=context)
+ #change the mto pull rule name
+ pull_obj.write(cr, uid, warehouse.mto_pull_id.id, {'name': warehouse.mto_pull_id.name.replace(warehouse.name, name, 1)}, context=context)
+
+ def _check_delivery_resupply(self, cr, uid, warehouse, new_location, change_to_multiple, context=None):
+ """ Will check if the resupply routes from this warehouse follow the changes of number of delivery steps """
+ #Check routes that are being delivered by this warehouse and change the rule going to transit location
+ route_obj = self.pool.get("stock.location.route")
+ pull_obj = self.pool.get("procurement.rule")
+ routes = route_obj.search(cr, uid, [('supplier_wh_id','=', warehouse.id)], context=context)
+ pulls= pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_id.usage', '=', 'transit')], context=context)
+ if pulls:
+ pull_obj.write(cr, uid, pulls, {'location_src_id': new_location, 'procure_method': change_to_multiple and "make_to_order" or "make_to_stock"}, context=context)
+ # Create or clean MTO rules
+ mto_route_id = self._get_mto_route(cr, uid, context=context)
+ if not change_to_multiple:
+ # If single delivery we should create the necessary MTO rules for the resupply
+ # pulls = pull_obj.search(cr, uid, ['&', ('route_id', '=', mto_route_id), ('location_id.usage', '=', 'transit'), ('location_src_id', '=', warehouse.lot_stock_id.id)], context=context)
+ pull_recs = pull_obj.browse(cr, uid, pulls, context=context)
+ transfer_locs = list(set([x.location_id for x in pull_recs]))
+ vals = [(warehouse.lot_stock_id , x, warehouse.out_type_id.id) for x in transfer_locs]
+ mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, vals, context=context)
+ pull_obj.create(cr, uid, mto_pull_vals, context=context)
+ else:
+ # We need to delete all the MTO pull rules, otherwise they risk to be used in the system
+ pulls = pull_obj.search(cr, uid, ['&', ('route_id', '=', mto_route_id), ('location_id.usage', '=', 'transit'), ('location_src_id', '=', warehouse.lot_stock_id.id)], context=context)
+ if pulls:
+ pull_obj.unlink(cr, uid, pulls, context=context)
+
+ def _check_reception_resupply(self, cr, uid, warehouse, new_location, context=None):
+ """
+ Will check if the resupply routes to this warehouse follow the changes of number of reception steps
+ """
+ #Check routes that are being delivered by this warehouse and change the rule coming from transit location
+ route_obj = self.pool.get("stock.location.route")
+ pull_obj = self.pool.get("procurement.rule")
+ routes = route_obj.search(cr, uid, [('supplied_wh_id','=', warehouse.id)], context=context)
+ pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_src_id.usage', '=', 'transit')], context=context)
+ if pulls:
+ pull_obj.write(cr, uid, pulls, {'location_id': new_location}, context=context)
+
+ def _check_resupply(self, cr, uid, warehouse, reception_new, delivery_new, context=None):
+ if reception_new:
+ old_val = warehouse.reception_steps
+ new_val = reception_new
+ change_to_one = (old_val != 'one_step' and new_val == 'one_step')
+ change_to_multiple = (old_val == 'one_step' and new_val != 'one_step')
+ if change_to_one or change_to_multiple:
+ new_location = change_to_one and warehouse.lot_stock_id.id or warehouse.wh_input_stock_loc_id.id
+ self._check_reception_resupply(cr, uid, warehouse, new_location, context=context)
+ if delivery_new:
+ old_val = warehouse.delivery_steps
+ new_val = delivery_new
+ change_to_one = (old_val != 'ship_only' and new_val == 'ship_only')
+ change_to_multiple = (old_val == 'ship_only' and new_val != 'ship_only')
+ if change_to_one or change_to_multiple:
+ new_location = change_to_one and warehouse.lot_stock_id.id or warehouse.wh_output_stock_loc_id.id
+ self._check_delivery_resupply(cr, uid, warehouse, new_location, change_to_multiple, context=context)
+
    def write(self, cr, uid, ids, vals, context=None):
        """Override write() to keep a warehouse's technical records in sync
        with its configuration.

        Depending on the values written, this:
        * switches active locations/routes when the reception or delivery
          steps change, and re-checks the resupply setup,
        * renames the picking-type sequences when the name or code changes,
        * creates/deletes inter-warehouse resupply routes when the supplier
          warehouses or the default resupply warehouse change.

        :param vals: dictionary of field values to write
        :return: result of the standard osv.osv.write()
        """
        if context is None:
            context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        seq_obj = self.pool.get('ir.sequence')
        route_obj = self.pool.get('stock.location.route')
        # locations/routes to (de)activate may currently be archived, so
        # browse/search with active_test disabled
        context_with_inactive = context.copy()
        context_with_inactive['active_test'] = False
        for warehouse in self.browse(cr, uid, ids, context=context_with_inactive):
            #first of all, check if we need to delete and recreate route
            if vals.get('reception_steps') or vals.get('delivery_steps'):
                #activate and deactivate location according to reception and delivery option
                self.switch_location(cr, uid, warehouse.id, warehouse, vals.get('reception_steps', False), vals.get('delivery_steps', False), context=context)
                # switch between route
                self.change_route(cr, uid, ids, warehouse, vals.get('reception_steps', False), vals.get('delivery_steps', False), context=context_with_inactive)
                # Check if we need to change something to resupply warehouses and associated MTO rules
                self._check_resupply(cr, uid, warehouse, vals.get('reception_steps'), vals.get('delivery_steps'), context=context)
                # reload the browse record so the renaming below sees the new data
                warehouse.refresh()
            if vals.get('code') or vals.get('name'):
                name = warehouse.name
                #rename sequence
                if vals.get('name'):
                    name = vals.get('name', warehouse.name)
                self._handle_renaming(cr, uid, warehouse, name, vals.get('code', warehouse.code), context=context_with_inactive)
                seq_obj.write(cr, uid, warehouse.in_type_id.sequence_id.id, {'name': name + _(' Sequence in'), 'prefix': vals.get('code', warehouse.code) + '\IN\\'}, context=context)
                seq_obj.write(cr, uid, warehouse.out_type_id.sequence_id.id, {'name': name + _(' Sequence out'), 'prefix': vals.get('code', warehouse.code) + '\OUT\\'}, context=context)
                seq_obj.write(cr, uid, warehouse.pack_type_id.sequence_id.id, {'name': name + _(' Sequence packing'), 'prefix': vals.get('code', warehouse.code) + '\PACK\\'}, context=context)
                seq_obj.write(cr, uid, warehouse.pick_type_id.sequence_id.id, {'name': name + _(' Sequence picking'), 'prefix': vals.get('code', warehouse.code) + '\PICK\\'}, context=context)
                seq_obj.write(cr, uid, warehouse.int_type_id.sequence_id.id, {'name': name + _(' Sequence internal'), 'prefix': vals.get('code', warehouse.code) + '\INT\\'}, context=context)
            if vals.get('resupply_wh_ids') and not vals.get('resupply_route_ids'):
                for cmd in vals.get('resupply_wh_ids'):
                    # only the (6, 0, ids) "replace" command is handled here
                    if cmd[0] == 6:
                        new_ids = set(cmd[2])
                        old_ids = set([wh.id for wh in warehouse.resupply_wh_ids])
                        to_add_wh_ids = new_ids - old_ids
                        if to_add_wh_ids:
                            supplier_warehouses = self.browse(cr, uid, list(to_add_wh_ids), context=context)
                            self._create_resupply_routes(cr, uid, warehouse, supplier_warehouses, warehouse.default_resupply_wh_id, context=context)
                        to_remove_wh_ids = old_ids - new_ids
                        if to_remove_wh_ids:
                            to_remove_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', 'in', list(to_remove_wh_ids))], context=context)
                            if to_remove_route_ids:
                                route_obj.unlink(cr, uid, to_remove_route_ids, context=context)
                    else:
                        #not implemented
                        pass
            if 'default_resupply_wh_id' in vals:
                if vals.get('default_resupply_wh_id') == warehouse.id:
                    raise osv.except_osv(_('Warning'),_('The default resupply warehouse should be different than the warehouse itself!'))
                if warehouse.default_resupply_wh_id:
                    #remove the existing resupplying route on the warehouse
                    to_remove_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', '=', warehouse.default_resupply_wh_id.id)], context=context)
                    for inter_wh_route_id in to_remove_route_ids:
                        # recursive write: (3, id) unlinks the route from route_ids
                        self.write(cr, uid, [warehouse.id], {'route_ids': [(3, inter_wh_route_id)]})
                if vals.get('default_resupply_wh_id'):
                    #assign the new resupplying route on all products
                    to_assign_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', '=', vals.get('default_resupply_wh_id'))], context=context)
                    for inter_wh_route_id in to_assign_route_ids:
                        self.write(cr, uid, [warehouse.id], {'route_ids': [(4, inter_wh_route_id)]})

        return super(stock_warehouse, self).write(cr, uid, ids, vals=vals, context=context)
+
+ def get_all_routes_for_wh(self, cr, uid, warehouse, context=None):
+ route_obj = self.pool.get("stock.location.route")
+ all_routes = [route.id for route in warehouse.route_ids]
+ all_routes += route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id)], context=context)
+ all_routes += [warehouse.mto_pull_id.route_id.id]
+ return all_routes
+
+ def view_all_routes_for_wh(self, cr, uid, ids, context=None):
+ all_routes = []
+ for wh in self.browse(cr, uid, ids, context=context):
+ all_routes += self.get_all_routes_for_wh(cr, uid, wh, context=context)
+
+ domain = [('id', 'in', all_routes)]
+ return {
+ 'name': _('Warehouse\'s Routes'),
+ 'domain': domain,
+ 'res_model': 'stock.location.route',
+ 'type': 'ir.actions.act_window',
+ 'view_id': False,
+ 'view_mode': 'tree,form',
+ 'view_type': 'form',
+ 'limit': 20
+ }
+
class stock_location_path(osv.osv):
    """Push rule ("pushed flow").

    When a stock move reaches ``location_from_id``, the rule either chains a
    new move towards ``location_dest_id`` or redirects the original move
    there, depending on the ``auto`` mode (see ``_apply``).
    """
    _name = "stock.location.path"
    _description = "Pushed Flows"
    _order = "name"

    def _get_rules(self, cr, uid, ids, context=None):
        # store trigger for 'route_sequence': called with route ids, returns
        # the ids of the push rules whose related sequence must be recomputed
        res = []
        for route in self.browse(cr, uid, ids, context=context):
            res += [x.id for x in route.push_ids]
        return res

    _columns = {
        'name': fields.char('Operation Name', size=64, required=True),
        'company_id': fields.many2one('res.company', 'Company'),
        'route_id': fields.many2one('stock.location.route', 'Route'),
        'location_from_id': fields.many2one('stock.location', 'Source Location', ondelete='cascade', select=1, required=True),
        'location_dest_id': fields.many2one('stock.location', 'Destination Location', ondelete='cascade', select=1, required=True),
        'delay': fields.integer('Delay (days)', help="Number of days to do this transition"),
        'picking_type_id': fields.many2one('stock.picking.type', 'Type of the new Operation', required=True, help="This is the picking type associated with the different pickings"),
        'auto': fields.selection(
            [('auto','Automatic Move'), ('manual','Manual Operation'),('transparent','Automatic No Step Added')],
            'Automatic Move',
            required=True, select=1,
            help="This is used to define paths the product has to follow within the location tree.\n" \
                "The 'Automatic Move' value will create a stock move after the current one that will be "\
                "validated automatically. With 'Manual Operation', the stock move has to be validated "\
                "by a worker. With 'Automatic No Step Added', the location is replaced in the original move."
            ),
        'propagate': fields.boolean('Propagate cancel and split', help='If checked, when the previous move is cancelled or split, the move generated by this move will too'),
        'active': fields.boolean('Active', help="If unchecked, it will allow you to hide the rule without removing it."),
        'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse'),
        # related field stored for sorting; kept up to date via the two
        # store triggers below (route sequence change / route reassignment)
        'route_sequence': fields.related('route_id', 'sequence', string='Route Sequence',
            store={
                'stock.location.route': (_get_rules, ['sequence'], 10),
                'stock.location.path': (lambda self, cr, uid, ids, c={}: ids, ['route_id'], 10),
        }),
        'sequence': fields.integer('Sequence'),
    }
    _defaults = {
        'auto': 'auto',
        'delay': 0,
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'procurement.order', context=c),
        'propagate': True,
        'active': True,
    }

    def _apply(self, cr, uid, rule, move, context=None):
        """Apply the push rule ``rule`` to the stock move ``move``.

        'transparent' rules rewrite the move's destination in place (and
        re-apply push rules on the new destination); any other mode chains a
        copy of the move after the current one.
        """
        move_obj = self.pool.get('stock.move')
        # shift the scheduled dates by the rule's delay
        newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
        if rule.auto == 'transparent':
            old_dest_location = move.location_dest_id.id
            move_obj.write(cr, uid, [move.id], {
                'date': newdate,
                'date_expected': newdate,
                'location_dest_id': rule.location_dest_id.id
            })
            move.refresh()
            #avoid looping if a push rule is not well configured
            if rule.location_dest_id.id != old_dest_location:
                #call again push_apply to see if a next step is defined
                move_obj._push_apply(cr, uid, [move], context=context)
        else:
            # chain a new move after the current one and link them together
            move_id = move_obj.copy(cr, uid, move.id, {
                'location_id': move.location_dest_id.id,
                'location_dest_id': rule.location_dest_id.id,
                'date': newdate,
                'company_id': rule.company_id and rule.company_id.id or False,
                'date_expected': newdate,
                'picking_id': False,
                'picking_type_id': rule.picking_type_id and rule.picking_type_id.id or False,
                'propagate': rule.propagate,
                'push_rule_id': rule.id,
                'warehouse_id': rule.warehouse_id and rule.warehouse_id.id or False,
            })
            move_obj.write(cr, uid, [move.id], {
                'move_dest_id': move_id,
            })
            move_obj.action_confirm(cr, uid, [move_id], context=None)
+
+
+# -------------------------
+# Packaging related stuff
+# -------------------------
+
# register the RML report used to print package barcode labels
from openerp.report import report_sxw
report_sxw.report_sxw('report.stock.quant.package.barcode', 'stock.quant.package', 'addons/stock/report/package_barcode.rml')
+
class stock_package(osv.osv):
    """
    These are the packages, containing quants and/or other packages.

    Packages form a tree (``parent_id``/``children_ids``) maintained with
    the ORM parent_store mechanism for fast ``child_of`` searches.
    """
    _name = "stock.quant.package"
    _description = "Physical Packages"
    _parent_name = "parent_id"
    _parent_store = True
    _parent_order = 'name'
    _order = 'parent_left'

    def name_get(self, cr, uid, ids, context=None):
        # display the full path from the topmost ancestor down to the package
        res = self._complete_name(cr, uid, ids, 'complete_name', None, context=context)
        return res.items()

    def _complete_name(self, cr, uid, ids, name, args, context=None):
        """ Forms complete name of location from parent location to child location.
        @return: Dictionary of values
        """
        res = {}
        for m in self.browse(cr, uid, ids, context=context):
            res[m.id] = m.name
            parent = m.parent_id
            while parent:
                res[m.id] = parent.name + ' / ' + res[m.id]
                parent = parent.parent_id
        return res

    def _get_packages(self, cr, uid, ids, context=None):
        """Returns packages from quants for store.

        NOTE(review): used as a store trigger on 'stock.quant', so `ids` are
        quant ids here — confirm against the store declarations below.
        """
        res = set()
        for quant in self.browse(cr, uid, ids, context=context):
            if quant.package_id:
                res.add(quant.package_id.id)
        return list(res)

    def _get_packages_to_relocate(self, cr, uid, ids, context=None):
        # store trigger: a package change impacts itself and its direct parent
        res = set()
        for pack in self.browse(cr, uid, ids, context=context):
            res.add(pack.id)
            if pack.parent_id:
                res.add(pack.parent_id.id)
        return list(res)

    def _get_package_info(self, cr, uid, ids, name, args, context=None):
        """Compute location/company/owner of each package from its first
        quant, falling back on its first child package, and finally on the
        current user's company."""
        default_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
        # build one result dict PER id: the previous `{}.fromkeys(ids, {...})`
        # made every id share (and mutate) the very same dictionary
        res = dict((pack_id, {'location_id': False, 'company_id': default_company_id, 'owner_id': False}) for pack_id in ids)
        for pack in self.browse(cr, uid, ids, context=context):
            if pack.quant_ids:
                res[pack.id]['location_id'] = pack.quant_ids[0].location_id.id
                res[pack.id]['owner_id'] = pack.quant_ids[0].owner_id and pack.quant_ids[0].owner_id.id or False
                res[pack.id]['company_id'] = pack.quant_ids[0].company_id.id
            elif pack.children_ids:
                res[pack.id]['location_id'] = pack.children_ids[0].location_id and pack.children_ids[0].location_id.id or False
                res[pack.id]['owner_id'] = pack.children_ids[0].owner_id and pack.children_ids[0].owner_id.id or False
                res[pack.id]['company_id'] = pack.children_ids[0].company_id and pack.children_ids[0].company_id.id or False
        return res

    _columns = {
        'name': fields.char('Package Reference', size=64, select=True),
        'complete_name': fields.function(_complete_name, type='char', string="Package Name",),
        'parent_left': fields.integer('Left Parent', select=1),
        'parent_right': fields.integer('Right Parent', select=1),
        'packaging_id': fields.many2one('product.packaging', 'Packaging', help="This field should be completed only if everything inside the package share the same product, otherwise it doesn't really makes sense."),
        'ul_id': fields.many2one('product.ul', 'Logistic Unit'),
        'location_id': fields.function(_get_package_info, type='many2one', relation='stock.location', string='Location', multi="package",
                                    store={
                                       'stock.quant': (_get_packages, ['location_id'], 10),
                                       'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
                                    }, readonly=True),
        'quant_ids': fields.one2many('stock.quant', 'package_id', 'Bulk Content', readonly=True),
        'parent_id': fields.many2one('stock.quant.package', 'Parent Package', help="The package containing this item", ondelete='restrict', readonly=True),
        'children_ids': fields.one2many('stock.quant.package', 'parent_id', 'Contained Packages', readonly=True),
        'company_id': fields.function(_get_package_info, type="many2one", relation='res.company', string='Company', multi="package",
                                    store={
                                       'stock.quant': (_get_packages, ['company_id'], 10),
                                       'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
                                    }, readonly=True),
        'owner_id': fields.function(_get_package_info, type='many2one', relation='res.partner', string='Owner', multi="package",
                                store={
                                       'stock.quant': (_get_packages, ['owner_id'], 10),
                                       'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
                                    }, readonly=True),
    }
    _defaults = {
        'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.quant.package') or _('Unknown Pack')
    }

    def _check_location_constraint(self, cr, uid, packs, context=None):
        '''checks that all quants in a package are stored in the same location. This function cannot be used
           as a constraint because it needs to be checked on pack operations (they may not call write on the
           package)
        '''
        quant_obj = self.pool.get('stock.quant')
        for pack in packs:
            # walk up to the topmost package: the constraint applies to the
            # whole tree the pack belongs to
            parent = pack
            while parent.parent_id:
                parent = parent.parent_id
            quant_ids = self.get_content(cr, uid, [parent.id], context=context)
            # only consider quants with a positive quantity
            quants = [x for x in quant_obj.browse(cr, uid, quant_ids, context=context) if x.qty > 0]
            location_id = quants and quants[0].location_id.id or False
            # a non-empty list is always truthy, so the previous
            # `if not [q.location_id.id == location_id for q in quants]`
            # never raised; all() performs the intended comparison
            if not all(quant.location_id.id == location_id for quant in quants):
                raise osv.except_osv(_('Error'), _('Everything inside a package should be in the same location'))
        return True

    def action_print(self, cr, uid, ids, context=None):
        """Return the report action printing the package barcode label."""
        if context is None:
            context = {}
        datas = {
            'ids': context.get('active_id') and [context.get('active_id')] or ids,
            'model': 'stock.quant.package',
            'form': self.read(cr, uid, ids)[0]
        }
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'stock.quant.package.barcode',
            'datas': datas
        }

    def unpack(self, cr, uid, ids, context=None):
        """Move the quants and sub-packages one level up (to the parent
        package, or out of any package) and delete the emptied packages."""
        quant_obj = self.pool.get('stock.quant')
        for package in self.browse(cr, uid, ids, context=context):
            quant_ids = [quant.id for quant in package.quant_ids]
            quant_obj.write(cr, uid, quant_ids, {'package_id': package.parent_id.id or False}, context=context)
            children_package_ids = [child_package.id for child_package in package.children_ids]
            self.write(cr, uid, children_package_ids, {'parent_id': package.parent_id.id or False}, context=context)
        #delete current package since it contains nothing anymore
        self.unlink(cr, uid, ids, context=context)
        return self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'stock', 'action_package_view', context=context)

    def get_content(self, cr, uid, ids, context=None):
        """Return the ids of all quants contained in the given packages,
        including those of nested sub-packages."""
        child_package_ids = self.search(cr, uid, [('id', 'child_of', ids)], context=context)
        return self.pool.get('stock.quant').search(cr, uid, [('package_id', 'in', child_package_ids)], context=context)

    def get_content_package(self, cr, uid, ids, context=None):
        """Window action showing the quants contained in the packages."""
        quants_ids = self.get_content(cr, uid, ids, context=context)
        res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'stock', 'quantsact', context=context)
        res['domain'] = [('id', 'in', quants_ids)]
        return res

    def _get_product_total_qty(self, cr, uid, package_record, product_id, context=None):
        '''Find the total quantity of the given product inside the given
        package browse record (sub-packages included).'''
        quant_obj = self.pool.get('stock.quant')
        all_quant_ids = self.get_content(cr, uid, [package_record.id], context=context)
        total = 0
        for quant in quant_obj.browse(cr, uid, all_quant_ids, context=context):
            if quant.product_id.id == product_id:
                total += quant.qty
        return total

    def _get_all_products_quantities(self, cr, uid, package_id, context=None):
        '''This function computes the different product quantities for the given package
        '''
        quant_obj = self.pool.get('stock.quant')
        res = {}
        for quant in quant_obj.browse(cr, uid, self.get_content(cr, uid, package_id, context=context)):
            if quant.product_id.id not in res:
                res[quant.product_id.id] = 0
            res[quant.product_id.id] += quant.qty
        return res

    def copy(self, cr, uid, id, default=None, context=None):
        # a copied package gets a fresh name and never inherits contents
        if default is None:
            default = {}
        if not default.get('name'):
            default['name'] = self.pool.get('ir.sequence').get(cr, uid, 'stock.quant.package') or _('Unknown Pack')
        default['quant_ids'] = []
        default['children_ids'] = []
        return super(stock_package, self).copy(cr, uid, id, default, context=context)

    def copy_pack(self, cr, uid, id, default_pack_values=None, default=None, context=None):
        """Copy a package together with the pack operations that were
        building it, pointing the copied operations to the new package.

        :return: id of the new package
        """
        stock_pack_operation_obj = self.pool.get('stock.pack.operation')
        if default is None:
            default = {}
        new_package_id = self.copy(cr, uid, id, default_pack_values, context=context)
        default['result_package_id'] = new_package_id
        op_ids = stock_pack_operation_obj.search(cr, uid, [('result_package_id', '=', id)], context=context)
        for op_id in op_ids:
            stock_pack_operation_obj.copy(cr, uid, op_id, default, context=context)
        # return the new package id so callers can use it (previously None)
        return new_package_id
+
+
class stock_pack_operation(osv.osv):
    """A packing operation: moving a product quantity, a lot and/or a whole
    package between two locations within a picking."""
    _name = "stock.pack.operation"
    _description = "Packing Operation"

    def _get_remaining_prod_quantities(self, cr, uid, operation, context=None):
        '''Get the remaining quantities per product on an operation with a package. This function returns a dictionary'''
        #if the operation doesn't concern a package, it's not relevant to call this function
        if not operation.package_id or operation.product_id:
            return {operation.product_id.id: operation.remaining_qty}
        #get the total of products the package contains
        res = self.pool.get('stock.quant.package')._get_all_products_quantities(cr, uid, operation.package_id.id, context=context)
        #reduce by the quantities linked to a move
        for record in operation.linked_move_operation_ids:
            if record.move_id.product_id.id not in res:
                res[record.move_id.product_id.id] = 0
            res[record.move_id.product_id.id] -= record.qty
        return res

    def _get_remaining_qty(self, cr, uid, ids, name, args, context=None):
        """Function field: quantity of the operation not yet consumed by
        linked moves, expressed in the operation's UoM."""
        uom_obj = self.pool.get('product.uom')
        res = {}
        for ops in self.browse(cr, uid, ids, context=context):
            res[ops.id] = 0
            if ops.package_id and not ops.product_id:
                #dont try to compute the remaining quantity for packages because it's not relevant (a package could include different products).
                #should use _get_remaining_prod_quantities instead
                continue
            else:
                # work in the product's reference UoM, then convert back
                qty = ops.product_qty
                if ops.product_uom_id:
                    qty = uom_obj._compute_qty_obj(cr, uid, ops.product_uom_id, ops.product_qty, ops.product_id.uom_id, context=context)
                for record in ops.linked_move_operation_ids:
                    qty -= record.qty
                #converting the remaining quantity in the pack operation UoM
                if ops.product_uom_id:
                    qty = uom_obj._compute_qty_obj(cr, uid, ops.product_id.uom_id, qty, ops.product_uom_id, context=context)
                res[ops.id] = qty
        return res

    def product_id_change(self, cr, uid, ids, product_id, product_uom_id, product_qty, context=None):
        """On-change for the product field: run the standard checks and
        default the UoM to the product's UoM when none is set."""
        res = self.on_change_tests(cr, uid, ids, product_id, product_uom_id, product_qty, context=context)
        if product_id and not product_uom_id:
            product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
            res['value']['product_uom_id'] = product.uom_id.id
        return res

    def on_change_tests(self, cr, uid, ids, product_id, product_uom_id, product_qty, context=None):
        """Warn the user when the chosen UoM is incompatible with the
        product's UoM category, or when the quantity does not respect the
        UoM rounding."""
        res = {'value': {}}
        uom_obj = self.pool.get('product.uom')
        if product_id:
            product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
            product_uom_id = product_uom_id or product.uom_id.id
            selected_uom = uom_obj.browse(cr, uid, product_uom_id, context=context)
            if selected_uom.category_id.id != product.uom_id.category_id.id:
                res['warning'] = {
                    'title': _('Warning: wrong UoM!'),
                    'message': _('The selected UoM for product %s is not compatible with the UoM set on the product form. \nPlease choose an UoM within the same UoM category.') % (product.name)
                }
            if product_qty and 'warning' not in res:
                # same source and target UoM on purpose: this only applies the
                # UoM rounding to detect quantities that would be adjusted
                rounded_qty = uom_obj._compute_qty(cr, uid, product_uom_id, product_qty, product_uom_id, round=True)
                if rounded_qty != product_qty:
                    res['warning'] = {
                        'title': _('Warning: wrong quantity!'),
                        'message': _('The chosen quantity for product %s is not compatible with the UoM rounding. It will be automatically converted at confirmation') % (product.name)
                    }
        return res

    _columns = {
        'picking_id': fields.many2one('stock.picking', 'Stock Picking', help='The stock operation where the packing has been made', required=True),
        'product_id': fields.many2one('product.product', 'Product', ondelete="CASCADE"),  # 1
        'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure'),
        'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
        'qty_done': fields.float('Quantity Processed', digits_compute=dp.get_precision('Product Unit of Measure')),
        'package_id': fields.many2one('stock.quant.package', 'Package'),  # 2
        'lot_id': fields.many2one('stock.production.lot', 'Lot/Serial Number'),
        'result_package_id': fields.many2one('stock.quant.package', 'Container Package', help="If set, the operations are packed into this package", required=False, ondelete='cascade'),
        'date': fields.datetime('Date', required=True),
        'owner_id': fields.many2one('res.partner', 'Owner', help="Owner of the quants"),
        #'update_cost': fields.boolean('Need cost update'),
        'cost': fields.float("Cost", help="Unit Cost for this product line"),
        'currency': fields.many2one('res.currency', string="Currency", help="Currency in which Unit cost is expressed", ondelete='CASCADE'),
        'linked_move_operation_ids': fields.one2many('stock.move.operation.link', 'operation_id', string='Linked Moves', readonly=True, help='Moves impacted by this operation for the computation of the remaining quantities'),
        'remaining_qty': fields.function(_get_remaining_qty, type='float', string='Remaining Qty'),
        'location_id': fields.many2one('stock.location', 'Location From', required=True),
        'location_dest_id': fields.many2one('stock.location', 'Location To', required=True),
        'processed': fields.selection([('true','Yes'), ('false','No')],'Has been processed?', required=True),
    }

    _defaults = {
        # NOTE(review): 'date' is a datetime column but defaults to a
        # date-level value — confirm this is intended
        'date': fields.date.context_today,
        'qty_done': 0,
        'processed': lambda *a: 'false',
    }

    def write(self, cr, uid, ids, vals, context=None):
        # after any change, recompute the remaining quantities of the
        # impacted pickings (unless the caller disables it via the context)
        context = context or {}
        res = super(stock_pack_operation, self).write(cr, uid, ids, vals, context=context)
        if isinstance(ids, (int, long)):
            ids = [ids]
        if not context.get("no_recompute"):
            pickings = vals.get('picking_id') and [vals['picking_id']] or list(set([x.picking_id.id for x in self.browse(cr, uid, ids, context=context)]))
            self.pool.get("stock.picking").do_recompute_remaining_quantities(cr, uid, pickings, context=context)
        return res

    def create(self, cr, uid, vals, context=None):
        # keep the picking's remaining quantities up to date on creation
        context = context or {}
        res_id = super(stock_pack_operation, self).create(cr, uid, vals, context=context)
        if vals.get("picking_id") and not context.get("no_recompute"):
            self.pool.get("stock.picking").do_recompute_remaining_quantities(cr, uid, [vals['picking_id']], context=context)
        return res_id

    def action_drop_down(self, cr, uid, ids, context=None):
        ''' Used by barcode interface to say that pack_operation has been moved from src location
            to destination location, if qty_done is less than product_qty than we have to split the
            operation in two to process the one with the qty moved
        '''
        processed_ids = []
        # fix: propagate the caller's context instead of discarding it
        for pack_op in self.browse(cr, uid, ids, context=context):
            op = pack_op.id
            if pack_op.qty_done < pack_op.product_qty:
                # we split the operation in two
                op = self.copy(cr, uid, pack_op.id, {'product_qty': pack_op.qty_done, 'qty_done': pack_op.qty_done}, context=context)
                # fix: only update the current operation, not every id in the
                # batch (writing `ids` corrupted the sibling operations)
                self.write(cr, uid, [pack_op.id], {'product_qty': pack_op.product_qty - pack_op.qty_done, 'qty_done': 0}, context=context)
            processed_ids.append(op)
        self.write(cr, uid, processed_ids, {'processed': 'true'}, context=context)

    def create_and_assign_lot(self, cr, uid, id, name, context=None):
        ''' Used by barcode interface to create a new lot and assign it to the operation
        '''
        obj = self.browse(cr, uid, id, context=context)
        product_id = obj.product_id.id
        val = {'product_id': product_id}
        if name:
            val.update({'name': name})
        if not obj.lot_id:
            new_lot_id = self.pool.get('stock.production.lot').create(cr, uid, val, context=context)
            self.write(cr, uid, id, {'lot_id': new_lot_id}, context=context)

    def _search_and_increment(self, cr, uid, picking_id, domain, filter_visible=False, visible_op_ids=False, increment=True, context=None):
        '''Search for an operation with given 'domain' in a picking, if it exists increment the qty (+1) otherwise create it

        :param domain: list of tuple directly reusable as a domain
        context can receive a key 'current_package_id' with the package to consider for this operation
        returns True
        '''
        if context is None:
            context = {}

        #if current_package_id is given in the context, we increase the number of items in this package
        package_clause = [('result_package_id', '=', context.get('current_package_id', False))]
        existing_operation_ids = self.search(cr, uid, [('picking_id', '=', picking_id)] + domain + package_clause, context=context)
        todo_operation_ids = []
        if existing_operation_ids:
            if filter_visible:
                todo_operation_ids = [val for val in existing_operation_ids if val in visible_op_ids]
            else:
                todo_operation_ids = existing_operation_ids
        if todo_operation_ids:
            #existing operation found for the given domain and picking => increment its quantity
            operation_id = todo_operation_ids[0]
            op_obj = self.browse(cr, uid, operation_id, context=context)
            qty = op_obj.qty_done
            if increment:
                qty += 1
            else:
                # decrement, never going below zero
                qty -= 1 if qty >= 1 else 0
                if qty == 0 and op_obj.product_qty == 0:
                    #we have a line with 0 qty set, so delete it
                    self.unlink(cr, uid, [operation_id], context=context)
                    return False
            self.write(cr, uid, [operation_id], {'qty_done': qty}, context=context)
        else:
            #no existing operation found for the given domain and picking => create a new one
            values = {
                'picking_id': picking_id,
                'product_qty': 0,
                'qty_done': 1,
            }
            for key in domain:
                var_name, dummy, value = key
                uom_id = False
                if var_name == 'product_id':
                    uom_id = self.pool.get('product.product').browse(cr, uid, value, context=context).uom_id.id
                update_dict = {var_name: value}
                if uom_id:
                    update_dict['product_uom_id'] = uom_id
                values.update(update_dict)
            operation_id = self.create(cr, uid, values, context=context)
        return operation_id
+
+
class stock_move_operation_link(osv.osv):
    """
    Table making the link between stock.moves and stock.pack.operations to compute the remaining quantities on each of these objects
    """
    _name = "stock.move.operation.link"
    _description = "Link between stock moves and pack operations"

    _columns = {
        'qty': fields.float('Quantity', help="Quantity of products to consider when talking about the contribution of this pack operation towards the remaining quantity of the move (and inverse). Given in the product main uom."),
        'operation_id': fields.many2one('stock.pack.operation', 'Operation', required=True, ondelete="cascade"),
        'move_id': fields.many2one('stock.move', 'Move', required=True, ondelete="cascade"),
        'reserved_quant_id': fields.many2one('stock.quant', 'Reserved Quant', help="Technical field containing the quant that created this link between an operation and a stock move. Used at the stock_move_obj.action_done() time to avoid seeking a matching quant again"),
    }

    def get_specific_domain(self, cr, uid, record, context=None):
        '''Returns the specific domain to consider for quant selection in action_assign() or action_done() of stock.move,
        having the record given as parameter making the link between the stock move and a pack operation'''
        operation = record.operation_id
        # package part of the domain
        if operation.package_id and operation.product_id:
            # taking a product out of a box: restrict quants to that very box
            domain = [('package_id', '=', operation.package_id.id)]
        elif operation.package_id:
            # moving a whole box: anything inside it (or its sub-boxes) is fair game
            domain = [('package_id', 'child_of', [operation.package_id.id])]
        else:
            # no package information given: never open boxes
            domain = [('package_id', '=', False)]
        # restrict to the given lot when one is set, otherwise any lot is fine
        if operation.lot_id:
            domain.append(('lot_id', '=', operation.lot_id.id))
        # restrict to the given owner, or to ownerless quants when none is set
        if operation.owner_id:
            domain.append(('owner_id', '=', operation.owner_id.id))
        else:
            domain.append(('owner_id', '=', False))
        return domain
+
+class stock_warehouse_orderpoint(osv.osv):
+ """
+ Defines Minimum stock rules.
+ """
+ _name = "stock.warehouse.orderpoint"
+ _description = "Minimum Inventory Rule"
+
+ def subtract_procurements(self, cr, uid, orderpoint, context=None):
+ '''This function returns quantity of product that needs to be deducted from the orderpoint computed quantity because there's already a procurement created with aim to fulfill it.
+ '''
+ qty = 0
+ uom_obj = self.pool.get("product.uom")
+ for procurement in orderpoint.procurement_ids:
+ if procurement.state in ('cancel', 'done'):
+ continue
+ procurement_qty = uom_obj._compute_qty_obj(cr, uid, procurement.product_uom, procurement.product_qty, procurement.product_id.uom_id, context=context)
+ for move in procurement.move_ids:
+ if move.state not in ('draft', 'cancel'):
+ #if move is already confirmed, assigned or done, the virtual stock is already taking this into account so it shouldn't be deducted
+ procurement_qty -= move.product_qty
+ qty += procurement_qty
+ return qty
+
+ def _check_product_uom(self, cr, uid, ids, context=None):
+ '''
+ Check if the UoM has the same category as the product standard UoM
+ '''
+ if not context:
+ context = {}
+
+ for rule in self.browse(cr, uid, ids, context=context):
+ if rule.product_id.uom_id.category_id.id != rule.product_uom.category_id.id:
+ return False
+
+ return True
+
+ def action_view_proc_to_process(self, cr, uid, ids, context=None):
+ act_obj = self.pool.get('ir.actions.act_window')
+ mod_obj = self.pool.get('ir.model.data')
+ proc_ids = self.pool.get('procurement.order').search(cr, uid, [('orderpoint_id', 'in', ids), ('state', 'not in', ('done', 'cancel'))], context=context)
+ result = mod_obj.get_object_reference(cr, uid, 'procurement', 'do_view_procurements')
+ if not result:
+ return False
+
+ result = act_obj.read(cr, uid, [result[1]], context=context)[0]
+ result['domain'] = "[('id', 'in', [" + ','.join(map(str, proc_ids)) + "])]"
+ return result
_columns = {
- 'backorder_id': fields.many2one('stock.picking.in', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
- 'state': fields.selection(
- [('draft', 'Draft'),
- ('auto', 'Waiting Another Operation'),
- ('confirmed', 'Waiting Availability'),
- ('assigned', 'Ready to Receive'),
- ('done', 'Received'),
- ('cancel', 'Cancelled'),],
- 'Status', readonly=True, select=True,
- help="""* Draft: not confirmed yet and will not be scheduled until confirmed\n
- * Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
- * Waiting Availability: still waiting for the availability of products\n
- * Ready to Receive: products reserved, simply waiting for confirmation.\n
- * Received: has been processed, can't be modified or cancelled anymore\n
- * Cancelled: has been cancelled, can't be confirmed anymore"""),
+ 'name': fields.char('Name', size=32, required=True),
+ 'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the orderpoint without removing it."),
+ 'logic': fields.selection([('max', 'Order to Max'), ('price', 'Best price (not yet active!)')], 'Reordering Mode', required=True),
+ 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True, ondelete="cascade"),
+ 'location_id': fields.many2one('stock.location', 'Location', required=True, ondelete="cascade"),
+ 'product_id': fields.many2one('product.product', 'Product', required=True, ondelete='cascade', domain=[('type', '=', 'product')]),
+ 'product_uom': fields.related('product_id', 'uom_id', type='many2one', relation='product.uom', string='Product Unit of Measure', readonly=True, required=True),
+ 'product_min_qty': fields.float('Minimum Quantity', required=True,
+ help="When the virtual stock goes below the Min Quantity specified for this field, OpenERP generates "\
+ "a procurement to bring the forecasted quantity to the Max Quantity."),
+ 'product_max_qty': fields.float('Maximum Quantity', required=True,
+ help="When the virtual stock goes below the Min Quantity, OpenERP generates "\
+ "a procurement to bring the forecasted quantity to the Quantity specified as Max Quantity."),
+ 'qty_multiple': fields.integer('Qty Multiple', required=True,
+ help="The procurement quantity will be rounded up to this multiple."),
+ 'procurement_ids': fields.one2many('procurement.order', 'orderpoint_id', 'Created Procurements'),
+ 'group_id': fields.many2one('procurement.group', 'Procurement Group', help="Moves created through this orderpoint will be put in this procurement group. If none is given, the moves generated by procurement rules will be grouped into one big picking."),
+ 'company_id': fields.many2one('res.company', 'Company', required=True),
}
_defaults = {
- 'type': 'in',
+ 'active': lambda *a: 1,
+ 'logic': lambda *a: 'max',
+ 'qty_multiple': lambda *a: 1,
+ 'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.orderpoint') or '',
+ 'product_uom': lambda self, cr, uid, context: context.get('product_uom', False),
+ 'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.warehouse.orderpoint', context=context)
}
+ _sql_constraints = [
+ ('qty_multiple_check', 'CHECK( qty_multiple > 0 )', 'Qty Multiple must be greater than zero.'),
+ ]
+ _constraints = [
+ (_check_product_uom, 'You have to select a product unit of measure in the same category than the default unit of measure of the product', ['product_id', 'product_uom']),
+ ]
+
+ def default_get(self, cr, uid, fields, context=None):
++ warehouse_obj = self.pool.get('stock.warehouse')
+ res = super(stock_warehouse_orderpoint, self).default_get(cr, uid, fields, context)
+ # default 'warehouse_id' and 'location_id'
+ if 'warehouse_id' not in res:
- warehouse = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'warehouse0', context)
- res['warehouse_id'] = warehouse.id
++ warehouse_ids = res.get('company_id') and warehouse_obj.search(cr, uid, [('company_id', '=', res['company_id'])], limit=1, context=context) or []
++ res['warehouse_id'] = warehouse_ids and warehouse_ids[0] or False
+ if 'location_id' not in res:
- warehouse = self.pool.get('stock.warehouse').browse(cr, uid, res['warehouse_id'], context)
- res['location_id'] = warehouse.lot_stock_id.id
++ res['location_id'] = res.get('warehouse_id') and warehouse_obj.browse(cr, uid, res['warehouse_id'], context).lot_stock_id.id or False
+ return res
+
+ def onchange_warehouse_id(self, cr, uid, ids, warehouse_id, context=None):
+ """ Finds location id for changed warehouse.
+ @param warehouse_id: Changed id of warehouse.
+ @return: Dictionary of values.
+ """
+ if warehouse_id:
+ w = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context)
+ v = {'location_id': w.lot_stock_id.id}
+ return {'value': v}
+ return {}
+
+ def onchange_product_id(self, cr, uid, ids, product_id, context=None):
+ """ Finds UoM for changed product.
+ @param product_id: Changed id of product.
+ @return: Dictionary of values.
+ """
+ if product_id:
+ prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
+ d = {'product_uom': [('category_id', '=', prod.uom_id.category_id.id)]}
+ v = {'product_uom': prod.uom_id.id}
+ return {'value': v, 'domain': d}
+ return {'domain': {'product_uom': []}}
+
+ def copy_data(self, cr, uid, id, default=None, context=None):
+ if not default:
+ default = {}
+ default.update({
+ 'name': self.pool.get('ir.sequence').get(cr, uid, 'stock.orderpoint') or '',
+ 'procurement_ids': [],
+ 'group_id': False
+ })
+ return super(stock_warehouse_orderpoint, self).copy_data(cr, uid, id, default, context=context)
+
+
class stock_picking_type(osv.osv):
    """Picking type (receipts, internal transfers, deliveries, ...).

    The picking type determines the view, the default source/destination
    locations and the reference sequence used for its pickings, and feeds
    the statistics displayed on the warehouse kanban dashboard.
    """
    _name = "stock.picking.type"
    _description = "The picking type determines the picking view"
    _order = 'sequence'

    def open_barcode_interface(self, cr, uid, ids, context=None):
        """Open the barcode scanning UI pre-filtered on this picking type.

        :return: an ``ir.actions.act_url`` action dictionary
        """
        # Parenthesized conditional: without it, an empty ids list would make
        # the whole expression evaluate to the bare string '0' instead of a
        # URL ending in picking_type_id=0.
        final_url = "/barcode/web/#action=stock.ui&picking_type_id=" + (str(ids[0]) if ids else '0')
        return {'type': 'ir.actions.act_url', 'url': final_url, 'target': 'self'}

    def _get_tristate_values(self, cr, uid, ids, field_name, arg, context=None):
        """Build the 'last 10 done pickings' indicator for the kanban view.

        Each entry is a dict with a ``tooltip`` (picking name + status) and a
        ``value``: -1 late, 0 backorder exists, 1 OK; oldest picking first.
        """
        picking_obj = self.pool.get('stock.picking')
        res = dict.fromkeys(ids, [])
        for picking_type_id in ids:
            # Last 10 done pickings of this type, most recent first.
            picking_ids = picking_obj.search(cr, uid, [('picking_type_id', '=', picking_type_id), ('state', '=', 'done')], order='date_done desc', limit=10, context=context)
            tristates = []
            for picking in picking_obj.browse(cr, uid, picking_ids, context=context):
                # insert(0, ...) reverses the 'date_done desc' search order so
                # the indicator reads chronologically.
                if picking.date_done > picking.date:
                    tristates.insert(0, {'tooltip': picking.name + _(': Late'), 'value': -1})
                elif picking.backorder_id:
                    tristates.insert(0, {'tooltip': picking.name + _(': Backorder exists'), 'value': 0})
                else:
                    tristates.insert(0, {'tooltip': picking.name + _(': OK'), 'value': 1})
            res[picking_type_id] = tristates
        return res

    def _get_picking_count(self, cr, uid, ids, field_names, arg, context=None):
        """Compute the picking counters and late/backorder rates shown on
        the kanban dashboard (multi-field function field).

        Rates are integer percentages of the pending pickings
        (``count_picking``); both are 0 when there is nothing pending.
        """
        obj = self.pool.get('stock.picking')
        domains = {
            'count_picking_draft': [('state', '=', 'draft')],
            'count_picking_waiting': [('state', '=', 'confirmed')],
            'count_picking_ready': [('state', 'in', ('assigned', 'partially_available'))],
            'count_picking': [('state', 'in', ('assigned', 'waiting', 'confirmed', 'partially_available'))],
            'count_picking_late': [('min_date', '<', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)), ('state', 'in', ('assigned', 'waiting', 'confirmed', 'partially_available'))],
            'count_picking_backorders': [('backorder_id', '!=', False), ('state', 'in', ('confirmed', 'assigned', 'waiting', 'partially_available'))],
        }
        result = {}
        for field in domains:
            data = obj.read_group(cr, uid, domains[field] +
                [('state', 'not in', ('done', 'cancel')), ('picking_type_id', 'in', ids)],
                ['picking_type_id'], ['picking_type_id'], context=context)
            count = dict(map(lambda x: (x['picking_type_id'] and x['picking_type_id'][0], x['picking_type_id_count']), data))
            for tid in ids:
                result.setdefault(tid, {})[field] = count.get(tid, 0)
        for tid in ids:
            if result[tid]['count_picking']:
                # Integer percentages (Python 2 integer division is intended).
                result[tid]['rate_picking_late'] = result[tid]['count_picking_late'] * 100 / result[tid]['count_picking']
                result[tid]['rate_picking_backorders'] = result[tid]['count_picking_backorders'] * 100 / result[tid]['count_picking']
            else:
                result[tid]['rate_picking_late'] = 0
                result[tid]['rate_picking_backorders'] = 0
        return result

    def onchange_picking_code(self, cr, uid, ids, picking_code=False):
        """Adjust the default locations when the type of operation changes.

        Incoming operations source from the suppliers location, outgoing
        ones deliver to the customers location; everything else defaults to
        the stock location on both sides.
        """
        if not picking_code:
            return False

        obj_data = self.pool.get('ir.model.data')
        stock_loc = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_stock')

        result = {
            'default_location_src_id': stock_loc,
            'default_location_dest_id': stock_loc,
        }
        if picking_code == 'incoming':
            result['default_location_src_id'] = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_suppliers')
        elif picking_code == 'outgoing':
            result['default_location_dest_id'] = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_customers')
        return {'value': result}

    def _get_name(self, cr, uid, ids, field_names, arg, context=None):
        """Function field backing ``complete_name`` (delegates to name_get)."""
        return dict(self.name_get(cr, uid, ids, context=context))

    def name_get(self, cr, uid, ids, context=None):
        """Overrides orm name_get method to display 'Warehouse_name: PickingType_name'"""
        if context is None:
            context = {}
        if not isinstance(ids, list):
            ids = [ids]
        res = []
        if not ids:
            return res
        for record in self.browse(cr, uid, ids, context=context):
            name = record.name
            if record.warehouse_id:
                name = record.warehouse_id.name + ': ' + name
            # Shortened display (warehouse name only) requested via context.
            if context.get('special_shortened_wh_name'):
                if record.warehouse_id:
                    name = record.warehouse_id.name
                else:
                    name = _('Customer') + ' (' + record.name + ')'
            res.append((record.id, name))
        return res

    def _default_warehouse(self, cr, uid, context=None):
        """Return the first warehouse of the current user's company (or False)."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context)
        res = self.pool.get('stock.warehouse').search(cr, uid, [('company_id', '=', user.company_id.id)], limit=1, context=context)
        return res and res[0] or False

    _columns = {
        'name': fields.char('Picking Type Name', translate=True, required=True),
        'complete_name': fields.function(_get_name, type='char', string='Name'),
        'color': fields.integer('Color'),
        'sequence': fields.integer('Sequence', help="Used to order the 'All Operations' kanban view"),
        'sequence_id': fields.many2one('ir.sequence', 'Reference Sequence', required=True),
        'default_location_src_id': fields.many2one('stock.location', 'Default Source Location'),
        'default_location_dest_id': fields.many2one('stock.location', 'Default Destination Location'),
        'code': fields.selection([('incoming', 'Suppliers'), ('outgoing', 'Customers'), ('internal', 'Internal')], 'Type of Operation', required=True),
        'return_picking_type_id': fields.many2one('stock.picking.type', 'Picking Type for Returns'),
        'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', ondelete='cascade'),
        'active': fields.boolean('Active'),

        # Statistics for the kanban view
        'last_done_picking': fields.function(_get_tristate_values,
            type='char',
            string='Last 10 Done Pickings'),

        'count_picking_draft': fields.function(_get_picking_count,
            type='integer', multi='_get_picking_count'),
        'count_picking_ready': fields.function(_get_picking_count,
            type='integer', multi='_get_picking_count'),
        'count_picking': fields.function(_get_picking_count,
            type='integer', multi='_get_picking_count'),
        'count_picking_waiting': fields.function(_get_picking_count,
            type='integer', multi='_get_picking_count'),
        'count_picking_late': fields.function(_get_picking_count,
            type='integer', multi='_get_picking_count'),
        'count_picking_backorders': fields.function(_get_picking_count,
            type='integer', multi='_get_picking_count'),

        'rate_picking_late': fields.function(_get_picking_count,
            type='integer', multi='_get_picking_count'),
        'rate_picking_backorders': fields.function(_get_picking_count,
            type='integer', multi='_get_picking_count'),
    }
    _defaults = {
        'warehouse_id': _default_warehouse,
        'active': True,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: