from dateutil import relativedelta
import json
import time
+import sets
+import openerp
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT
-from openerp import SUPERUSER_ID, api
+from openerp import SUPERUSER_ID, api, models
import openerp.addons.decimal_precision as dp
from openerp.addons.procurement import procurement
import logging
'scrap_location': fields.boolean('Is a Scrap Location?', help='Check this box to allow using this location to put scrapped/damaged goods.'),
'removal_strategy_id': fields.many2one('product.removal', 'Removal Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to take the products from, which lot etc. for this location. This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."),
'putaway_strategy_id': fields.many2one('product.putaway', 'Put Away Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to store the products. This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."),
- 'loc_barcode': fields.char('Location Barcode'),
+ 'barcode': fields.char('Barcode', oldname='loc_barcode'),
}
_defaults = {
'active': True,
'posz': 0,
'scrap_location': False,
}
- _sql_constraints = [('loc_barcode_company_uniq', 'unique (loc_barcode,company_id)', 'The barcode for a location must be unique per company !')]
+ _sql_constraints = [('barcode_company_uniq', 'unique (barcode,company_id)', 'The barcode for a location must be unique per company !')]
    def create(self, cr, uid, default, context=None):
-        if not default.get('loc_barcode', False):
-            default.update({'loc_barcode': default.get('complete_name', False)})
+        # Fall back to the location's complete name as the barcode so every
+        # new location is scannable (field renamed 'loc_barcode' -> 'barcode').
+        if not default.get('barcode', False):
+            default.update({'barcode': default.get('complete_name', False)})
        return super(stock_location, self).create(cr, uid, default, context=context)
def get_putaway_strategy(self, cr, uid, location, product, context=None):
return self._default_removal_strategy(cr, uid, context=context)
+    def get_warehouse(self, cr, uid, location, context=None):
+        """
+        Returns warehouse id of warehouse that contains location
+        :param location: browse record (stock.location)
+        :return: id of the first matching warehouse, or False when the
+                 location lies under no warehouse's view location
+        """
+        wh_obj = self.pool.get("stock.warehouse")
+        # A warehouse contains the location when the location's parent_left
+        # falls inside the nested-set interval of the warehouse view location.
+        whs = wh_obj.search(cr, uid, [('view_location_id.parent_left', '<=', location.parent_left),
+                                ('view_location_id.parent_right', '>=', location.parent_left)], context=context)
+        return whs and whs[0] or False
+
#----------------------------------------------------------
# Routes
#----------------------------------------------------------
self.pool.get('stock.move').write(cr, uid, [move.id], {'partially_available': True}, context=context)
def quants_move(self, cr, uid, quants, move, location_to, location_from=False, lot_id=False, owner_id=False, src_package_id=False, dest_package_id=False, context=None):
- """Moves all given stock.quant in the given destination location.
+ """Moves all given stock.quant in the given destination location. Unreserve from current move.
:param quants: list of tuple(browse record(stock.quant) or None, quantity to move)
:param move: browse record (stock.move)
:param location_to: browse record (stock.location) depicting where the quants have to be moved
    def move_quants_write(self, cr, uid, quants, move, location_dest_id, dest_package_id, context=None):
        vals = {'location_id': location_dest_id.id,
                'history_ids': [(4, move.id)],
-                'package_id': dest_package_id}
+                'package_id': dest_package_id,
+                # Clear any reservation: once the quants have been moved they
+                # must become available again for other moves/operations.
+                'reservation_id': False}
        self.write(cr, SUPERUSER_ID, [q.id for q in quants], vals, context=context)
def quants_get_prefered_domain(self, cr, uid, location, product, qty, domain=None, prefered_domain_list=[], restrict_lot_id=False, restrict_partner_id=False, context=None):
class stock_picking(osv.osv):
_name = "stock.picking"
_inherit = ['mail.thread']
- _description = "Picking List"
+ _description = "Transfer"
_order = "priority desc, date asc, id desc"
def _set_min_date(self, cr, uid, id, field, value, arg, context=None):
if ('name' not in vals) or (vals.get('name') in ('/', False)):
ptype_id = vals.get('picking_type_id', context.get('default_picking_type_id', False))
sequence_id = self.pool.get('stock.picking.type').browse(cr, user, ptype_id, context=context).sequence_id.id
- vals['name'] = self.pool.get('ir.sequence').get_id(cr, user, sequence_id, 'id', context=context)
+ vals['name'] = self.pool.get('ir.sequence').next_by_id(cr, user, sequence_id, context=context)
return super(stock_picking, self).create(cr, user, vals, context)
def _state_get(self, cr, uid, ids, field_name, arg, context=None):
* Cancelled: has been cancelled, can't be confirmed anymore"""
),
'priority': fields.function(get_min_max_date, multi="min_max_date", fnct_inv=_set_priority, type='selection', selection=procurement.PROCUREMENT_PRIORITIES, string='Priority',
- store={'stock.move': (_get_pickings, ['priority'], 20)}, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, select=1, help="Priority for this picking. Setting manually a value here would set it as priority for all the moves",
+ store={'stock.move': (_get_pickings, ['priority', 'picking_id'], 20)}, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, select=1, help="Priority for this picking. Setting manually a value here would set it as priority for all the moves",
track_visibility='onchange', required=True),
'min_date': fields.function(get_min_max_date, multi="min_max_date", fnct_inv=_set_min_date,
- store={'stock.move': (_get_pickings, ['date_expected'], 20)}, type='datetime', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, string='Scheduled Date', select=1, help="Scheduled time for the first part of the shipment to be processed. Setting manually a value here would set it as expected date for all the stock moves.", track_visibility='onchange'),
+ store={'stock.move': (_get_pickings, ['date_expected', 'picking_id'], 20)}, type='datetime', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, string='Scheduled Date', select=1, help="Scheduled time for the first part of the shipment to be processed. Setting manually a value here would set it as expected date for all the stock moves.", track_visibility='onchange'),
'max_date': fields.function(get_min_max_date, multi="min_max_date",
- store={'stock.move': (_get_pickings, ['date_expected'], 20)}, type='datetime', string='Max. Expected Date', select=2, help="Scheduled time for the last part of the shipment to be processed"),
+ store={'stock.move': (_get_pickings, ['date_expected', 'picking_id'], 20)}, type='datetime', string='Max. Expected Date', select=2, help="Scheduled time for the last part of the shipment to be processed"),
'date': fields.datetime('Creation Date', help="Creation Date, usually the time of the order", select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, track_visibility='onchange'),
'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False),
'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=True),
'pack_operation_ids': [],
'backorder_id': picking.id,
})
- self.message_post(cr, uid, picking.id, body=_("Back order <em>%s</em> <b>created</b>.") % (picking.name), context=context)
+ backorder = self.browse(cr, uid, backorder_id, context=context)
+ self.message_post(cr, uid, picking.id, body=_("Back order <em>%s</em> <b>created</b>.") % (backorder.name), context=context)
move_obj = self.pool.get("stock.move")
move_obj.write(cr, uid, backorder_move_ids, {'picking_id': backorder_id}, context=context)
    @api.cr_uid_ids_context
    def open_barcode_interface(self, cr, uid, picking_ids, context=None):
-        final_url="/barcode/web/#action=stock.ui&picking_id="+str(picking_ids[0])
+        # Barcode UI route moved from /barcode/web/ to /stock/barcode/.
+        # Only the first picking of the given ids is opened.
+        final_url="/stock/barcode/#action=stock.ui&picking_id="+str(picking_ids[0])
        return {'type': 'ir.actions.act_url', 'url':final_url, 'target': 'self',}
@api.cr_uid_ids_context
if picking.pack_operation_ids:
self.recompute_remaining_qty(cr, uid, picking, context=context)
+    def _prepare_values_extra_move(self, cr, uid, op, product, remaining_qty, context=None):
+        """
+        Creates an extra move when there is no corresponding original move to be copied
+        :param op: browse record (stock.pack.operation) the extra move stems from
+        :param product: browse record (product.product) to move
+        :param remaining_qty: quantity left to transfer, in the product's UoM
+        :return: dict of stock.move values, created in state 'draft'
+        """
+        picking = op.picking_id
+        res = {
+            'picking_id': picking.id,
+            'location_id': picking.location_id.id,
+            'location_dest_id': picking.location_dest_id.id,
+            'product_id': product.id,
+            # Extra moves are always expressed in the product's default UoM.
+            'product_uom': product.uom_id.id,
+            'product_uom_qty': remaining_qty,
+            'name': _('Extra Move: ') + product.name,
+            'state': 'draft',
+        }
+        return res
+
def _create_extra_moves(self, cr, uid, picking, context=None):
'''This function creates move lines on a picking, at the time of do_transfer, based on
unexpected product transfers (or exceeding quantities) found in the pack operations.
for product_id, remaining_qty in operation_obj._get_remaining_prod_quantities(cr, uid, op, context=context).items():
if remaining_qty > 0:
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
- vals = {
- 'picking_id': picking.id,
- 'location_id': picking.location_id.id,
- 'location_dest_id': picking.location_dest_id.id,
- 'product_id': product_id,
- 'product_uom': product.uom_id.id,
- 'product_uom_qty': remaining_qty,
- 'name': _('Extra Move: ') + product.name,
- 'state': 'draft',
- }
+ vals = self._prepare_values_extra_move(cr, uid, op, product, remaining_qty, context=context)
moves.append(move_obj.create(cr, uid, vals, context=context))
if moves:
move_obj.action_confirm(cr, uid, moves, context=context)
stock_operation_obj.write(cr, uid, pack_operation_ids, {'result_package_id': package_id}, context=context)
return True
-    def process_product_id_from_ui(self, cr, uid, picking_id, product_id, op_id, increment=True, context=None):
+    def process_product_id_from_ui(self, cr, uid, picking_id, product_id, op_id, increment=1, context=None):
+        # `increment` is now a signed integer quantity (positive adds, negative
+        # removes), no longer a boolean toggle; forwarded to _search_and_increment.
        return self.pool.get('stock.pack.operation')._search_and_increment(cr, uid, picking_id, [('product_id', '=', product_id),('id', '=', op_id)], increment=increment, context=context)
    def process_barcode_from_ui(self, cr, uid, picking_id, barcode_str, visible_op_ids, context=None):
        '''This function is called each time the barcode scanner reads an input'''
-        lot_obj = self.pool.get('stock.production.lot')
-        package_obj = self.pool.get('stock.quant.package')
-        product_obj = self.pool.get('product.product')
        stock_operation_obj = self.pool.get('stock.pack.operation')
-        stock_location_obj = self.pool.get('stock.location')
        answer = {'filter_loc': False, 'operation_id': False}
-        #check if the barcode correspond to a location
-        matching_location_ids = stock_location_obj.search(cr, uid, [('loc_barcode', '=', barcode_str)], context=context)
-        if matching_location_ids:
-            #if we have a location, return immediatly with the location name
-            location = stock_location_obj.browse(cr, uid, matching_location_ids[0], context=None)
-            answer['filter_loc'] = stock_location_obj._name_get(cr, uid, location, context=None)
-            answer['filter_loc_id'] = matching_location_ids[0]
-            return answer
-        #check if the barcode correspond to a product
-        matching_product_ids = product_obj.search(cr, uid, ['|', ('ean13', '=', barcode_str), ('default_code', '=', barcode_str)], context=context)
-        if matching_product_ids:
-            op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', matching_product_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
-            answer['operation_id'] = op_id
-            return answer
+
+        # Barcode Nomenclatures: classify the scanned string through the
+        # picking type's nomenclature (product/weight/package/lot/location).
+        picking_type_id = self.browse(cr, uid, [picking_id], context=context).picking_type_id.id
+        barcode_nom = self.pool.get('stock.picking.type').browse(cr, uid, [picking_type_id], context=context).barcode_nomenclature_id
+        parsed_result = barcode_nom.parse_barcode(barcode_str)
+
+        #check if the barcode is a weighted barcode or simply a product
+        if parsed_result['type'] in ['weight', 'product', 'package']:
+            weight=1
+            if parsed_result['type'] == 'weight':
+                # Weighted barcodes embed the quantity in 'value'; the product
+                # itself is matched on the stripped 'base_code'.
+                domain = ['|', ('barcode', '=', parsed_result['base_code']), ('default_code', '=', parsed_result['base_code'])]
+                weight=parsed_result['value']
+                obj = self.pool.get('product.product')
+                id_in_operation = 'product_id'
+            elif parsed_result['type'] == 'product':
+                domain = ['|', ('barcode', '=', parsed_result['code']), ('default_code', '=', parsed_result['code'])]
+                obj = self.pool.get('product.product')
+                id_in_operation = 'product_id'
+            else:
+                domain = [('name', '=', parsed_result['code'])]
+                obj = self.pool.get('stock.quant.package')
+                id_in_operation = 'package_id'
+
+            # Increment the matching pack operation by `weight` (1 for plain
+            # products/packages, the parsed quantity for weighted barcodes).
+            matching_product_ids = obj.search(cr, uid, domain, context=context)
+            if matching_product_ids:
+                op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [(id_in_operation, '=', matching_product_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=weight, context=context)
+                answer['operation_id'] = op_id
+                return answer
+
        #check if the barcode corresponds to a lot
-        matching_lot_ids = lot_obj.search(cr, uid, [('name', '=', barcode_str)], context=context)
-        if matching_lot_ids:
-            lot = lot_obj.browse(cr, uid, matching_lot_ids[0], context=context)
-            op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', lot.product_id.id), ('lot_id', '=', lot.id)], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
-            answer['operation_id'] = op_id
-            return answer
-        #check if the barcode correspond to a package
-        matching_package_ids = package_obj.search(cr, uid, [('name', '=', barcode_str)], context=context)
-        if matching_package_ids:
-            op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('package_id', '=', matching_package_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
-            answer['operation_id'] = op_id
-            return answer
+        elif parsed_result['type'] == 'lot':
+            lot_obj = self.pool.get('stock.production.lot')
+            matching_lot_ids = lot_obj.search(cr, uid, [('name', '=', parsed_result['code'])], context=context)
+            if matching_lot_ids:
+                lot = lot_obj.browse(cr, uid, matching_lot_ids[0], context=context)
+                op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', lot.product_id.id), ('lot_id', '=', lot.id)], filter_visible=True, visible_op_ids=visible_op_ids, increment=1, context=context)
+                answer['operation_id'] = op_id
+                return answer
+
+        #check if the barcode corresponds to a location
+        elif parsed_result['type'] == 'location':
+            stock_location_obj = self.pool.get('stock.location')
+            matching_location_ids = stock_location_obj.search(cr, uid, [('barcode', '=', parsed_result['code'])], context=context)
+            if matching_location_ids:
+                #if we have a location, return immediately with the location name
+                location = stock_location_obj.browse(cr, uid, matching_location_ids[0], context=None)
+                answer['filter_loc'] = stock_location_obj._name_get(cr, uid, location, context=None)
+                answer['filter_loc_id'] = matching_location_ids[0]
+                return answer
+
        return answer
'create_date': fields.datetime('Creation Date'),
}
_defaults = {
- 'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial'),
+ 'name': lambda x, y, z, c: x.pool.get('ir.sequence').next_by_code(y, z, 'stock.lot.serial'),
'product_id': lambda x, y, z, c: c.get('product_id', False),
}
_sql_constraints = [
res.append((line.id, name))
return res
- def create(self, cr, uid, vals, context=None):
- if vals.get('product_id') and not vals.get('price_unit'):
- prod_obj = self.pool.get('product.product')
- vals['price_unit'] = prod_obj.browse(cr, uid, vals['product_id'], context=context).standard_price
- return super(stock_move, self).create(cr, uid, vals, context=context)
-
def _quantity_normalize(self, cr, uid, ids, name, args, context=None):
uom_obj = self.pool.get('product.uom')
res = {}
'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True, copy=False),
'move_orig_ids': fields.one2many('stock.move', 'move_dest_id', 'Original Move', help="Optional: previous stock move when chaining them", select=True),
- 'picking_id': fields.many2one('stock.picking', 'Reference', select=True, states={'done': [('readonly', True)]}),
+ 'picking_id': fields.many2one('stock.picking', 'Reference Stock Move', select=True, states={'done': [('readonly', True)]}),
'note': fields.text('Notes'),
'state': fields.selection([('draft', 'New'),
('cancel', 'Cancelled'),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True),
'split_from': fields.many2one('stock.move', string="Move Split From", help="Technical field used to track the origin of a split move, which can be useful in case of debug", copy=False),
'backorder_id': fields.related('picking_id', 'backorder_id', type='many2one', relation="stock.picking", string="Back Order of", select=True),
- 'origin': fields.char("Source"),
+ 'origin': fields.char("Source Document"),
'procure_method': fields.selection([('make_to_stock', 'Default: Take From Stock'), ('make_to_order', 'Advanced: Apply Procurement Rules')], 'Supply Method', required=True,
help="""By default, the system will take from the stock in the source location and passively wait for availability. The other possibility allows you to directly create a procurement on the source location (and thus ignore its current stock) to gather products. If we want to chain moves and have this one to wait for the previous, this second option should be chosen."""),
self.write(cr, uid, [move.id], {'state': 'confirmed'}, context=context)
def _prepare_procurement_from_move(self, cr, uid, move, context=None):
- origin = (move.group_id and (move.group_id.name + ":") or "") + (move.rule_id and move.rule_id.name or "/")
+ origin = (move.group_id and (move.group_id.name + ":") or "") + (move.rule_id and move.rule_id.name or move.origin or "/")
group_id = move.group_id and move.group_id.id or False
if move.rule_id:
if move.rule_id.group_propagation_option == 'fixed' and move.rule_id.group_id:
date_expected = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
return {'value': {'date': date_expected}}
+    def attribute_price(self, cr, uid, move, context=None):
+        """
+        Attribute price to move, important in inter-company moves or receipts with only one partner
+        :param move: browse record (stock.move); only written when it has no
+                     price_unit yet, falling back to the product's standard_price
+        """
+        if not move.price_unit:
+            price = move.product_id.standard_price
+            # NOTE(review): context is not forwarded to write() here — confirm
+            # whether that is intended.
+            self.write(cr, uid, [move.id], {'price_unit': price})
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms stock move or put it in waiting if it's linked to another move.
}
to_assign = {}
for move in self.browse(cr, uid, ids, context=context):
+ self.attribute_price(cr, uid, move, context=context)
state = 'confirmed'
#if the move is preceeded, then it's waiting (if preceeding move is done, then action_assign has been called already and its state is already available)
if move.move_orig_ids:
self.write(cr, uid, [move.id], vals, context=context)
def action_done(self, cr, uid, ids, context=None):
- """ Process completly the moves given as ids and if all moves are done, it will finish the picking.
+ """ Process completely the moves given as ids and if all moves are done, it will finish the picking.
"""
context = context or {}
picking_obj = self.pool.get("stock.picking")
self.pool.get('stock.quant.package').write(cr, SUPERUSER_ID, [ops.package_id.id], {'parent_id': ops.result_package_id.id}, context=context)
move_qty[move.id] -= record.qty
#Check for remaining qtys and unreserve/check move_dest_id in
+ move_dest_ids = set()
for move in self.browse(cr, uid, ids, context=context):
if move_qty[move.id] > 0: # (=In case no pack operations in picking)
main_domain = [('qty', '>', 0)]
qty = move_qty[move.id]
quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, qty, domain=main_domain, prefered_domain_list=prefered_domain_list, restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context)
quant_obj.quants_move(cr, uid, quants, move, move.location_dest_id, lot_id=move.restrict_lot_id.id, owner_id=move.restrict_partner_id.id, context=context)
- #unreserve the quants and make them available for other operations/moves
- quant_obj.quants_unreserve(cr, uid, move, context=context)
- #Check moves that were pushed
- if move.move_dest_id.state in ('waiting', 'confirmed'):
- # FIXME is opw 607970 still present with new WMS?
- # (see commits 1ef2c181033bd200906fb1e5ce35e234bf566ac6
- # and 41c5ceb8ebb95c1b4e98d8dd1f12b8e547a24b1d)
- other_upstream_move_ids = self.search(cr, uid, [('id', '!=', move.id), ('state', 'not in', ['done', 'cancel']),
- ('move_dest_id', '=', move.move_dest_id.id)], context=context)
- #If no other moves for the move that got pushed:
- if not other_upstream_move_ids and move.move_dest_id.state in ('waiting', 'confirmed'):
- self.action_assign(cr, uid, [move.move_dest_id.id], context=context)
+ # If the move has a destination, add it to the list to reserve
+ if move.move_dest_id and move.move_dest_id.state in ('waiting', 'confirmed'):
+ move_dest_ids.add(move.move_dest_id.id)
+
if move.procurement_id:
procurement_ids.append(move.procurement_id.id)
+ #unreserve the quants and make them available for other operations/moves
+ quant_obj.quants_unreserve(cr, uid, move, context=context)
# Check the packages have been placed in the correct locations
self._check_package_from_moves(cr, uid, ids, context=context)
#set the move as done
self.write(cr, uid, ids, {'state': 'done', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context)
self.pool.get('procurement.order').check(cr, uid, procurement_ids, context=context)
+ #assign destination moves
+ if move_dest_ids:
+ self.action_assign(cr, uid, list(move_dest_ids), context=context)
#check picking state to set the date_done is needed
done_picking = []
for picking in picking_obj.browse(cr, uid, list(pickings), context=context):
return self.action_confirm(cr, uid, [new_move], context=context)[0]
+    def get_code_from_locs(self, cr, uid, move, location_id=False, location_dest_id=False, context=None):
+        """
+        Returns the code the picking type should have. This can easily be used
+        to check if a move is internal or not
+        move, location_id and location_dest_id are browse records
+        :return: 'outgoing' (internal -> non-internal), 'incoming'
+                 (non-internal -> internal), 'internal' otherwise
+        """
+        code = 'internal'
+        # Explicitly passed locations take precedence over the move's own.
+        src_loc = location_id or move.location_id
+        dest_loc = location_dest_id or move.location_dest_id
+        if src_loc.usage == 'internal' and dest_loc.usage != 'internal':
+            code = 'outgoing'
+        if src_loc.usage != 'internal' and dest_loc.usage == 'internal':
+            code = 'incoming'
+        return code
+
+
class stock_inventory(osv.osv):
_name = "stock.inventory"
_description = "Inventory"
    def prepare_inventory(self, cr, uid, ids, context=None):
        inventory_line_obj = self.pool.get('stock.inventory.line')
        for inventory in self.browse(cr, uid, ids, context=context):
-            #clean the existing inventory lines before redoing an inventory proposal
+            # If there are inventory lines already (e.g. from import), respect those and set their theoretical qty
            line_ids = [line.id for line in inventory.line_ids]
-            inventory_line_obj.unlink(cr, uid, line_ids, context=context)
-            #compute the inventory lines and create them
-            vals = self._get_inventory_lines(cr, uid, inventory, context=context)
-            for product_line in vals:
-                inventory_line_obj.create(cr, uid, product_line, context=context)
+            if not line_ids:
+                #compute the inventory lines and create them
+                vals = self._get_inventory_lines(cr, uid, inventory, context=context)
+                for product_line in vals:
+                    inventory_line_obj.create(cr, uid, product_line, context=context)
+            else:
+                # On import calculate theoretical quantity
+                quant_obj = self.pool.get("stock.quant")
+                for line in inventory.line_ids:
+                    # Sum the quants matching the line's company, location
+                    # (including children), lot, product and owner.
+                    dom = [('company_id', '=', line.company_id.id), ('location_id', 'child_of', line.location_id.id), ('lot_id', '=', line.prod_lot_id.id),
+                                ('product_id','=', line.product_id.id), ('owner_id', '=', line.partner_id.id)]
+                    if line.package_id:
+                        dom += [('package_id', '=', line.package_id.id)]
+                    quants = quant_obj.search(cr, uid, dom, context=context)
+                    tot_qty = 0
+                    for quant in quant_obj.browse(cr, uid, quants, context=context):
+                        tot_qty += quant.qty
+                    inventory_line_obj.write(cr, uid, [line.id], {'theoretical_qty': tot_qty}, context=context)
+
        return self.write(cr, uid, ids, {'state': 'confirm', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})
def _get_inventory_lines(self, cr, uid, inventory, context=None):
'company_id': fields.related('inventory_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, select=True, readonly=True),
'prod_lot_id': fields.many2one('stock.production.lot', 'Serial Number', domain="[('product_id','=',product_id)]"),
'state': fields.related('inventory_id', 'state', type='char', string='Status', readonly=True),
- 'theoretical_qty': fields.float('Theoretical Quantity', readonly=True),
+ 'theoretical_qty': fields.float('Theoretical Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), readonly=True),
'partner_id': fields.many2one('res.partner', 'Owner'),
'product_name': fields.related('product_id', 'name', type='char', string='Product Name', store={
'product.product': (_get_product_name_change, ['name', 'default_code'], 20),
'product_qty': 1,
}
+    def create(self, cr, uid, values, context=None):
+        """Default `product_uom_id` to the product's own UoM when not provided."""
+        if context is None:
+            context = {}
+        product_obj = self.pool.get('product.product')
+        # PEP 8 idiom: `'x' not in d` instead of `not 'x' in d`. Membership is
+        # tested on the raw dict so an explicitly passed (even falsy)
+        # product_uom_id is respected.
+        if 'product_id' in values and 'product_uom_id' not in values:
+            values['product_uom_id'] = product_obj.browse(cr, uid, values.get('product_id'), context=context).uom_id.id
+        return super(stock_inventory_line, self).create(cr, uid, values, context=context)
+
def _resolve_inventory_line(self, cr, uid, inventory_line, context=None):
stock_move_obj = self.pool.get('stock.move')
diff = inventory_line.theoretical_qty - inventory_line.product_qty
location_obj = self.pool.get('stock.location')
#create view location for warehouse
- wh_loc_id = location_obj.create(cr, uid, {
+ loc_vals = {
'name': _(vals.get('code')),
'usage': 'view',
- 'location_id': data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_locations')[1]
- }, context=context)
+ 'location_id': data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_locations')[1],
+ }
+ if vals.get('company_id'):
+ loc_vals['company_id'] = vals.get('company_id')
+ wh_loc_id = location_obj.create(cr, uid, loc_vals, context=context)
vals['view_location_id'] = wh_loc_id
#create all location
def_values = self.default_get(cr, uid, {'reception_steps', 'delivery_steps'})
{'name': _('Packing Zone'), 'active': delivery_steps == 'pick_pack_ship', 'field': 'wh_pack_stock_loc_id'},
]
for values in sub_locations:
- location_id = location_obj.create(cr, uid, {
+ loc_vals = {
'name': values['name'],
'usage': 'internal',
'location_id': wh_loc_id,
'active': values['active'],
- }, context=context_with_inactive)
+ }
+ if vals.get('company_id'):
+ loc_vals['company_id'] = vals.get('company_id')
+ location_id = location_obj.create(cr, uid, loc_vals, context=context_with_inactive)
vals[values['field']] = location_id
#create WH
'limit': 20
}
+
class stock_location_path(osv.osv):
_name = "stock.location.path"
_description = "Pushed Flows"
'active': True,
}
+    def _prepare_push_apply(self, cr, uid, rule, move, context=None):
+        """Prepare the values of the chained move pushed from `move`.
+        :param rule: browse record (stock.location.path) being applied
+        :param move: browse record (stock.move) that triggered the push
+        :return: dict of stock.move values used to copy `move` one step further
+        """
+        # Shift the new move's dates by the rule's lead time (in days).
+        newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
+        return {
+            'location_id': move.location_dest_id.id,
+            'location_dest_id': rule.location_dest_id.id,
+            'date': newdate,
+            'company_id': rule.company_id and rule.company_id.id or False,
+            'date_expected': newdate,
+            # Detach from the original picking; a new one will be assigned.
+            'picking_id': False,
+            'picking_type_id': rule.picking_type_id and rule.picking_type_id.id or False,
+            'propagate': rule.propagate,
+            'push_rule_id': rule.id,
+            'warehouse_id': rule.warehouse_id and rule.warehouse_id.id or False,
+        }
+
def _apply(self, cr, uid, rule, move, context=None):
move_obj = self.pool.get('stock.move')
newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
#call again push_apply to see if a next step is defined
move_obj._push_apply(cr, uid, [move], context=context)
else:
- move_id = move_obj.copy(cr, uid, move.id, {
- 'location_id': move.location_dest_id.id,
- 'location_dest_id': rule.location_dest_id.id,
- 'date': newdate,
- 'company_id': rule.company_id and rule.company_id.id or False,
- 'date_expected': newdate,
- 'picking_id': False,
- 'picking_type_id': rule.picking_type_id and rule.picking_type_id.id or False,
- 'propagate': rule.propagate,
- 'push_rule_id': rule.id,
- 'warehouse_id': rule.warehouse_id and rule.warehouse_id.id or False,
- })
+ vals = self._prepare_push_apply(cr, uid, rule, move, context=context)
+ move_id = move_obj.copy(cr, uid, move.id, vals, context=context)
move_obj.write(cr, uid, [move.id], {
'move_dest_id': move_id,
})
def _get_package_info(self, cr, uid, ids, name, args, context=None):
default_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
- res = {}.fromkeys(ids, {'location_id': False, 'company_id': default_company_id, 'owner_id': False})
+ res = dict((res_id, {'location_id': False, 'company_id': default_company_id, 'owner_id': False}) for res_id in ids)
for pack in self.browse(cr, uid, ids, context=context):
if pack.quant_ids:
res[pack.id]['location_id'] = pack.quant_ids[0].location_id.id
}, readonly=True, select=True),
}
_defaults = {
- 'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.quant.package') or _('Unknown Pack')
+ 'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').next_by_code(cr, uid, 'stock.quant.package') or _('Unknown Pack')
}
def _check_location_constraint(self, cr, uid, packs, context=None):
new_lot_id = self.pool.get('stock.production.lot').create(cr, uid, val, context=context)
self.write(cr, uid, id, {'lot_id': new_lot_id}, context=context)
- def _search_and_increment(self, cr, uid, picking_id, domain, filter_visible=False, visible_op_ids=False, increment=True, context=None):
- '''Search for an operation with given 'domain' in a picking, if it exists increment the qty (+1) otherwise create it
+ def _search_and_increment(self, cr, uid, picking_id, domain, filter_visible=False, visible_op_ids=False, increment=1, context=None):
+ '''Search for an operation with given 'domain' in a picking, if it exists increment the qty by the value of increment otherwise create it
:param domain: list of tuple directly reusable as a domain
context can receive a key 'current_package_id' with the package to consider for this operation
operation_id = todo_operation_ids[0]
op_obj = self.browse(cr, uid, operation_id, context=context)
qty = op_obj.qty_done
- if increment:
- qty += 1
- else:
- qty -= 1 if qty >= 1 else 0
+ if increment > 0:
+ qty += increment
+ elif increment < 0:
if qty == 0 and op_obj.product_qty == 0:
#we have a line with 0 qty set, so delete it
self.unlink(cr, uid, [operation_id], context=context)
return False
+ else:
+ qty = max(0, qty-1)
self.write(cr, uid, [operation_id], {'qty_done': qty}, context=context)
else:
#no existing operation found for the given domain and picking => create a new one
'product_qty': 0,
'location_id': picking.location_id.id,
'location_dest_id': picking.location_dest_id.id,
- 'qty_done': 1,
+ 'qty_done': increment,
}
for key in domain:
var_name, dummy, value = key
'active': lambda *a: 1,
'logic': lambda *a: 'max',
'qty_multiple': lambda *a: 1,
- 'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.orderpoint') or '',
+ 'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').next_by_code(cr, uid, 'stock.orderpoint') or '',
'product_uom': lambda self, cr, uid, context: context.get('product_uom', False),
'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.warehouse.orderpoint', context=context)
}
_order = 'sequence'
def open_barcode_interface(self, cr, uid, ids, context=None):
- final_url = "/barcode/web/#action=stock.ui&picking_type_id=" + str(ids[0]) if len(ids) else '0'
+ final_url = "/stock/barcode/#action=stock.ui&picking_type_id=" + str(ids[0]) if len(ids) else '0'
return {'type': 'ir.actions.act_url', 'url': final_url, 'target': 'self'}
def _get_tristate_values(self, cr, uid, ids, field_name, arg, context=None):
picking_obj = self.pool.get('stock.picking')
- res = dict.fromkeys(ids, [])
+ res = {}
for picking_type_id in ids:
#get last 10 pickings of this type
picking_ids = picking_obj.search(cr, uid, [('picking_type_id', '=', picking_type_id), ('state', '=', 'done')], order='date_done desc', limit=10, context=context)
'rate_picking_backorders': fields.function(_get_picking_count,
type='integer', multi='_get_picking_count'),
+ # Barcode nomenclature
+ 'barcode_nomenclature_id': fields.many2one('barcode.nomenclature','Barcode Nomenclature', help='A barcode nomenclature', required=True),
}
+
+    def _get_default_nomenclature(self, cr, uid, context=None):
+        """Return the id of the first barcode nomenclature, or False if none exists."""
+        nom_obj = self.pool.get('barcode.nomenclature')
+        res = nom_obj.search(cr, uid, [], limit=1, context=context)
+        return res and res[0] or False
+
_defaults = {
'warehouse_id': _default_warehouse,
'active': True,
+ 'barcode_nomenclature_id': _get_default_nomenclature,
}
+class barcode_rule(models.Model):
+    _inherit = 'barcode.rule'
+
+    def _get_type_selection(self):
+        """Extend the selectable barcode rule types with stock-specific entries.
+
+        Uses the built-in `set` instead of `sets.Set`: the `sets` module has
+        been deprecated since Python 2.6 and removed in Python 3, and the
+        builtin is a drop-in replacement here (de-duplicates the parent's
+        selection before extending it).
+        """
+        types = set(super(barcode_rule, self)._get_type_selection())
+        types.update([
+            ('weight', 'Weighted Product'),
+            ('location', 'Location'),
+            ('lot', 'Lot'),
+            ('package', 'Package')
+        ])
+        return list(types)
+
+
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: