Merge lp:~unifield-team/unifield-server/us-838-new-batch-1 into lp:unifield-server

Proposed by jftempo
Status: Merged
Merged at revision: 3789
Proposed branch: lp:~unifield-team/unifield-server/us-838-new-batch-1
Merge into: lp:unifield-server
Diff against target: 580 lines (+329/-35) (has conflicts)
10 files modified
bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py (+9/-2)
bin/addons/msf_profile/data/patches.xml (+8/-0)
bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv (+3/-3)
bin/addons/product_expiry/product_expiry.py (+176/-0)
bin/addons/specific_rules/specific_rules.py (+1/-1)
bin/addons/specific_rules/specific_rules_view.xml (+0/-2)
bin/addons/stock_batch_recall/product_expiry.py (+46/-16)
bin/addons/sync_so/picking.py (+78/-9)
bin/addons/sync_so/picking_rw.py (+1/-1)
bin/addons/sync_so/specific_xml_id.py (+7/-1)
Text conflict in bin/addons/msf_profile/data/patches.xml
To merge this branch: bzr merge lp:~unifield-team/unifield-server/us-838-new-batch-1
Reviewer: UniField Reviewer Team (review requested, status: Pending)
Review via email: mp+295155@code.launchpad.net

Preview Diff

=== modified file 'bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py'
--- bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2016-03-08 15:48:47 +0000
+++ bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2016-05-23 07:50:39 +0000
@@ -137,10 +137,16 @@
 
         # No batch found, create a new one
         if not lot_ids:
+            # US-838: Add the prefix as instance name of the current instance into the name of the EP object
+            company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
+            prefix = ''
+            if company and company.partner_id:
+                prefix = company.partner_id.name + "_"
+
             vals = {
                 'product_id': product_id,
                 'life_date': expiry_date,
-                'name': seq_obj.get(cr, uid, 'stock.lot.serial'),
+                'name': prefix + seq_obj.get(cr, uid, 'stock.lot.serial'),
                 'type': 'internal',
             }
             lot_id = lot_obj.create(cr, uid, vals, context)
@@ -215,7 +221,8 @@
                     and not line.prodlot_id \
                     and line.expiry_date:
                 if line.type_check == 'in':
-                    prodlot_id = self._get_prodlot_from_expiry_date(cr, uid, line.expiry_date, line.product_id.id, context=context)
+                    # US-838: The method has been moved to addons/stock_batch_recall/product_expiry.py
+                    prodlot_id = self.pool.get('stock.production.lot')._get_prodlot_from_expiry_date(cr, uid, line.expiry_date, line.product_id.id, context=context)
                     in_proc_obj.write(cr, uid, [line.id], {'prodlot_id': prodlot_id}, context=context)
                 else:
                     # Should not be reached thanks to UI checks

=== modified file 'bin/addons/msf_profile/data/patches.xml'
--- bin/addons/msf_profile/data/patches.xml 2016-05-12 12:09:25 +0000
+++ bin/addons/msf_profile/data/patches.xml 2016-05-23 07:50:39 +0000
@@ -33,6 +33,7 @@
             <field name="method">us_1185_patch</field>
         </record>
 
+<<<<<<< TREE
         <record id="us_1061_patch" model="patch.scripts">
             <field name="method">us_1061_patch</field>
         </record>
@@ -40,5 +41,12 @@
         <record id="us_1263_patch" model="patch.scripts">
             <field name="method">us_1263_patch</field>
         </record>
+=======
+        <record id="us_838_patch" model="patch.scripts">
+            <field name="method">us_838_migrate_dup_batch</field>
+            <field name="model">stock.production.lot</field>
+        </record>
+
+>>>>>>> MERGE-SOURCE
     </data>
 </openerp>

=== modified file 'bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv'
--- bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv 2016-04-25 15:54:40 +0000
+++ bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv 2016-05-23 07:50:39 +0000
@@ -10,15 +10,15 @@
 fo_updates_po_ref,TRUE,TRUE,"['name','state','client_order_ref']","['&','&','&',('partner_type','!=','external'),('client_order_ref','!=',False),('split_type_sale_order','=','original_sale_order'),'!',('client_order_ref', 'like', 'invalid_by_recovery')]",partner_id,MISSION,purchase.order.update_fo_ref,sale.order,FO updates PO ref,9,Valid
 update_in_ref,TRUE,TRUE,"['name','shipment_ref']","['&',('shipment_ref','!=',False),'!',('shipment_ref', 'like', 'invalid_by_recovery')]",partner_id,MISSION,stock.picking.update_in_ref,stock.picking,IN updates ref to OUT SHIP,10,Valid
 canceled_fo_cancels_po,TRUE,TRUE,"['name','state', 'client_order_ref']","[('state', '=', 'cancel'), ('client_order_ref', '!=', '')]",partner_id,MISSION,purchase.order.canceled_fo_cancel_po,sale.order,Canceled FO cancels PO,18,Valid
-partial_shipped_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/location_dest_id/usage']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['standard', 'packing']), ('state', '=', 'done'), ('already_shipped', '=', True), ('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.partial_shipped_fo_updates_in_po,stock.picking,Partial shipped at Coordo updates IN at Project,19,Valid
-moves_from_dpo_closed_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'subtype', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/dpo_line_id']","['&', '&', '&', ('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['picking', 'standard']), ('dpo_out', '=', True)]",partner_id,MISSION,stock.picking.partial_shippped_dpo_updates_in_po,stock.picking,Moves from DPO closed at Coordo updates IN at Project,20,Valid
+partial_shipped_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id','move_lines/prodlot_id/name','move_lines/prodlot_id/life_date', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/location_dest_id/usage']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['standard', 'packing']), ('state', '=', 'done'), ('already_shipped', '=', True), ('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.partial_shipped_fo_updates_in_po,stock.picking,Partial shipped at Coordo updates IN at Project,19,Valid
+moves_from_dpo_closed_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'subtype', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id','move_lines/prodlot_id/name','move_lines/prodlot_id/life_date', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/dpo_line_id']","['&', '&', '&', ('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['picking', 'standard']), ('dpo_out', '=', True)]",partner_id,MISSION,stock.picking.partial_shippped_dpo_updates_in_po,stock.picking,Moves from DPO closed at Coordo updates IN at Project,20,Valid
 dpo_service_lines_update_in_at_project,TRUE,TRUE,"['order_id/name', 'order_id/delivery_confirmed_date', 'fake_id', 'origin', 'confirmed_delivery_date', 'name', 'product_uom/id', 'product_uom/name', 'link_sol_id/line_number', 'notes', 'product_qty', 'product_id/name', 'product_id/id']","[('dest_partner_id.partner_type', '=', 'internal'), ('order_id.order_type', '=', 'direct'), ('order_id.state', 'in', ['approved', 'done']), ('product_id.type', 'in', ['service', 'service_recep'])]",dest_partner_id,MISSION,purchase.order.line.confirmed_dpo_service_lines_update_in_po,purchase.order.line,DPO service lines update IN at Project,21,Valid
 cancel_out_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin']","['&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('subtype', '=', 'standard'),('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.cancel_out_pick_cancel_in,stock.picking,Canceled OUT at Coordo cancels IN at Project,22,Valid
 cancel_pick_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('subtype', '=', 'picking'),('backorder_id', '=', False),('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.cancel_out_pick_cancel_in,stock.picking,Canceled PICK at Coordo cancels IN at Project,23,Valid
 cancel_stock_move_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'date_cancel']","['&','&','&','&','&','&',('picking_id.partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('picking_id.state', '=', 'done'),('picking_id.do_not_sync', '=', False),('to_be_sent', '=', True), '&', '|', ('picking_id.subtype', '=', 'picking'), ('picking_id.subtype', '=', 'standard'), ('picking_id.already_shipped', '=', False)]",partner_id,MISSION,stock.picking.cancel_stock_move_of_pick_cancel_in,stock.move,Canceled stock move cancels IN,24,Valid
 closed_in_validates_delivery_out_ship,TRUE,TRUE,"['name', 'state', 'shipment_ref']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'in'),('subtype', 'in', ['standard']), ('state', '=', 'done'), ('shipment_ref', '!=', False), ('dpo_incoming', '=', False)]",partner_id,MISSION,stock.picking.closed_in_validates_delivery_out_ship,stock.picking,Closed IN validates delivery of OUT-SHIP,26,Valid
 closed_in_confirms_dpo_reception,TRUE,TRUE,"['name', 'state', 'dpo_line_id']","['&','&','&','&',('picking_id.partner_type_stock_picking', '!=', 'external'), ('picking_id.type', '=', 'in'), ('picking_id.subtype', 'in', ['standard']), ('state', '=', 'done'), ('dpo_line_id', '!=', 0)]",partner_id,MISSION,stock.picking.closed_in_confirms_dpo_reception,stock.move,Closed IN confirms DPO reception,27,Valid
-create_batch_object,TRUE,TRUE,"['name', 'xmlid_name', 'prefix', 'product_id/id', 'partner_id/id', 'date', 'ref','life_date','sequence_id','type']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_batch_number,stock.production.lot,Create Batch Object,1001,Valid
+create_batch_object,FALSE,TRUE,"['name', 'xmlid_name', 'prefix', 'product_id/id', 'partner_id/id', 'date', 'ref','life_date','sequence_id','type']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_batch_number,stock.production.lot,Create Batch Object,1001,Valid
 create_asset_object,TRUE,TRUE,"['name', 'xmlid_name', 'arrival_date', 'asset_type_id/id', 'partner_id/id', 'brand', 'comment', 'description', 'hq_ref', 'international_po', 'invo_certif_depreciation', 'invo_currency/id', 'invo_date', 'invo_donator_code', 'invo_num', 'invo_supplier', 'invo_value', 'local_ref', 'model', 'orig_mission_code', 'product_id/id', 'project_po', 'receipt_place', 'serial_nb', 'type', 'year']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_asset,product.asset,Create Asset Object,1002,Valid
 reset_ref_by_recovery_mode,TRUE,TRUE,['name'],"[('name', '=', False)]",partner_id,MISSION,sale.order.reset_ref_by_recovery_mode,sale.order,Reset Due to Recovery,1003,Valid
 USB_replicate_po,TRUE,TRUE,"['name', 'analytic_distribution_id/id', 'partner_id/id','pricelist_id/id','delivery_requested_date','details','notes', 'origin', 'categ', 'order_type', 'priority', 'loan_duration', 'is_a_counterpart', 'cross_docking_ok', 'order_line/product_id/id', 'order_line/product_id/name','order_line/id', 'order_line/name', 'order_line/product_qty', 'order_line/product_uom', 'order_line/price_unit', 'order_line/analytic_distribution_id/id','order_line/comment','order_line/have_analytic_distribution_from_header','order_line/line_number', 'order_line/nomen_manda_0/id','order_line/nomen_manda_1/id','order_line/nomen_manda_2/id','order_line/nomen_manda_3/id', 'order_line/sync_order_line_db_id', 'order_line/nomenclature_description','order_line/notes','order_line/default_name','order_line/default_code','order_line/is_line_split','order_line/date_planned','order_line/procurement_id/id']","[('state','in',['approved', 'done'])]",partner_id,USB,purchase.order.usb_replicate_po,purchase.order,USB_replicate_po,2000,Valid

=== modified file 'bin/addons/product_expiry/product_expiry.py'
--- bin/addons/product_expiry/product_expiry.py 2011-01-14 00:11:01 +0000
+++ bin/addons/product_expiry/product_expiry.py 2016-05-23 07:50:39 +0000
@@ -21,9 +21,12 @@
 import datetime
 from osv import fields, osv
 import pooler
+import logging
+
 
 class stock_production_lot(osv.osv):
     _inherit = 'stock.production.lot'
+    _logger = logging.getLogger('------US-838: Migrate duplicate BNs')
 
     def _get_date(dtype):
         """Return a function to compute the limit date for this type"""
@@ -54,6 +57,10 @@
         }
     # Assign dates according to products data
     def create(self, cr, uid, vals, context=None):
+
+        if self.violate_ed_unique(cr, uid, False, vals, context):
+            raise osv.except_osv('Error', 'An expiry date with same date for this product exists already!')
+
         newid = super(stock_production_lot, self).create(cr, uid, vals, context=context)
         obj = self.browse(cr, uid, newid, context=context)
         towrite = []
@@ -66,6 +73,175 @@
             self.write(cr, uid, [obj.id], self.default_get(cr, uid, towrite, context=context))
         return newid
 
+    # US-838: this method is to check if the expiry date values are valid
+    def violate_ed_unique(self, cr, uid, ids, vals, context):
+        if not('product_id' in vals and 'life_date' in vals):
+            return False
+
+        prod_obj = self.pool.get('product.product')
+        prod = prod_obj.browse(cr, uid, vals['product_id'], context=context)
+
+        # In case it's an EP-only product, then search for date and product, no need to search for batch name
+        if prod.perishable and not prod.batch_management:
+            search_arg = [('life_date', '=', vals['life_date']), ('type', '=', 'internal'), ('product_id', '=', prod.id)]
+
+            if ids:  # in case it's a write call, then exclude the current ids
+                search_arg.append(('id', 'not in', ids))
+
+            lot_ids = self.search(cr, uid, search_arg, context=context)
+            if lot_ids:
+                return True
+        return False
+
+    def write(self, cr, uid, ids, vals, context=None):
+        '''
+        force writing of expired_date which is readonly for batch management products
+        '''
+        if context is None:
+            context = {}
+        if isinstance(ids, (int, long)):
+            ids = [ids]
+        # US-838: Check if the values are in conflict with the existing data
+        if self.violate_ed_unique(cr, uid, ids, vals, context):
+            raise osv.except_osv('Error', 'An expiry date with same date for this product exists already!')
+
+        return super(stock_production_lot, self).write(cr, uid, ids, vals, context=context)
+
+    # US-838: migrate all the duplicated batches into a single batch
+    '''
+
+    US-838: The 3 following methods will be moved to the patch call; they are called only when a patch is applied.
+    Check the steps to be executed in the description, but basically it will migrate the references to the wrong BN in relevant objects
+    to the lead BN, then delete these wrong BNs, and finally redefine the unique constraint on the BN table
+
+    methods to move: migrate_dup_batch, remap_reference_tables, update_table
+
+    '''
+    def us_838_migrate_dup_batch(self, cr, uid, *a, **b):
+        '''
+        Steps to do:
+
+        1. Search the list of dup batches, that have the same name + product + xmlname values.
+        2. Go through this list; for each element do the following:
+            2.1. Get the 2 batch ids with the same name, ordered by id ---> the smaller id will be kept, the other will be set as inactive
+            2.2. Search all tables that refer to the bigger_id, then map them to the smaller_id
+            2.3. Set the non-lead batches to become inactive
+            2.4. Update ir_model_data
+        3. Modify the unique constraint to be prod + BN + ED, with no more partner_name involved, because we will not use partner_name anymore
+
+        4. For the messages in the pipeline ---> treated in sync message
+
+        '''
+
+        cr.execute('''select id, name from stock_production_lot where name in
+            (select name from (select name, product_id, count(name) as amount_bn from stock_production_lot group by name, product_id, life_date) as foo_bn where amount_bn>1) order by name, id;''')
+        all_dup_batches = cr.dictfetchall()
+        self._logger.info("__________Start to migrate duplicate batch objects in instance: %s - with total of %s duplicate batches!" % (cr.dbname, len(all_dup_batches)))
+
+        context = {}
+
+        lead_id = 0  # This id will be used as the main batch id
+        to_be_deleted = []
+        same_name = None
+        for r in all_dup_batches:
+            if lead_id == 0:
+                same_name = r['name']
+                lead_id = r['id']
+            else:
+                if same_name == r['name']:  # same batch --> replace in all tables with the lead_id
+                    # Do step 2.2, search the following tables to replace the link to the lead batch
+                    self.remap_reference_tables(cr, uid, r['id'], lead_id, same_name, context)
+
+                    # 2.3: Add this wrong batch id into the list, then delete them at the end
+                    to_be_deleted.append(r['id'])
+                else:
+                    lead_id = r['id']  # when the name changes --> replace by the new lead_id
+                    same_name = r['name']
+
+        # 2.3 call to delete all the wrong batch objects
+        if to_be_deleted:
+            self._logger.info("Delete duplicate batch objects (%s batches - keep only the lead batch)" % len(to_be_deleted))
+            self.unlink(cr, uid, to_be_deleted, context=context)
+        else:
+            self._logger.info("No duplicate batch found for this instance %s.", cr.dbname)
+
+        self._logger.info("Last step: update the unique constraint for the table stock_production_lot.")
+        # 3. Now alter the unique constraint of this table: first drop the current constraint, then create a new one with name+prod+life_date
+        cr.execute('''ALTER TABLE stock_production_lot DROP CONSTRAINT stock_production_lot_batch_name_uniq,
+            ADD CONSTRAINT stock_production_lot_batch_name_uniq UNIQUE (name, product_id, life_date);''')
+
+        self._logger.info("__________Finish the migration task on duplicate batch objects for instance: %s", cr.dbname)
+        return True
+
+    def remap_reference_tables(self, cr, uid, wrong_id, lead_id, batch_name, context=None):
+        '''
+        -- with fkey = prodlot_id (total=13)
+        TABLE "create_picking_move_processor" CONSTRAINT "create_picking_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "export_report_stock_inventory" CONSTRAINT "export_report_stock_inventory_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "export_report_stock_move" CONSTRAINT "export_report_stock_move_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "internal_move_processor" CONSTRAINT "internal_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "outgoing_delivery_move_processor" CONSTRAINT "outgoing_delivery_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "ppl_move_processor" CONSTRAINT "ppl_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "real_average_consumption_line" CONSTRAINT "real_average_consumption_line_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "return_ppl_move_processor" CONSTRAINT "return_ppl_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "stock_move_in_processor" CONSTRAINT "stock_move_in_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "stock_move_processor" CONSTRAINT "stock_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "stock_move" CONSTRAINT "stock_move_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "unconsistent_stock_report_line" CONSTRAINT "unconsistent_stock_report_line_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE CASCADE
+        TABLE "validate_move_processor" CONSTRAINT "validate_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+
+        -- with fkey = lot_id (2)
+        TABLE "stock_production_lot_revision" CONSTRAINT "stock_production_lot_revision_lot_id_fkey" FOREIGN KEY (lot_id) REFERENCES stock_production_lot(id) ON DELETE CASCADE
+        TABLE "product_likely_expire_report_item_line" CONSTRAINT "product_likely_expire_report_item_line_lot_id_fkey" FOREIGN KEY (lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+
+        -- with fkey = prod_lot_id (2)
+        TABLE "stock_inventory_line" CONSTRAINT "stock_inventory_line_prod_lot_id_fkey" FOREIGN KEY (prod_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "initial_stock_inventory_line" CONSTRAINT "initial_stock_inventory_line_prod_lot_id_fkey" FOREIGN KEY (prod_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+
+        -- with fkey = no common name (3)
+        TABLE "claim_product_line" CONSTRAINT "claim_product_line_lot_id_claim_product_line_fkey" FOREIGN KEY (lot_id_claim_product_line) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "composition_kit" CONSTRAINT "composition_kit_composition_lot_id_fkey" FOREIGN KEY (composition_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
+        TABLE "wizard_import_in_line_simulation_screen" CONSTRAINT "wizard_import_in_line_simulation_screen_imp_batch_id_fkey" FOREIGN KEY (imp_batch_id) REFERENCES stock_production_lot(id) ON DELETE SET
+        '''
+        # Tables with foreign key prodlot_id (total 13 tables)
+        self._logger.info("__ Migrating batch number: %s", batch_name)
+        list_table_fields = [
+            ('create_picking_move_processor', 'prodlot_id'),
+            ('export_report_stock_inventory', 'prodlot_id'),
+            ('export_report_stock_move', 'prodlot_id'),
+            ('internal_move_processor', 'prodlot_id'),
+            ('outgoing_delivery_move_processor', 'prodlot_id'),
+            ('ppl_move_processor', 'prodlot_id'),
+            ('real_average_consumption_line', 'prodlot_id'),
+            ('return_ppl_move_processor', 'prodlot_id'),
+            ('stock_move_in_processor', 'prodlot_id'),
+            ('stock_move_processor', 'prodlot_id'),
+            ('stock_move', 'prodlot_id'),
+            ('unconsistent_stock_report_line', 'prodlot_id'),
+            ('validate_move_processor', 'prodlot_id'),
+            ('stock_production_lot_revision', 'lot_id'),
+            ('product_likely_expire_report_item_line', 'lot_id'),
+            ('stock_inventory_line', 'prod_lot_id'),
+            ('initial_stock_inventory_line', 'prod_lot_id'),
+            ('claim_product_line', 'lot_id_claim_product_line'),
+            ('composition_kit', 'composition_lot_id'),
+            ('wizard_import_in_line_simulation_screen', 'imp_batch_id')
+        ]
+        for element in list_table_fields:
+            # Tables with foreign key prod_lot_id (total 2)
+            self.update_table(cr, uid, element[0], element[1], wrong_id, lead_id, batch_name)
+
+
+    def update_table(self, cr, uid, table_name, field_id, wrong_id, lead_id, batch_name):
+        cr.execute('select count(*) as amount from ' + table_name + ' where ' + field_id + ' = %s;' % (wrong_id,))
+        count = cr.fetchone()[0]
+        if count > 0:  # Only update the table if a wrong bn exists
+            self._logger.info("Table %s has %s batch objects (%s) and will be re-mapped." % (table_name, count, batch_name,))
+            sql_update = "update " + table_name + " set " + field_id + "=" + str(lead_id) + " where " + field_id + "=" + str(wrong_id)
+            cr.execute(sql_update)
+        else:
+            self._logger.info("Table %s has NO duplicate batch (%s)." % (table_name, batch_name,))
+
     _defaults = {
         'life_date': _get_date('life_time'),
         'use_date': _get_date('use_time'),

=== modified file 'bin/addons/specific_rules/specific_rules.py'
--- bin/addons/specific_rules/specific_rules.py 2016-04-25 09:24:04 +0000
+++ bin/addons/specific_rules/specific_rules.py 2016-05-23 07:50:39 +0000
@@ -1365,7 +1365,7 @@
         if not batch.delete_ok:
             raise osv.except_osv(_('Error'), _('You cannot remove a batch number which has stock !'))
 
-        return super(stock_production_lot, self).unlink(cr, uid, batch.id, context=context)
+        return super(stock_production_lot, self).unlink(cr, uid, ids, context=context)
 
 
 stock_production_lot()

=== modified file 'bin/addons/specific_rules/specific_rules_view.xml'
--- bin/addons/specific_rules/specific_rules_view.xml 2016-03-17 08:30:04 +0000
+++ bin/addons/specific_rules/specific_rules_view.xml 2016-05-23 07:50:39 +0000
@@ -107,7 +107,6 @@
             <attribute name="colors">red:life_date &lt; current_date</attribute>
         </xpath>
         <field name="ref" position="replace">
-            <field name="partner_name"/>
             <field name="type" />
             <field name="life_date" />
         </field>
@@ -198,7 +197,6 @@
 
         <field name="name" position="replace">
             <field name="name" colspan="2" attrs="{'readonly': [('type', '=', 'internal')]}" />
-            <field name="partner_name" colspan="2" readonly='1'/>
         </field>
 
         <field name="date" position="replace">

=== modified file 'bin/addons/stock_batch_recall/product_expiry.py'
--- bin/addons/stock_batch_recall/product_expiry.py 2014-10-07 12:45:45 +0000
+++ bin/addons/stock_batch_recall/product_expiry.py 2016-05-23 07:50:39 +0000
@@ -67,29 +67,59 @@
         return super(stock_production_lot, self).copy_data(cr, uid, id, default, context=context)
 
     # UF-1617: Handle the instance in the batch number object
-    def create(self, cr, uid, vals, context=None):
+    # US-838: this method is removed in integration, because the 2 fields xmlid_name and partner_name are no longer used
+
+
+# def create(self, cr, uid, vals, context=None):
         '''
         override create method to set the instance id to the current instance if it has not been provided
-        '''
+
+        # UF-2148: make the xmlid_name from batch name for building xmlid if the value is not given in vals
+        if 'product_id' in vals and ('xmlid_name' not in vals or not vals['xmlid_name']):
+            prod_name = self.pool.get('product.product').browse(cr, uid, vals['product_id'], context=context)
+            #US-838: xmlid_name now takes product code, batch name and expiry date as value, this value must be unique (soft constraint)
+            vals['xmlid_name'] = '%s_%s_%s' % (prod_name.default_code, vals['name'], vals['life_date'])
+
         if 'partner_name' not in vals or not vals['partner_name']:
             company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
             if company and company.partner_id:
                 vals['partner_name'] = company.partner_id.name
-
-
-        # UF-2148: make the xmlid_name from batch name for building xmlid if the value is not given in vals
-        if 'product_id' in vals and ('xmlid_name' not in vals or not vals['xmlid_name']):
-            prod_name = self.pool.get('product.product').browse(cr, uid, vals['product_id'], context=context)
-            vals['xmlid_name'] = '%s_%s' % (prod_name.default_code, vals['name'])
-
-        if 'xmlid_name' in vals:
-            exist = self.search(cr, uid, [('xmlid_name', '=', vals['xmlid_name']), ('partner_name', '=', vals['partner_name']), ('product_id', '=', vals['product_id'])], context=context)
-            if exist:
-                # but if the value exist for xmlid_name, then add a suffix to differentiate, no constraint unique required here
-                vals['xmlid_name'] = vals['xmlid_name'] + "_1"
 
         return super(stock_production_lot, self).create(cr, uid, vals, context)
 
+    '''
+
+    # US-838: This method got moved from addons/msf_outgoing/wizard/incoming_shipment_processor.py
+    def _get_prodlot_from_expiry_date(self, cr, uid, expiry_date, product_id, context=None):
+        """
+        Search if an internal batch exists in the system with this expiry date.
+        If no, create the batch.
+        """
+        # Objects
+        seq_obj = self.pool.get('ir.sequence')
+
+        # Double check to find the corresponding batch
+        lot_ids = self.search(cr, uid, [
+            ('life_date', '=', expiry_date),
+            ('type', '=', 'internal'),
+            ('product_id', '=', product_id),
+        ], context=context)
+
+        # No batch found, create a new one
+        if not lot_ids:
+            seq_ed = seq_obj.get(cr, uid, 'stock.lot.serial')
+            vals = {
+                'product_id': product_id,
+                'life_date': expiry_date,
+                'name': seq_ed,
+                'type': 'internal',
+            }
+            lot_id = self.create(cr, uid, vals, context)
+        else:
+            lot_id = lot_ids[0]
+
+        return lot_id
+
     _columns = {
         # renamed from End of Life Date
         'life_date': fields.date('Expiry Date',
@@ -102,8 +132,8 @@
 
         # UF-1617: field only used for sync purpose
         'partner_id': fields.many2one('res.partner', string="Supplier", readonly=True, required=False),
-        'partner_name': fields.char('Partner', size=128, required=True),
-        'xmlid_name': fields.char('XML Code, hidden field', size=128, required=True), # UF-2148, this field is used only for xml_id
+        'partner_name': fields.char('Partner', size=128),
+        'xmlid_name': fields.char('XML Code, hidden field', size=128), # UF-2148, this field is used only for xml_id
         }
 
     _defaults = {

=== modified file 'bin/addons/sync_so/picking.py'
--- bin/addons/sync_so/picking.py 2016-01-22 14:29:49 +0000
+++ bin/addons/sync_so/picking.py 2016-05-23 07:50:39 +0000
@@ -97,7 +97,7 @@
 
         # product
         product_name = data['product_id']['name']
-        product_id = self.pool.get('product.product').find_sd_ref(cr, uid, xmlid_to_sdref(data['product_id']['id']), context=context)
+        product_id = prod_obj.find_sd_ref(cr, uid, xmlid_to_sdref(data['product_id']['id']), context=context)
         if not product_id:
             product_ids = prod_obj.search(cr, uid, [('name', '=', product_name)], context=context)
             if not product_ids:
@@ -112,15 +112,54 @@
         # uom
         uom_id = uom_obj.find_sd_ref(cr, uid, xmlid_to_sdref(data['product_uom']['id']), context=context)
         if not uom_id:
-            raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_name
+            raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_id
 
         # UF-1617: Handle batch and asset object
         batch_id = False
-        if data['prodlot_id']:
-            batch_id = self.pool.get('stock.production.lot').find_sd_ref(cr, uid, xmlid_to_sdref(data['prodlot_id']['id']), context=context)
+        batch_values = data['prodlot_id']
+        if batch_values and product_id:
+            # us-838: WORK IN PROGRESS ..................................
+            # US-838: check first if this product is EP-only? if yes, treat differently, here we treat only for BN
+            prodlot_obj = self.pool.get('stock.production.lot')
+            prod = prod_obj.browse(cr, uid, product_id, context=context)
+
+            '''
+            US-838: The following block is for treating the sync messages in the pipeline!
+            If the sync message was made with the old message rule, then the message contains ONLY the xmlid of the batch, NO life_date.
+            For this case, we have to retrieve the batch name from this xmlid, by using the double product_code in the search.
+            From this batch name + product_id, we can find the batch object in the system. There should only be one batch name for the same product
+            since the migration has already been done, which merged all dup batch names into one.
+
+            The old sync message has the following xmlid format: sd.batch_numer_se_HQ1C1_DORADIDA15T_DORADIDA15T_MSFBN/000005
+            '''
+            xmlid = batch_values['id']
+            if 'life_date' not in batch_values and 'batch_numer' in xmlid: # it must have the 'batch_numer' as prefix
+                prod_code = "_" + prod.default_code + "_" + prod.default_code + "_" # This is how the old xmlid has been made: using double prod.default_code
+                indexOfProdCode = xmlid.find(prod_code) + len(prod_code)
+                batch_name = xmlid[indexOfProdCode:]
+                existing_bn = prodlot_obj.search(cr, uid, [('name', '=', batch_name), ('product_id', '=', product_id)], context=context)
+                if existing_bn:
+                    batch_id = existing_bn[0]
+            else:
+                if prod.perishable and not prod.batch_management:
+                    # In case it's an EP only product, then search for date and product, no need to search for batch name
+                    if 'life_date' in batch_values:
+                        # If name exists in the sync message, search by name and product, not by xmlid
+                        life_date = batch_values['life_date']
+                        # US-838: use different way to retrieve the EP object
+                        batch_id = prodlot_obj._get_prodlot_from_expiry_date(cr, uid, life_date, product_id, context=context)
+                        if not batch_id:
+                            raise Exception, "Error while retrieving or creating the expiry date %s for the product %s" % (batch_values, prod.name)
+                else:
+                    # US-838: for BN, retrieve it or create it, in the following method
+                    batch_id, msg = self.retrieve_batch_number(cr, uid, product_id, batch_values, context) # return False if the batch object is not found, or cannot be created
+
+            ################## TODO: Treat the case for Remote Warehouse: WORK IN PROGRESS BELOW!!!!!!!!!!
+
+
         if not batch_id:
-            raise Exception, "Batch Number %s not found for this sync data record" % data['prodlot_id']
+            raise Exception, "Batch Number %s not found for this sync data record" % batch_values
 
         expired_date = data['expired_date']
 
         # UTP-872: Add also the state into the move line, but if it is done, then change it to assigned (available)
@@ -697,10 +736,12 @@
             return message
 
 
+    #US-838: This method is no longer used; the message will do nothing.
     def create_batch_number(self, cr, uid, source, out_info, context=None):
         if not context:
             context = {}
-        self._logger.info("+++ Create batch number that comes with the SHIP/OUT from %s" % source)
+        self._logger.info("+++ Create batch number that comes with the SHIP/OUT from %s - This message is deprecated." % source)
+
         so_po_common = self.pool.get('so.po.common')
         batch_obj = self.pool.get('stock.production.lot')
 
@@ -733,6 +774,32 @@
         self._logger.info(message)
         return message
 
+    # US-838: Retrieve batch object, if not found then create new
+    def retrieve_batch_number(self, cr, uid, product_id, batch_dict, context=None):
+        if not context:
+            context = {}
+        #self._logger.info("+++ Retrieve batch number for the SHIP/OUT from %s")
+        so_po_common = self.pool.get('so.po.common')
+        batch_obj = self.pool.get('stock.production.lot')
+        prod_obj = self.pool.get('product.product')
+
+        if not ('name' in batch_dict and 'life_date' in batch_dict):
+            # Search for the batch object with the given data
+            return False, "Batch Number: Missing batch name or expiry date!"
+
+        existing_bn = batch_obj.search(cr, uid, [('name', '=', batch_dict['name']), ('product_id', '=', product_id),
+            ('life_date', '=', batch_dict['life_date'])], context=context)
+        if existing_bn: # existed already, then don't need to create a new one
+            message = "Batch object exists in the current system. No new batch created."
+            self._logger.info(message)
+            return existing_bn[0], message
+
+        # If not exists, then create this new batch object
+        new_bn_vals = {'name': batch_dict['name'], 'product_id': product_id, 'life_date': batch_dict['life_date']}
+        message = "The new BN " + batch_dict['name'] + " has been created"
+        self._logger.info(message)
+        bn_id = batch_obj.create(cr, uid, new_bn_vals, context=context)
+        return bn_id, message
 
     def create_asset(self, cr, uid, source, out_info, context=None):
         if not context:
@@ -863,8 +930,10 @@
 
 
         # for each new batch number object and for each partner, create messages and put into the queue for sending on next sync round
-        for item in list_batch:
-            so_po_common.create_message_with_object_and_partner(cr, uid, 1001, item, partner.name, context)
+
+        #US-838: THIS METHOD IS NO MORE USED, AS THE BN OBJECT WILL NOT BE SENT EXPLICITLY, BUT TOGETHER WITH THE MESSAGE!
+        #for item in list_batch:
+        #    so_po_common.create_message_with_object_and_partner(cr, uid, 1001, item, partner.name, context)
 
         # for each new asset object and for each partner, create messages and put into the queue for sending on next sync round
         for item in list_asset:

=== modified file 'bin/addons/sync_so/picking_rw.py'
--- bin/addons/sync_so/picking_rw.py 2016-04-25 15:54:40 +0000
+++ bin/addons/sync_so/picking_rw.py 2016-05-23 07:50:39 +0000
@@ -588,7 +588,7 @@
             raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_name
         uom_id = uom_ids[0]
 
-
+        # US-838: RW, need to check the new mechanism of the BN and ED object!!!!!!!
         batch_id = False
         if data['prodlot_id']:
             batch_id = self.pool.get('stock.production.lot').find_sd_ref(cr, uid, xmlid_to_sdref(data['prodlot_id']['id']), context=context)

=== modified file 'bin/addons/sync_so/specific_xml_id.py'
--- bin/addons/sync_so/specific_xml_id.py 2016-01-25 10:53:13 +0000
+++ bin/addons/sync_so/specific_xml_id.py 2016-05-23 07:50:39 +0000
@@ -787,6 +787,10 @@
 
 product_asset()
 
+
+'''
+
+US-838: xmlid of batch number is no longer used --- REMOVE THIS BLOCK OF CODE WHEN INTEGRATING THE TICKET!
 class batch_number(osv.osv):
     _inherit = "stock.production.lot"
 
@@ -794,10 +798,12 @@
     def get_unique_xml_name(self, cr, uid, uuid, table_name, res_id):
         batch = self.browse(cr, uid, res_id)
         #UF-2148: use the xmlid_name for building the xml for this object
-        return get_valid_xml_name('batch_numer', (batch.partner_name or 'no_partner'), (batch.product_id.code or 'noprod'), (batch.xmlid_name or 'noname'))
+        return get_valid_xml_name('batch_number', (batch.partner_name or 'no_partner'), (batch.product_id.code or 'noprod'), (batch.xmlid_name or 'noname'))
 
 batch_number()
 
+'''
+
 class ir_model_access(osv.osv):
     """
     UF-2146 To allow synchronisation of ir.model.access, must have same sd ref across all instances
