Merge lp:~unifield-team/unifield-server/us-838-new-batch-1 into lp:unifield-server
- us-838-new-batch-1
- Merge into trunk
Proposed by
jftempo
Status: | Merged |
---|---|
Merged at revision: | 3789 |
Proposed branch: | lp:~unifield-team/unifield-server/us-838-new-batch-1 |
Merge into: | lp:unifield-server |
Diff against target: |
580 lines (+329/-35) (has conflicts) 10 files modified
bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py (+9/-2) bin/addons/msf_profile/data/patches.xml (+8/-0) bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv (+3/-3) bin/addons/product_expiry/product_expiry.py (+176/-0) bin/addons/specific_rules/specific_rules.py (+1/-1) bin/addons/specific_rules/specific_rules_view.xml (+0/-2) bin/addons/stock_batch_recall/product_expiry.py (+46/-16) bin/addons/sync_so/picking.py (+78/-9) bin/addons/sync_so/picking_rw.py (+1/-1) bin/addons/sync_so/specific_xml_id.py (+7/-1) Text conflict in bin/addons/msf_profile/data/patches.xml |
To merge this branch: | bzr merge lp:~unifield-team/unifield-server/us-838-new-batch-1 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
UniField Reviewer Team | Pending | ||
Review via email: mp+295155@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py' | |||
2 | --- bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2016-03-08 15:48:47 +0000 | |||
3 | +++ bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2016-05-23 07:50:39 +0000 | |||
4 | @@ -137,10 +137,16 @@ | |||
5 | 137 | 137 | ||
6 | 138 | # No batch found, create a new one | 138 | # No batch found, create a new one |
7 | 139 | if not lot_ids: | 139 | if not lot_ids: |
8 | 140 | # US-838: Add the prefix as instance name of the current instance into the name of the EP object | ||
9 | 141 | company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id | ||
10 | 142 | prefix = '' | ||
11 | 143 | if company and company.partner_id: | ||
12 | 144 | prefix = company.partner_id.name + "_" | ||
13 | 145 | |||
14 | 140 | vals = { | 146 | vals = { |
15 | 141 | 'product_id': product_id, | 147 | 'product_id': product_id, |
16 | 142 | 'life_date': expiry_date, | 148 | 'life_date': expiry_date, |
18 | 143 | 'name': seq_obj.get(cr, uid, 'stock.lot.serial'), | 149 | 'name': prefix + seq_obj.get(cr, uid, 'stock.lot.serial'), |
19 | 144 | 'type': 'internal', | 150 | 'type': 'internal', |
20 | 145 | } | 151 | } |
21 | 146 | lot_id = lot_obj.create(cr, uid, vals, context) | 152 | lot_id = lot_obj.create(cr, uid, vals, context) |
22 | @@ -215,7 +221,8 @@ | |||
23 | 215 | and not line.prodlot_id \ | 221 | and not line.prodlot_id \ |
24 | 216 | and line.expiry_date: | 222 | and line.expiry_date: |
25 | 217 | if line.type_check == 'in': | 223 | if line.type_check == 'in': |
27 | 218 | prodlot_id = self._get_prodlot_from_expiry_date(cr, uid, line.expiry_date, line.product_id.id, context=context) | 224 | # US-838: The method has been moved to addons/stock_batch_recall/product_expiry.py |
28 | 225 | prodlot_id = self.pool.get('stock.production.lot')._get_prodlot_from_expiry_date(cr, uid, line.expiry_date, line.product_id.id, context=context) | ||
29 | 219 | in_proc_obj.write(cr, uid, [line.id], {'prodlot_id': prodlot_id}, context=context) | 226 | in_proc_obj.write(cr, uid, [line.id], {'prodlot_id': prodlot_id}, context=context) |
30 | 220 | else: | 227 | else: |
31 | 221 | # Should not be reached thanks to UI checks | 228 | # Should not be reached thanks to UI checks |
32 | 222 | 229 | ||
33 | === modified file 'bin/addons/msf_profile/data/patches.xml' | |||
34 | --- bin/addons/msf_profile/data/patches.xml 2016-05-12 12:09:25 +0000 | |||
35 | +++ bin/addons/msf_profile/data/patches.xml 2016-05-23 07:50:39 +0000 | |||
36 | @@ -33,6 +33,7 @@ | |||
37 | 33 | <field name="method">us_1185_patch</field> | 33 | <field name="method">us_1185_patch</field> |
38 | 34 | </record> | 34 | </record> |
39 | 35 | 35 | ||
40 | 36 | <<<<<<< TREE | ||
41 | 36 | <record id="us_1061_patch" model="patch.scripts"> | 37 | <record id="us_1061_patch" model="patch.scripts"> |
42 | 37 | <field name="method">us_1061_patch</field> | 38 | <field name="method">us_1061_patch</field> |
43 | 38 | </record> | 39 | </record> |
44 | @@ -40,5 +41,12 @@ | |||
45 | 40 | <record id="us_1263_patch" model="patch.scripts"> | 41 | <record id="us_1263_patch" model="patch.scripts"> |
46 | 41 | <field name="method">us_1263_patch</field> | 42 | <field name="method">us_1263_patch</field> |
47 | 42 | </record> | 43 | </record> |
48 | 44 | ======= | ||
49 | 45 | <record id="us_838_patch" model="patch.scripts"> | ||
50 | 46 | <field name="method">us_838_migrate_dup_batch</field> | ||
51 | 47 | <field name="model">stock.production.lot</field> | ||
52 | 48 | </record> | ||
53 | 49 | |||
54 | 50 | >>>>>>> MERGE-SOURCE | ||
55 | 43 | </data> | 51 | </data> |
56 | 44 | </openerp> | 52 | </openerp> |
57 | 45 | 53 | ||
58 | === modified file 'bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv' | |||
59 | --- bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv 2016-04-25 15:54:40 +0000 | |||
60 | +++ bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv 2016-05-23 07:50:39 +0000 | |||
61 | @@ -10,15 +10,15 @@ | |||
62 | 10 | fo_updates_po_ref,TRUE,TRUE,"['name','state','client_order_ref']","['&','&','&',('partner_type','!=','external'),('client_order_ref','!=',False),('split_type_sale_order','=','original_sale_order'),'!',('client_order_ref', 'like', 'invalid_by_recovery')]",partner_id,MISSION,purchase.order.update_fo_ref,sale.order,FO updates PO ref,9,Valid | 10 | fo_updates_po_ref,TRUE,TRUE,"['name','state','client_order_ref']","['&','&','&',('partner_type','!=','external'),('client_order_ref','!=',False),('split_type_sale_order','=','original_sale_order'),'!',('client_order_ref', 'like', 'invalid_by_recovery')]",partner_id,MISSION,purchase.order.update_fo_ref,sale.order,FO updates PO ref,9,Valid |
63 | 11 | update_in_ref,TRUE,TRUE,"['name','shipment_ref']","['&',('shipment_ref','!=',False),'!',('shipment_ref', 'like', 'invalid_by_recovery')]",partner_id,MISSION,stock.picking.update_in_ref,stock.picking,IN updates ref to OUT SHIP,10,Valid | 11 | update_in_ref,TRUE,TRUE,"['name','shipment_ref']","['&',('shipment_ref','!=',False),'!',('shipment_ref', 'like', 'invalid_by_recovery')]",partner_id,MISSION,stock.picking.update_in_ref,stock.picking,IN updates ref to OUT SHIP,10,Valid |
64 | 12 | canceled_fo_cancels_po,TRUE,TRUE,"['name','state', 'client_order_ref']","[('state', '=', 'cancel'), ('client_order_ref', '!=', '')]",partner_id,MISSION,purchase.order.canceled_fo_cancel_po,sale.order,Canceled FO cancels PO,18,Valid | 12 | canceled_fo_cancels_po,TRUE,TRUE,"['name','state', 'client_order_ref']","[('state', '=', 'cancel'), ('client_order_ref', '!=', '')]",partner_id,MISSION,purchase.order.canceled_fo_cancel_po,sale.order,Canceled FO cancels PO,18,Valid |
67 | 13 | partial_shipped_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/location_dest_id/usage']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['standard', 'packing']), ('state', '=', 'done'), ('already_shipped', '=', True), ('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.partial_shipped_fo_updates_in_po,stock.picking,Partial shipped at Coordo updates IN at Project,19,Valid | 13 | partial_shipped_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id','move_lines/prodlot_id/name','move_lines/prodlot_id/life_date', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/location_dest_id/usage']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['standard', 'packing']), ('state', '=', 'done'), ('already_shipped', '=', True), ('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.partial_shipped_fo_updates_in_po,stock.picking,Partial shipped at Coordo updates IN at Project,19,Valid |
68 | 14 | moves_from_dpo_closed_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'subtype', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/dpo_line_id']","['&', '&', '&', ('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['picking', 'standard']), ('dpo_out', '=', True)]",partner_id,MISSION,stock.picking.partial_shippped_dpo_updates_in_po,stock.picking,Moves from DPO closed at Coordo updates IN at Project,20,Valid | 14 | moves_from_dpo_closed_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'subtype', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id','move_lines/prodlot_id/name','move_lines/prodlot_id/life_date', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/dpo_line_id']","['&', '&', '&', ('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['picking', 'standard']), ('dpo_out', '=', True)]",partner_id,MISSION,stock.picking.partial_shippped_dpo_updates_in_po,stock.picking,Moves from DPO closed at Coordo updates IN at Project,20,Valid |
69 | 15 | dpo_service_lines_update_in_at_project,TRUE,TRUE,"['order_id/name', 'order_id/delivery_confirmed_date', 'fake_id', 'origin', 'confirmed_delivery_date', 'name', 'product_uom/id', 'product_uom/name', 'link_sol_id/line_number', 'notes', 'product_qty', 'product_id/name', 'product_id/id']","[('dest_partner_id.partner_type', '=', 'internal'), ('order_id.order_type', '=', 'direct'), ('order_id.state', 'in', ['approved', 'done']), ('product_id.type', 'in', ['service', 'service_recep'])]",dest_partner_id,MISSION,purchase.order.line.confirmed_dpo_service_lines_update_in_po,purchase.order.line,DPO service lines update IN at Project,21,Valid | 15 | dpo_service_lines_update_in_at_project,TRUE,TRUE,"['order_id/name', 'order_id/delivery_confirmed_date', 'fake_id', 'origin', 'confirmed_delivery_date', 'name', 'product_uom/id', 'product_uom/name', 'link_sol_id/line_number', 'notes', 'product_qty', 'product_id/name', 'product_id/id']","[('dest_partner_id.partner_type', '=', 'internal'), ('order_id.order_type', '=', 'direct'), ('order_id.state', 'in', ['approved', 'done']), ('product_id.type', 'in', ['service', 'service_recep'])]",dest_partner_id,MISSION,purchase.order.line.confirmed_dpo_service_lines_update_in_po,purchase.order.line,DPO service lines update IN at Project,21,Valid |
70 | 16 | cancel_out_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin']","['&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('subtype', '=', 'standard'),('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.cancel_out_pick_cancel_in,stock.picking,Canceled OUT at Coordo cancels IN at Project,22,Valid | 16 | cancel_out_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin']","['&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('subtype', '=', 'standard'),('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.cancel_out_pick_cancel_in,stock.picking,Canceled OUT at Coordo cancels IN at Project,22,Valid |
71 | 17 | cancel_pick_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('subtype', '=', 'picking'),('backorder_id', '=', False),('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.cancel_out_pick_cancel_in,stock.picking,Canceled PICK at Coordo cancels IN at Project,23,Valid | 17 | cancel_pick_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('subtype', '=', 'picking'),('backorder_id', '=', False),('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.cancel_out_pick_cancel_in,stock.picking,Canceled PICK at Coordo cancels IN at Project,23,Valid |
72 | 18 | cancel_stock_move_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'date_cancel']","['&','&','&','&','&','&',('picking_id.partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('picking_id.state', '=', 'done'),('picking_id.do_not_sync', '=', False),('to_be_sent', '=', True), '&', '|', ('picking_id.subtype', '=', 'picking'), ('picking_id.subtype', '=', 'standard'), ('picking_id.already_shipped', '=', False)]",partner_id,MISSION,stock.picking.cancel_stock_move_of_pick_cancel_in,stock.move,Canceled stock move cancels IN,24,Valid | 18 | cancel_stock_move_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'date_cancel']","['&','&','&','&','&','&',('picking_id.partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('picking_id.state', '=', 'done'),('picking_id.do_not_sync', '=', False),('to_be_sent', '=', True), '&', '|', ('picking_id.subtype', '=', 'picking'), ('picking_id.subtype', '=', 'standard'), ('picking_id.already_shipped', '=', False)]",partner_id,MISSION,stock.picking.cancel_stock_move_of_pick_cancel_in,stock.move,Canceled stock move cancels IN,24,Valid |
73 | 19 | closed_in_validates_delivery_out_ship,TRUE,TRUE,"['name', 'state', 'shipment_ref']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'in'),('subtype', 'in', ['standard']), ('state', '=', 'done'), ('shipment_ref', '!=', False), ('dpo_incoming', '=', False)]",partner_id,MISSION,stock.picking.closed_in_validates_delivery_out_ship,stock.picking,Closed IN validates delivery of OUT-SHIP,26,Valid | 19 | closed_in_validates_delivery_out_ship,TRUE,TRUE,"['name', 'state', 'shipment_ref']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'in'),('subtype', 'in', ['standard']), ('state', '=', 'done'), ('shipment_ref', '!=', False), ('dpo_incoming', '=', False)]",partner_id,MISSION,stock.picking.closed_in_validates_delivery_out_ship,stock.picking,Closed IN validates delivery of OUT-SHIP,26,Valid |
74 | 20 | closed_in_confirms_dpo_reception,TRUE,TRUE,"['name', 'state', 'dpo_line_id']","['&','&','&','&',('picking_id.partner_type_stock_picking', '!=', 'external'), ('picking_id.type', '=', 'in'), ('picking_id.subtype', 'in', ['standard']), ('state', '=', 'done'), ('dpo_line_id', '!=', 0)]",partner_id,MISSION,stock.picking.closed_in_confirms_dpo_reception,stock.move,Closed IN confirms DPO reception,27,Valid | 20 | closed_in_confirms_dpo_reception,TRUE,TRUE,"['name', 'state', 'dpo_line_id']","['&','&','&','&',('picking_id.partner_type_stock_picking', '!=', 'external'), ('picking_id.type', '=', 'in'), ('picking_id.subtype', 'in', ['standard']), ('state', '=', 'done'), ('dpo_line_id', '!=', 0)]",partner_id,MISSION,stock.picking.closed_in_confirms_dpo_reception,stock.move,Closed IN confirms DPO reception,27,Valid |
76 | 21 | create_batch_object,TRUE,TRUE,"['name', 'xmlid_name', 'prefix', 'product_id/id', 'partner_id/id', 'date', 'ref','life_date','sequence_id','type']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_batch_number,stock.production.lot,Create Batch Object,1001,Valid | 21 | create_batch_object,FALSE,TRUE,"['name', 'xmlid_name', 'prefix', 'product_id/id', 'partner_id/id', 'date', 'ref','life_date','sequence_id','type']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_batch_number,stock.production.lot,Create Batch Object,1001,Valid |
77 | 22 | create_asset_object,TRUE,TRUE,"['name', 'xmlid_name', 'arrival_date', 'asset_type_id/id', 'partner_id/id', 'brand', 'comment', 'description', 'hq_ref', 'international_po', 'invo_certif_depreciation', 'invo_currency/id', 'invo_date', 'invo_donator_code', 'invo_num', 'invo_supplier', 'invo_value', 'local_ref', 'model', 'orig_mission_code', 'product_id/id', 'project_po', 'receipt_place', 'serial_nb', 'type', 'year']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_asset,product.asset,Create Asset Object,1002,Valid | 22 | create_asset_object,TRUE,TRUE,"['name', 'xmlid_name', 'arrival_date', 'asset_type_id/id', 'partner_id/id', 'brand', 'comment', 'description', 'hq_ref', 'international_po', 'invo_certif_depreciation', 'invo_currency/id', 'invo_date', 'invo_donator_code', 'invo_num', 'invo_supplier', 'invo_value', 'local_ref', 'model', 'orig_mission_code', 'product_id/id', 'project_po', 'receipt_place', 'serial_nb', 'type', 'year']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_asset,product.asset,Create Asset Object,1002,Valid |
78 | 23 | reset_ref_by_recovery_mode,TRUE,TRUE,['name'],"[('name', '=', False)]",partner_id,MISSION,sale.order.reset_ref_by_recovery_mode,sale.order,Reset Due to Recovery,1003,Valid | 23 | reset_ref_by_recovery_mode,TRUE,TRUE,['name'],"[('name', '=', False)]",partner_id,MISSION,sale.order.reset_ref_by_recovery_mode,sale.order,Reset Due to Recovery,1003,Valid |
79 | 24 | USB_replicate_po,TRUE,TRUE,"['name', 'analytic_distribution_id/id', 'partner_id/id','pricelist_id/id','delivery_requested_date','details','notes', 'origin', 'categ', 'order_type', 'priority', 'loan_duration', 'is_a_counterpart', 'cross_docking_ok', 'order_line/product_id/id', 'order_line/product_id/name','order_line/id', 'order_line/name', 'order_line/product_qty', 'order_line/product_uom', 'order_line/price_unit', 'order_line/analytic_distribution_id/id','order_line/comment','order_line/have_analytic_distribution_from_header','order_line/line_number', 'order_line/nomen_manda_0/id','order_line/nomen_manda_1/id','order_line/nomen_manda_2/id','order_line/nomen_manda_3/id', 'order_line/sync_order_line_db_id', 'order_line/nomenclature_description','order_line/notes','order_line/default_name','order_line/default_code','order_line/is_line_split','order_line/date_planned','order_line/procurement_id/id']","[('state','in',['approved', 'done'])]",partner_id,USB,purchase.order.usb_replicate_po,purchase.order,USB_replicate_po,2000,Valid | 24 | USB_replicate_po,TRUE,TRUE,"['name', 'analytic_distribution_id/id', 'partner_id/id','pricelist_id/id','delivery_requested_date','details','notes', 'origin', 'categ', 'order_type', 'priority', 'loan_duration', 'is_a_counterpart', 'cross_docking_ok', 'order_line/product_id/id', 'order_line/product_id/name','order_line/id', 'order_line/name', 'order_line/product_qty', 'order_line/product_uom', 'order_line/price_unit', 'order_line/analytic_distribution_id/id','order_line/comment','order_line/have_analytic_distribution_from_header','order_line/line_number', 'order_line/nomen_manda_0/id','order_line/nomen_manda_1/id','order_line/nomen_manda_2/id','order_line/nomen_manda_3/id', 'order_line/sync_order_line_db_id', 'order_line/nomenclature_description','order_line/notes','order_line/default_name','order_line/default_code','order_line/is_line_split','order_line/date_planned','order_line/procurement_id/id']","[('state','in',['approved', 'done'])]",partner_id,USB,purchase.order.usb_replicate_po,purchase.order,USB_replicate_po,2000,Valid |
80 | 25 | 25 | ||
81 | === modified file 'bin/addons/product_expiry/product_expiry.py' | |||
82 | --- bin/addons/product_expiry/product_expiry.py 2011-01-14 00:11:01 +0000 | |||
83 | +++ bin/addons/product_expiry/product_expiry.py 2016-05-23 07:50:39 +0000 | |||
84 | @@ -21,9 +21,12 @@ | |||
85 | 21 | import datetime | 21 | import datetime |
86 | 22 | from osv import fields, osv | 22 | from osv import fields, osv |
87 | 23 | import pooler | 23 | import pooler |
88 | 24 | import logging | ||
89 | 25 | |||
90 | 24 | 26 | ||
91 | 25 | class stock_production_lot(osv.osv): | 27 | class stock_production_lot(osv.osv): |
92 | 26 | _inherit = 'stock.production.lot' | 28 | _inherit = 'stock.production.lot' |
93 | 29 | _logger = logging.getLogger('------US-838: Migrate duplicate BNs') | ||
94 | 27 | 30 | ||
95 | 28 | def _get_date(dtype): | 31 | def _get_date(dtype): |
96 | 29 | """Return a function to compute the limit date for this type""" | 32 | """Return a function to compute the limit date for this type""" |
97 | @@ -54,6 +57,10 @@ | |||
98 | 54 | } | 57 | } |
99 | 55 | # Assign dates according to products data | 58 | # Assign dates according to products data |
100 | 56 | def create(self, cr, uid, vals, context=None): | 59 | def create(self, cr, uid, vals, context=None): |
101 | 60 | |||
102 | 61 | if self.violate_ed_unique(cr, uid, False, vals, context): | ||
103 | 62 | raise osv.except_osv('Error', 'An expiry date with same date for this product exists already!.') | ||
104 | 63 | |||
105 | 57 | newid = super(stock_production_lot, self).create(cr, uid, vals, context=context) | 64 | newid = super(stock_production_lot, self).create(cr, uid, vals, context=context) |
106 | 58 | obj = self.browse(cr, uid, newid, context=context) | 65 | obj = self.browse(cr, uid, newid, context=context) |
107 | 59 | towrite = [] | 66 | towrite = [] |
108 | @@ -66,6 +73,175 @@ | |||
109 | 66 | self.write(cr, uid, [obj.id], self.default_get(cr, uid, towrite, context=context)) | 73 | self.write(cr, uid, [obj.id], self.default_get(cr, uid, towrite, context=context)) |
110 | 67 | return newid | 74 | return newid |
111 | 68 | 75 | ||
112 | 76 | # US-838: this method is to check if the expiry date values are valid | ||
113 | 77 | def violate_ed_unique(self, cr, uid, ids, vals, context): | ||
114 | 78 | if not('product_id' in vals and 'life_date' in vals): | ||
115 | 79 | return False | ||
116 | 80 | |||
117 | 81 | prod_obj = self.pool.get('product.product') | ||
118 | 82 | prod = prod_obj.browse(cr, uid, vals['product_id'], context=context) | ||
119 | 83 | |||
120 | 84 | # In case it's a EP only product, then search for date and product, no need to search for batch name | ||
121 | 85 | if prod.perishable and not prod.batch_management: | ||
122 | 86 | search_arg = [('life_date', '=', vals['life_date']), ('type', '=', 'internal'), ('product_id', '=', prod.id)] | ||
123 | 87 | |||
124 | 88 | if ids: # in case it's a write call, then exclude the current ids | ||
125 | 89 | search_arg.append(('id', 'not in', ids)) | ||
126 | 90 | |||
127 | 91 | lot_ids = self.search(cr, uid, search_arg, context=context) | ||
128 | 92 | if lot_ids: | ||
129 | 93 | return True | ||
130 | 94 | return False | ||
131 | 95 | |||
132 | 96 | def write(self, cr, uid, ids, vals, context=None): | ||
133 | 97 | ''' | ||
134 | 98 | force writing of expired_date which is readonly for batch management products | ||
135 | 99 | ''' | ||
136 | 100 | if context is None: | ||
137 | 101 | context = {} | ||
138 | 102 | if isinstance(ids, (int, long)): | ||
139 | 103 | ids = [ids] | ||
140 | 104 | # US-838: Check if the values are in conflict with the existing data | ||
141 | 105 | if self.violate_ed_unique(cr, uid, ids, vals, context): | ||
142 | 106 | raise osv.except_osv('Error', 'An expiry date with same date for this product exists already!') | ||
143 | 107 | |||
144 | 108 | return super(stock_production_lot, self).write(cr, uid, ids, vals, context=context) | ||
145 | 109 | |||
146 | 110 | #US-838: migrate all the duplicated batch into single batch | ||
147 | 111 | ''' | ||
148 | 112 | |||
149 | 113 | US-838: The 3 following methods will be moved to the patch call, it is called only when a patch is applied. | ||
150 | 114 | Check the steps to be executed in the description, but basically it will migrate the references to the wrong BN in relevant objects | ||
151 | 115 | to the lead BN, then delete these wrong BNs, and finally redefine the unique constraint on the table BN | ||
152 | 116 | |||
153 | 117 | method to move: migrate_dup_batch, remap_reference_tables, update_table | ||
154 | 118 | |||
155 | 119 | ''' | ||
156 | 120 | def us_838_migrate_dup_batch(self, cr, uid, *a, **b): | ||
157 | 121 | ''' | ||
158 | 122 | Step to do: | ||
159 | 123 | |||
160 | 124 | 1. Search list of dup batches, that have the same name + product + xmlname values. | ||
161 | 125 | 2. Go through this list, for each element do the following: | ||
162 | 126 | 2.1. Get the 2 batch id of the same name, order by id ---> the smaller id will be kept, the other will be set as inactive | ||
163 | 127 | 2.2. Search all tables that refer to the bigger_id, then map them to the smaller_id | ||
164 | 128 | 2.3. Set the non-lead batches to become inactive | ||
165 | 129 | 2.4. Update ir_model_data | ||
166 | 130 | 3. Modify the unique constraint to be prod + BN + ED, and no more partner_name involved, because we will not use partner_name anymore | ||
167 | 131 | |||
168 | 132 | 4. For the messages in the pipeline ---> treated in sync message | ||
169 | 133 | |||
170 | 134 | ''' | ||
171 | 135 | |||
172 | 136 | cr.execute('''select id, name from stock_production_lot where name in | ||
173 | 137 | (select name from (select name, product_id, count(name) as amount_bn from stock_production_lot group by name, product_id, life_date) as foo_bn where amount_bn>1) order by name, id;''') | ||
174 | 138 | all_dup_batches = cr.dictfetchall() | ||
175 | 139 | self._logger.info("__________Start to migrate duplicate batch objects in instance: %s - with total of %s duplicate batches!" % (cr.dbname, len(all_dup_batches))) | ||
176 | 140 | |||
177 | 141 | context = {} | ||
178 | 142 | |||
179 | 143 | lead_id = 0 # This id will be used as the main batch id | ||
180 | 144 | to_be_deleted = [] | ||
181 | 145 | same_name = None | ||
182 | 146 | for r in all_dup_batches: | ||
183 | 147 | if lead_id == 0: | ||
184 | 148 | same_name = r['name'] | ||
185 | 149 | lead_id = r['id'] | ||
186 | 150 | else: | ||
187 | 151 | if same_name == r['name']: # same batch --> replace in all table to the lead_id | ||
188 | 152 | # Do step 2.2, search the following tables to replace the link to the | ||
189 | 153 | self.remap_reference_tables(cr, uid, r['id'], lead_id, same_name, context) | ||
190 | 154 | |||
191 | 155 | # 2.3: Add this wrong batch id into the list, then delete them at the end | ||
192 | 156 | to_be_deleted.append(r['id']) | ||
193 | 157 | else: | ||
194 | 158 | lead_id = r['id'] # when the name change --> replace by the new lead_id | ||
195 | 159 | same_name = r['name'] | ||
196 | 160 | |||
197 | 161 | # 2.3 call to delete all the wrong batch objects | ||
198 | 162 | if to_be_deleted: | ||
199 | 163 | self._logger.info("Delete duplicate batch objects (%s batches - keep only the lead batch)" % len(to_be_deleted)) | ||
200 | 164 | self.unlink(cr, uid, to_be_deleted, context=context) | ||
201 | 165 | else: | ||
202 | 166 | self._logger.info("No duplicate batch found for this instance %s.", cr.dbname) | ||
203 | 167 | |||
204 | 168 | self._logger.info("Last step: update the unique constraint for the table stock_production_lot.") | ||
205 | 169 | # 3. Now alter the constraint unique of this table: first drop the current constraint, then create a new one with name+prod+life_date | ||
206 | 170 | cr.execute('''ALTER TABLE stock_production_lot DROP CONSTRAINT stock_production_lot_batch_name_uniq, | ||
207 | 171 | ADD CONSTRAINT stock_production_lot_batch_name_uniq UNIQUE (name, product_id, life_date);''') | ||
208 | 172 | |||
209 | 173 | self._logger.info("__________Finish the migration task on duplicate batch objects for instance: %s", cr.dbname) | ||
210 | 174 | return True | ||
211 | 175 | |||
212 | 176 | def remap_reference_tables(self, cr, uid, wrong_id, lead_id, batch_name, context=None): | ||
213 | 177 | ''' | ||
214 | 178 | -- with fkey = prodlot_id (total=13) | ||
215 | 179 | TABLE "create_picking_move_processor" CONSTRAINT "create_picking_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
216 | 180 | TABLE "export_report_stock_inventory" CONSTRAINT "export_report_stock_inventory_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
217 | 181 | TABLE "export_report_stock_move" CONSTRAINT "export_report_stock_move_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
218 | 182 | TABLE "internal_move_processor" CONSTRAINT "internal_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
219 | 183 | TABLE "outgoing_delivery_move_processor" CONSTRAINT "outgoing_delivery_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
220 | 184 | TABLE "ppl_move_processor" CONSTRAINT "ppl_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
221 | 185 | TABLE "real_average_consumption_line" CONSTRAINT "real_average_consumption_line_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
222 | 186 | TABLE "return_ppl_move_processor" CONSTRAINT "return_ppl_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
223 | 187 | TABLE "stock_move_in_processor" CONSTRAINT "stock_move_in_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
224 | 188 | TABLE "stock_move_processor" CONSTRAINT "stock_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
225 | 189 | TABLE "stock_move" CONSTRAINT "stock_move_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
226 | 190 | TABLE "unconsistent_stock_report_line" CONSTRAINT "unconsistent_stock_report_line_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE CASCADE | ||
227 | 191 | TABLE "validate_move_processor" CONSTRAINT "validate_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
228 | 192 | |||
229 | 193 | -- with fkey = lot_id (2) | ||
230 | 194 | TABLE "stock_production_lot_revision" CONSTRAINT "stock_production_lot_revision_lot_id_fkey" FOREIGN KEY (lot_id) REFERENCES stock_production_lot(id) ON DELETE CASCADE | ||
231 | 195 | TABLE "product_likely_expire_report_item_line" CONSTRAINT "product_likely_expire_report_item_line_lot_id_fkey" FOREIGN KEY (lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
232 | 196 | |||
233 | 197 | -- with fkey = prod_lot_id (2) | ||
234 | 198 | TABLE "stock_inventory_line" CONSTRAINT "stock_inventory_line_prod_lot_id_fkey" FOREIGN KEY (prod_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
235 | 199 | TABLE "initial_stock_inventory_line" CONSTRAINT "initial_stock_inventory_line_prod_lot_id_fkey" FOREIGN KEY (prod_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
236 | 200 | |||
237 | 201 | -- with fkey = no common name (3) | ||
238 | 202 | TABLE "claim_product_line" CONSTRAINT "claim_product_line_lot_id_claim_product_line_fkey" FOREIGN KEY (lot_id_claim_product_line) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
239 | 203 | TABLE "composition_kit" CONSTRAINT "composition_kit_composition_lot_id_fkey" FOREIGN KEY (composition_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL | ||
240 | 204 | TABLE "wizard_import_in_line_simulation_screen" CONSTRAINT "wizard_import_in_line_simulation_screen_imp_batch_id_fkey" FOREIGN KEY (imp_batch_id) REFERENCES stock_production_lot(id) ON DELETE SET | ||
241 | 205 | ''' | ||
242 | 206 | # Tables with foreign key prodlot_id (total 13 tables) | ||
243 | 207 | self._logger.info("__ Migrating batch number: %s", batch_name) | ||
244 | 208 | list_table_fields = [ | ||
245 | 209 | ('create_picking_move_processor', 'prodlot_id'), | ||
246 | 210 | ('export_report_stock_inventory', 'prodlot_id'), | ||
247 | 211 | ('export_report_stock_move', 'prodlot_id'), | ||
248 | 212 | ('internal_move_processor', 'prodlot_id'), | ||
249 | 213 | ('outgoing_delivery_move_processor', 'prodlot_id'), | ||
250 | 214 | ('ppl_move_processor', 'prodlot_id'), | ||
251 | 215 | ('real_average_consumption_line', 'prodlot_id'), | ||
252 | 216 | ('return_ppl_move_processor', 'prodlot_id'), | ||
253 | 217 | ('stock_move_in_processor', 'prodlot_id'), | ||
254 | 218 | ('stock_move_processor', 'prodlot_id'), | ||
255 | 219 | ('stock_move', 'prodlot_id'), | ||
256 | 220 | ('unconsistent_stock_report_line', 'prodlot_id'), | ||
257 | 221 | ('validate_move_processor', 'prodlot_id'), | ||
258 | 222 | ('stock_production_lot_revision', 'lot_id'), | ||
259 | 223 | ('product_likely_expire_report_item_line', 'lot_id'), | ||
260 | 224 | ('stock_inventory_line', 'prod_lot_id'), | ||
261 | 225 | ('initial_stock_inventory_line', 'prod_lot_id'), | ||
262 | 226 | ('claim_product_line', 'lot_id_claim_product_line'), | ||
263 | 227 | ('composition_kit', 'composition_lot_id'), | ||
264 | 228 | ('wizard_import_in_line_simulation_screen', 'imp_batch_id') | ||
265 | 229 | ] | ||
266 | 230 | for element in list_table_fields: | ||
267 | 231 | # Tables with foreign key prod_lot_id (total 2) | ||
268 | 232 | self.update_table(cr, uid, element[0] , element[1], wrong_id, lead_id, batch_name) | ||
269 | 233 | |||
270 | 234 | |||
271 | 235 | def update_table(self, cr, uid, table_name, field_id, wrong_id, lead_id, batch_name): | ||
272 | 236 | cr.execute('select count(*) as amount from ' + table_name + ' where ' + field_id + ' = %s;' %(wrong_id,)) | ||
273 | 237 | count = cr.fetchone()[0] | ||
274 | 238 | if count > 0: # Only update the table if wrong bn exists | ||
275 | 239 | self._logger.info("Table %s has %s batch objects (%s) and will be-mapped." %(table_name, count, batch_name,)) | ||
276 | 240 | sql_update = "update " + table_name + " set " + field_id + "=" + str(lead_id) + " where " + field_id + "=" + str(wrong_id) | ||
277 | 241 | cr.execute(sql_update) | ||
278 | 242 | else: | ||
279 | 243 | self._logger.info("Table %s has NO duplicate batch (%s)." %(table_name, batch_name,)) | ||
280 | 244 | |||
281 | 69 | _defaults = { | 245 | _defaults = { |
282 | 70 | 'life_date': _get_date('life_time'), | 246 | 'life_date': _get_date('life_time'), |
283 | 71 | 'use_date': _get_date('use_time'), | 247 | 'use_date': _get_date('use_time'), |
284 | 72 | 248 | ||
285 | === modified file 'bin/addons/specific_rules/specific_rules.py' | |||
286 | --- bin/addons/specific_rules/specific_rules.py 2016-04-25 09:24:04 +0000 | |||
287 | +++ bin/addons/specific_rules/specific_rules.py 2016-05-23 07:50:39 +0000 | |||
288 | @@ -1365,7 +1365,7 @@ | |||
289 | 1365 | if not batch.delete_ok: | 1365 | if not batch.delete_ok: |
290 | 1366 | raise osv.except_osv(_('Error'), _('You cannot remove a batch number which has stock !')) | 1366 | raise osv.except_osv(_('Error'), _('You cannot remove a batch number which has stock !')) |
291 | 1367 | 1367 | ||
293 | 1368 | return super(stock_production_lot, self).unlink(cr, uid, batch.id, context=context) | 1368 | return super(stock_production_lot, self).unlink(cr, uid, ids, context=context) |
294 | 1369 | 1369 | ||
295 | 1370 | 1370 | ||
296 | 1371 | stock_production_lot() | 1371 | stock_production_lot() |
297 | 1372 | 1372 | ||
298 | === modified file 'bin/addons/specific_rules/specific_rules_view.xml' | |||
299 | --- bin/addons/specific_rules/specific_rules_view.xml 2016-03-17 08:30:04 +0000 | |||
300 | +++ bin/addons/specific_rules/specific_rules_view.xml 2016-05-23 07:50:39 +0000 | |||
301 | @@ -107,7 +107,6 @@ | |||
302 | 107 | <attribute name="colors">red:life_date < current_date</attribute> | 107 | <attribute name="colors">red:life_date < current_date</attribute> |
303 | 108 | </xpath> | 108 | </xpath> |
304 | 109 | <field name="ref" position="replace"> | 109 | <field name="ref" position="replace"> |
305 | 110 | <field name="partner_name"/> | ||
306 | 111 | <field name="type" /> | 110 | <field name="type" /> |
307 | 112 | <field name="life_date" /> | 111 | <field name="life_date" /> |
308 | 113 | </field> | 112 | </field> |
309 | @@ -198,7 +197,6 @@ | |||
310 | 198 | 197 | ||
311 | 199 | <field name="name" position="replace"> | 198 | <field name="name" position="replace"> |
312 | 200 | <field name="name" colspan="2" attrs="{'readonly': [('type', '=', 'internal')]}" /> | 199 | <field name="name" colspan="2" attrs="{'readonly': [('type', '=', 'internal')]}" /> |
313 | 201 | <field name="partner_name" colspan="2" readonly='1'/> | ||
314 | 202 | </field> | 200 | </field> |
315 | 203 | 201 | ||
316 | 204 | <field name="date" position="replace"> | 202 | <field name="date" position="replace"> |
317 | 205 | 203 | ||
318 | === modified file 'bin/addons/stock_batch_recall/product_expiry.py' | |||
319 | --- bin/addons/stock_batch_recall/product_expiry.py 2014-10-07 12:45:45 +0000 | |||
320 | +++ bin/addons/stock_batch_recall/product_expiry.py 2016-05-23 07:50:39 +0000 | |||
321 | @@ -67,29 +67,59 @@ | |||
322 | 67 | return super(stock_production_lot, self).copy_data(cr, uid, id, default, context=context) | 67 | return super(stock_production_lot, self).copy_data(cr, uid, id, default, context=context) |
323 | 68 | 68 | ||
324 | 69 | # UF-1617: Handle the instance in the batch number object | 69 | # UF-1617: Handle the instance in the batch number object |
326 | 70 | def create(self, cr, uid, vals, context=None): | 70 | # US-838: this method is removed in integration, because the 2 fields are no longer used, xmlid_name and partner name | ||
327 | 71 | |||
328 | 72 | |||
329 | 73 | # def create(self, cr, uid, vals, context=None): | ||
330 | 71 | ''' | 74 | ''' |
331 | 72 | override create method to set the instance id to the current instance if it has not been provided | 75 | override create method to set the instance id to the current instance if it has not been provided |
333 | 73 | ''' | 76 | |
334 | 77 | # UF-2148: make the xmlid_name from batch name for building xmlid if the value is not given in vals | ||
335 | 78 | if 'product_id' in vals and ('xmlid_name' not in vals or not vals['xmlid_name']): | ||
336 | 79 | prod_name = self.pool.get('product.product').browse(cr, uid, vals['product_id'], context=context) | ||
337 | 80 | #US-838: xmlid_name now takes product code, batch name and expiry date as value, this value must be unique (soft constraint) | ||
338 | 81 | vals['xmlid_name'] = '%s_%s_%s' % (prod_name.default_code, vals['name'], vals['life_date']) | ||
339 | 82 | |||
340 | 74 | if 'partner_name' not in vals or not vals['partner_name']: | 83 | if 'partner_name' not in vals or not vals['partner_name']: |
341 | 75 | company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id | 84 | company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id |
342 | 76 | if company and company.partner_id: | 85 | if company and company.partner_id: |
343 | 77 | vals['partner_name'] = company.partner_id.name | 86 | vals['partner_name'] = company.partner_id.name |
344 | 78 | |||
345 | 79 | |||
346 | 80 | # UF-2148: make the xmlid_name from batch name for building xmlid if the value is not given in vals | ||
347 | 81 | if 'product_id' in vals and ('xmlid_name' not in vals or not vals['xmlid_name']): | ||
348 | 82 | prod_name = self.pool.get('product.product').browse(cr, uid, vals['product_id'], context=context) | ||
349 | 83 | vals['xmlid_name'] = '%s_%s' % (prod_name.default_code, vals['name']) | ||
350 | 84 | |||
351 | 85 | if 'xmlid_name' in vals: | ||
352 | 86 | exist = self.search(cr, uid, [('xmlid_name', '=', vals['xmlid_name']), ('partner_name', '=', vals['partner_name']), ('product_id', '=', vals['product_id'])], context=context) | ||
353 | 87 | if exist: | ||
354 | 88 | # but if the value exist for xmlid_name, then add a suffix to differentiate, no constraint unique required here | ||
355 | 89 | vals['xmlid_name'] = vals['xmlid_name'] + "_1" | ||
356 | 90 | 87 | ||
357 | 91 | return super(stock_production_lot, self).create(cr, uid, vals, context) | 88 | return super(stock_production_lot, self).create(cr, uid, vals, context) |
358 | 92 | 89 | ||
359 | 90 | ''' | ||
360 | 91 | |||
361 | 92 | # US-838: This method got moved from addons/msf_outgoing/wizard/incoming_shipment_processor.py | ||
362 | 93 | def _get_prodlot_from_expiry_date(self, cr, uid, expiry_date, product_id, context=None): | ||
363 | 94 | """ | ||
364 | 95 | Search if an internal batch exists in the system with this expiry date. | ||
365 | 96 | If no, create the batch. | ||
366 | 97 | """ | ||
367 | 98 | # Objects | ||
368 | 99 | seq_obj = self.pool.get('ir.sequence') | ||
369 | 100 | |||
370 | 101 | # Double check to find the corresponding batch | ||
371 | 102 | lot_ids = self.search(cr, uid, [ | ||
372 | 103 | ('life_date', '=', expiry_date), | ||
373 | 104 | ('type', '=', 'internal'), | ||
374 | 105 | ('product_id', '=', product_id), | ||
375 | 106 | ], context=context) | ||
376 | 107 | |||
377 | 108 | # No batch found, create a new one | ||
378 | 109 | if not lot_ids: | ||
379 | 110 | seq_ed = seq_obj.get(cr, uid, 'stock.lot.serial') | ||
380 | 111 | vals = { | ||
381 | 112 | 'product_id': product_id, | ||
382 | 113 | 'life_date': expiry_date, | ||
383 | 114 | 'name': seq_ed, | ||
384 | 115 | 'type': 'internal', | ||
385 | 116 | } | ||
386 | 117 | lot_id = self.create(cr, uid, vals, context) | ||
387 | 118 | else: | ||
388 | 119 | lot_id = lot_ids[0] | ||
389 | 120 | |||
390 | 121 | return lot_id | ||
391 | 122 | |||
392 | 93 | _columns = { | 123 | _columns = { |
393 | 94 | # renamed from End of Life Date | 124 | # renamed from End of Life Date |
394 | 95 | 'life_date': fields.date('Expiry Date', | 125 | 'life_date': fields.date('Expiry Date', |
395 | @@ -102,8 +132,8 @@ | |||
396 | 102 | 132 | ||
397 | 103 | # UF-1617: field only used for sync purpose | 133 | # UF-1617: field only used for sync purpose |
398 | 104 | 'partner_id': fields.many2one('res.partner', string="Supplier", readonly=True, required=False), | 134 | 'partner_id': fields.many2one('res.partner', string="Supplier", readonly=True, required=False), |
401 | 105 | 'partner_name': fields.char('Partner', size=128, required=True), | 135 | 'partner_name': fields.char('Partner', size=128), |
402 | 106 | 'xmlid_name': fields.char('XML Code, hidden field', size=128, required=True), # UF-2148, this field is used only for xml_id | 136 | 'xmlid_name': fields.char('XML Code, hidden field', size=128), # UF-2148, this field is used only for xml_id |
403 | 107 | } | 137 | } |
404 | 108 | 138 | ||
405 | 109 | _defaults = { | 139 | _defaults = { |
406 | 110 | 140 | ||
407 | === modified file 'bin/addons/sync_so/picking.py' | |||
408 | --- bin/addons/sync_so/picking.py 2016-01-22 14:29:49 +0000 | |||
409 | +++ bin/addons/sync_so/picking.py 2016-05-23 07:50:39 +0000 | |||
410 | @@ -97,7 +97,7 @@ | |||
411 | 97 | 97 | ||
412 | 98 | # product | 98 | # product |
413 | 99 | product_name = data['product_id']['name'] | 99 | product_name = data['product_id']['name'] |
415 | 100 | product_id = self.pool.get('product.product').find_sd_ref(cr, uid, xmlid_to_sdref(data['product_id']['id']), context=context) | 100 | product_id = prod_obj.find_sd_ref(cr, uid, xmlid_to_sdref(data['product_id']['id']), context=context) |
416 | 101 | if not product_id: | 101 | if not product_id: |
417 | 102 | product_ids = prod_obj.search(cr, uid, [('name', '=', product_name)], context=context) | 102 | product_ids = prod_obj.search(cr, uid, [('name', '=', product_name)], context=context) |
418 | 103 | if not product_ids: | 103 | if not product_ids: |
419 | @@ -112,15 +112,54 @@ | |||
420 | 112 | # uom | 112 | # uom |
421 | 113 | uom_id = uom_obj.find_sd_ref(cr, uid, xmlid_to_sdref(data['product_uom']['id']), context=context) | 113 | uom_id = uom_obj.find_sd_ref(cr, uid, xmlid_to_sdref(data['product_uom']['id']), context=context) |
422 | 114 | if not uom_id: | 114 | if not uom_id: |
424 | 115 | raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_name | 115 | raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_id |
425 | 116 | 116 | ||
426 | 117 | # UF-1617: Handle batch and asset object | 117 | # UF-1617: Handle batch and asset object |
427 | 118 | batch_id = False | 118 | batch_id = False |
430 | 119 | if data['prodlot_id']: | 119 | batch_values = data['prodlot_id'] |
431 | 120 | batch_id = self.pool.get('stock.production.lot').find_sd_ref(cr, uid, xmlid_to_sdref(data['prodlot_id']['id']), context=context) | 120 | if batch_values and product_id: |
432 | 121 | # us-838: WORK IN PROGRESS .................................. | ||
433 | 122 | # US-838: check first if this product is EP-only? if yes, treat differently, here we treat only for BN | ||
434 | 123 | prodlot_obj = self.pool.get('stock.production.lot') | ||
435 | 124 | prod = prod_obj.browse(cr, uid,product_id,context=context) | ||
436 | 125 | |||
437 | 126 | ''' | ||
438 | 127 | US-838: The following block is for treating the sync message in pipeline! | ||
439 | 128 | If the sync message was made with old message rule, then in the message it contains ONLY the xmlid of the batch, NO life_date. | ||
440 | 129 | For this case, we have to retrieve the batch name from this xmlid, by using the double product_code in the search. | ||
441 | 130 | From this batch name + product_id, we can find the batch object in the system. There should only be one batch name for the same product | ||
442 | 131 | since the migration has already been done, which merged all duplicate batch names into one. | ||
443 | 132 | |||
444 | 133 | The old sync message has the following xmlid format: sd.batch_numer_se_HQ1C1_DORADIDA15T_DORADIDA15T_MSFBN/000005 | ||
445 | 134 | ''' | ||
446 | 135 | xmlid = batch_values['id'] | ||
447 | 136 | if 'life_date' not in batch_values and 'batch_numer' in xmlid: # it must have the 'batch_numer' as prefix | ||
448 | 137 | prod_code = "_" + prod.default_code + "_" + prod.default_code + "_" # This is how the old xmlid has been made: using double prod.default_code | ||
449 | 138 | indexOfProdCode = xmlid.find(prod_code) + len(prod_code) | ||
450 | 139 | batch_name = xmlid[indexOfProdCode:] | ||
451 | 140 | existing_bn = prodlot_obj.search(cr, uid, [('name', '=', batch_name), ('product_id', '=', product_id)], context=context) | ||
452 | 141 | if existing_bn: | ||
453 | 142 | batch_id = existing_bn[0] | ||
454 | 143 | else: | ||
455 | 144 | if prod.perishable and not prod.batch_management: | ||
456 | 145 | # In case it's an EP-only product, then search for date and product, no need to search for batch name | ||
457 | 146 | if 'life_date' in batch_values: | ||
458 | 147 | # If name exists in the sync message, search by name and product, not by xmlid | ||
459 | 148 | life_date = batch_values['life_date'] | ||
460 | 149 | # US-838: use different way to retrieve the EP object | ||
461 | 150 | batch_id = prodlot_obj._get_prodlot_from_expiry_date(cr, uid, life_date, product_id, context=context) | ||
462 | 151 | if not batch_id: | ||
463 | 152 | raise Exception, "Error while retrieving or creating the expiry date %s for the product %s" % (batch_values, prod.name) | ||
464 | 153 | else: | ||
465 | 154 | # US-838: for BN, retrieve it or create it, in the following method | ||
466 | 155 | batch_id, msg = self.retrieve_batch_number(cr, uid, product_id, batch_values, context) # return False if the batch object is not found, or cannot be created | ||
467 | 156 | |||
468 | 157 | ################## TODO: Treat the case for Remote Warehouse: WORK IN PROGRESS BELOW!!!!!!!!!! | ||
469 | 158 | |||
470 | 159 | |||
471 | 121 | if not batch_id: | 160 | if not batch_id: |
474 | 122 | raise Exception, "Batch Number %s not found for this sync data record" % data['prodlot_id'] | 161 | raise Exception, "Batch Number %s not found for this sync data record" % batch_values |
475 | 123 | 162 | ||
476 | 124 | expired_date = data['expired_date'] | 163 | expired_date = data['expired_date'] |
477 | 125 | 164 | ||
478 | 126 | # UTP-872: Add also the state into the move line, but if it is done, then change it to assigned (available) | 165 | # UTP-872: Add also the state into the move line, but if it is done, then change it to assigned (available) |
479 | @@ -697,10 +736,12 @@ | |||
480 | 697 | return message | 736 | return message |
481 | 698 | 737 | ||
482 | 699 | 738 | ||
483 | 739 | #US-838: This method is no longer used; the message will do nothing. | ||
484 | 700 | def create_batch_number(self, cr, uid, source, out_info, context=None): | 740 | def create_batch_number(self, cr, uid, source, out_info, context=None): |
485 | 701 | if not context: | 741 | if not context: |
486 | 702 | context = {} | 742 | context = {} |
488 | 703 | self._logger.info("+++ Create batch number that comes with the SHIP/OUT from %s" % source) | 743 | self._logger.info("+++ Create batch number that comes with the SHIP/OUT from %s - This message is deprecated." % source) |
489 | 744 | |||
490 | 704 | so_po_common = self.pool.get('so.po.common') | 745 | so_po_common = self.pool.get('so.po.common') |
491 | 705 | batch_obj = self.pool.get('stock.production.lot') | 746 | batch_obj = self.pool.get('stock.production.lot') |
492 | 706 | 747 | ||
493 | @@ -733,6 +774,32 @@ | |||
494 | 733 | self._logger.info(message) | 774 | self._logger.info(message) |
495 | 734 | return message | 775 | return message |
496 | 735 | 776 | ||
497 | 777 | # US-838: Retrieve batch object, if not found then create new | ||
498 | 778 | def retrieve_batch_number(self, cr, uid, product_id, batch_dict, context=None): | ||
499 | 779 | if not context: | ||
500 | 780 | context = {} | ||
501 | 781 | #self._logger.info("+++ Retrieve batch number for the SHIP/OUT from %s") | ||
502 | 782 | so_po_common = self.pool.get('so.po.common') | ||
503 | 783 | batch_obj = self.pool.get('stock.production.lot') | ||
504 | 784 | prod_obj = self.pool.get('product.product') | ||
505 | 785 | |||
506 | 786 | if not ('name' in batch_dict and 'life_date' in batch_dict): | ||
507 | 787 | # Cannot look up or create the batch without both the batch name and the expiry date | ||
508 | 788 | return False, "Batch Number: Missing batch name or expiry date!" | ||
509 | 789 | |||
510 | 790 | existing_bn = batch_obj.search(cr, uid, [('name', '=', batch_dict['name']), ('product_id', '=', product_id), | ||
511 | 791 | ('life_date', '=', batch_dict['life_date'])], context=context) | ||
512 | 792 | if existing_bn: # existed already, then don't need to create a new one | ||
513 | 793 | message = "Batch object exists in the current system. No new batch created." | ||
514 | 794 | self._logger.info(message) | ||
515 | 795 | return existing_bn[0], message | ||
516 | 796 | |||
517 | 797 | # If not exists, then create this new batch object | ||
518 | 798 | new_bn_vals = {'name': batch_dict['name'], 'product_id': product_id, 'life_date': batch_dict['life_date']} | ||
519 | 799 | message = "The new BN " + batch_dict['name'] + " has been created" | ||
520 | 800 | self._logger.info(message) | ||
521 | 801 | bn_id = batch_obj.create(cr, uid, new_bn_vals, context=context) | ||
522 | 802 | return bn_id, message | ||
523 | 736 | 803 | ||
524 | 737 | def create_asset(self, cr, uid, source, out_info, context=None): | 804 | def create_asset(self, cr, uid, source, out_info, context=None): |
525 | 738 | if not context: | 805 | if not context: |
526 | @@ -863,8 +930,10 @@ | |||
527 | 863 | 930 | ||
528 | 864 | 931 | ||
529 | 865 | # for each new batch number object and for each partner, create messages and put into the queue for sending on next sync round | 932 | # for each new batch number object and for each partner, create messages and put into the queue for sending on next sync round |
532 | 866 | for item in list_batch: | 933 | |
533 | 867 | so_po_common.create_message_with_object_and_partner(cr, uid, 1001, item, partner.name, context) | 934 | #US-838: THIS METHOD IS NO MORE USED, AS THE BN OBJECT WILL NOT BE SENT EXPLICITLY, BUT TOGETHER WITH THE MESSAGE! |
534 | 935 | #for item in list_batch: | ||
535 | 936 | # so_po_common.create_message_with_object_and_partner(cr, uid, 1001, item, partner.name, context) | ||
536 | 868 | 937 | ||
537 | 869 | # for each new asset object and for each partner, create messages and put into the queue for sending on next sync round | 938 | # for each new asset object and for each partner, create messages and put into the queue for sending on next sync round |
538 | 870 | for item in list_asset: | 939 | for item in list_asset: |
539 | 871 | 940 | ||
540 | === modified file 'bin/addons/sync_so/picking_rw.py' | |||
541 | --- bin/addons/sync_so/picking_rw.py 2016-04-25 15:54:40 +0000 | |||
542 | +++ bin/addons/sync_so/picking_rw.py 2016-05-23 07:50:39 +0000 | |||
543 | @@ -588,7 +588,7 @@ | |||
544 | 588 | raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_name | 588 | raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_name |
545 | 589 | uom_id = uom_ids[0] | 589 | uom_id = uom_ids[0] |
546 | 590 | 590 | ||
548 | 591 | 591 | # US-838: RW, need to check the new mechanism of the BN and ED object!!!!!!! | |
549 | 592 | batch_id = False | 592 | batch_id = False |
550 | 593 | if data['prodlot_id']: | 593 | if data['prodlot_id']: |
551 | 594 | batch_id = self.pool.get('stock.production.lot').find_sd_ref(cr, uid, xmlid_to_sdref(data['prodlot_id']['id']), context=context) | 594 | batch_id = self.pool.get('stock.production.lot').find_sd_ref(cr, uid, xmlid_to_sdref(data['prodlot_id']['id']), context=context) |
552 | 595 | 595 | ||
553 | === modified file 'bin/addons/sync_so/specific_xml_id.py' | |||
554 | --- bin/addons/sync_so/specific_xml_id.py 2016-01-25 10:53:13 +0000 | |||
555 | +++ bin/addons/sync_so/specific_xml_id.py 2016-05-23 07:50:39 +0000 | |||
556 | @@ -787,6 +787,10 @@ | |||
557 | 787 | 787 | ||
558 | 788 | product_asset() | 788 | product_asset() |
559 | 789 | 789 | ||
560 | 790 | |||
561 | 791 | ''' | ||
562 | 792 | |||
563 | 793 | US-838: xmlid of batch number is no more used --- REMOVE THIS BLOCK OF CODE WHEN INTEGRATE THE TICKET! | ||
564 | 790 | class batch_number(osv.osv): | 794 | class batch_number(osv.osv): |
565 | 791 | _inherit = "stock.production.lot" | 795 | _inherit = "stock.production.lot" |
566 | 792 | 796 | ||
567 | @@ -794,10 +798,12 @@ | |||
568 | 794 | def get_unique_xml_name(self, cr, uid, uuid, table_name, res_id): | 798 | def get_unique_xml_name(self, cr, uid, uuid, table_name, res_id): |
569 | 795 | batch = self.browse(cr, uid, res_id) | 799 | batch = self.browse(cr, uid, res_id) |
570 | 796 | #UF-2148: use the xmlid_name for building the xml for this object | 800 | #UF-2148: use the xmlid_name for building the xml for this object |
572 | 797 | return get_valid_xml_name('batch_numer', (batch.partner_name or 'no_partner'), (batch.product_id.code or 'noprod'), (batch.xmlid_name or 'noname')) | 801 | return get_valid_xml_name('batch_number', (batch.partner_name or 'no_partner'), (batch.product_id.code or 'noprod'), (batch.xmlid_name or 'noname')) |
573 | 798 | 802 | ||
574 | 799 | batch_number() | 803 | batch_number() |
575 | 800 | 804 | ||
576 | 805 | ''' | ||
577 | 806 | |||
578 | 801 | class ir_model_access(osv.osv): | 807 | class ir_model_access(osv.osv): |
579 | 802 | """ | 808 | """ |
580 | 803 | UF-2146 To allow synchronisation of ir.model.access, must have same sd ref across all instances | 809 | UF-2146 To allow synchronisation of ir.model.access, must have same sd ref across all instances |