Merge lp:~unifield-team/unifield-server/us-838-new-batch-1 into lp:unifield-server

Proposed by jftempo
Status: Merged
Merged at revision: 3789
Proposed branch: lp:~unifield-team/unifield-server/us-838-new-batch-1
Merge into: lp:unifield-server
Diff against target: 580 lines (+329/-35) (has conflicts)
10 files modified
bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py (+9/-2)
bin/addons/msf_profile/data/patches.xml (+8/-0)
bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv (+3/-3)
bin/addons/product_expiry/product_expiry.py (+176/-0)
bin/addons/specific_rules/specific_rules.py (+1/-1)
bin/addons/specific_rules/specific_rules_view.xml (+0/-2)
bin/addons/stock_batch_recall/product_expiry.py (+46/-16)
bin/addons/sync_so/picking.py (+78/-9)
bin/addons/sync_so/picking_rw.py (+1/-1)
bin/addons/sync_so/specific_xml_id.py (+7/-1)
Text conflict in bin/addons/msf_profile/data/patches.xml
To merge this branch: bzr merge lp:~unifield-team/unifield-server/us-838-new-batch-1
Reviewer Review Type Date Requested Status
UniField Reviewer Team Pending
Review via email: mp+295155@code.launchpad.net
To post a comment you must log in.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py'
2--- bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2016-03-08 15:48:47 +0000
3+++ bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2016-05-23 07:50:39 +0000
4@@ -137,10 +137,16 @@
5
6 # No batch found, create a new one
7 if not lot_ids:
8+ # US-838: Add the instance name of the current instance as a prefix to the name of the EP object
9+ company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
10+ prefix = ''
11+ if company and company.partner_id:
12+ prefix = company.partner_id.name + "_"
13+
14 vals = {
15 'product_id': product_id,
16 'life_date': expiry_date,
17- 'name': seq_obj.get(cr, uid, 'stock.lot.serial'),
18+ 'name': prefix + seq_obj.get(cr, uid, 'stock.lot.serial'),
19 'type': 'internal',
20 }
21 lot_id = lot_obj.create(cr, uid, vals, context)
22@@ -215,7 +221,8 @@
23 and not line.prodlot_id \
24 and line.expiry_date:
25 if line.type_check == 'in':
26- prodlot_id = self._get_prodlot_from_expiry_date(cr, uid, line.expiry_date, line.product_id.id, context=context)
27+ # US-838: The method has been moved to addons/stock_batch_recall/product_expiry.py
28+ prodlot_id = self.pool.get('stock.production.lot')._get_prodlot_from_expiry_date(cr, uid, line.expiry_date, line.product_id.id, context=context)
29 in_proc_obj.write(cr, uid, [line.id], {'prodlot_id': prodlot_id}, context=context)
30 else:
31 # Should not be reached thanks to UI checks
32
33=== modified file 'bin/addons/msf_profile/data/patches.xml'
34--- bin/addons/msf_profile/data/patches.xml 2016-05-12 12:09:25 +0000
35+++ bin/addons/msf_profile/data/patches.xml 2016-05-23 07:50:39 +0000
36@@ -33,6 +33,7 @@
37 <field name="method">us_1185_patch</field>
38 </record>
39
40+<<<<<<< TREE
41 <record id="us_1061_patch" model="patch.scripts">
42 <field name="method">us_1061_patch</field>
43 </record>
44@@ -40,5 +41,12 @@
45 <record id="us_1263_patch" model="patch.scripts">
46 <field name="method">us_1263_patch</field>
47 </record>
48+=======
49+ <record id="us_838_patch" model="patch.scripts">
50+ <field name="method">us_838_migrate_dup_batch</field>
51+ <field name="model">stock.production.lot</field>
52+ </record>
53+
54+>>>>>>> MERGE-SOURCE
55 </data>
56 </openerp>
57
58=== modified file 'bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv'
59--- bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv 2016-04-25 15:54:40 +0000
60+++ bin/addons/msf_sync_data_server/data/sync_server.message_rule.csv 2016-05-23 07:50:39 +0000
61@@ -10,15 +10,15 @@
62 fo_updates_po_ref,TRUE,TRUE,"['name','state','client_order_ref']","['&','&','&',('partner_type','!=','external'),('client_order_ref','!=',False),('split_type_sale_order','=','original_sale_order'),'!',('client_order_ref', 'like', 'invalid_by_recovery')]",partner_id,MISSION,purchase.order.update_fo_ref,sale.order,FO updates PO ref,9,Valid
63 update_in_ref,TRUE,TRUE,"['name','shipment_ref']","['&',('shipment_ref','!=',False),'!',('shipment_ref', 'like', 'invalid_by_recovery')]",partner_id,MISSION,stock.picking.update_in_ref,stock.picking,IN updates ref to OUT SHIP,10,Valid
64 canceled_fo_cancels_po,TRUE,TRUE,"['name','state', 'client_order_ref']","[('state', '=', 'cancel'), ('client_order_ref', '!=', '')]",partner_id,MISSION,purchase.order.canceled_fo_cancel_po,sale.order,Canceled FO cancels PO,18,Valid
65-partial_shipped_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/location_dest_id/usage']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['standard', 'packing']), ('state', '=', 'done'), ('already_shipped', '=', True), ('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.partial_shipped_fo_updates_in_po,stock.picking,Partial shipped at Coordo updates IN at Project,19,Valid
66-moves_from_dpo_closed_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'subtype', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/dpo_line_id']","['&', '&', '&', ('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['picking', 'standard']), ('dpo_out', '=', True)]",partner_id,MISSION,stock.picking.partial_shippped_dpo_updates_in_po,stock.picking,Moves from DPO closed at Coordo updates IN at Project,20,Valid
67+partial_shipped_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id','move_lines/prodlot_id/name','move_lines/prodlot_id/life_date', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/location_dest_id/usage']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['standard', 'packing']), ('state', '=', 'done'), ('already_shipped', '=', True), ('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.partial_shipped_fo_updates_in_po,stock.picking,Partial shipped at Coordo updates IN at Project,19,Valid
68+moves_from_dpo_closed_coordo_updates_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'subtype', 'partner_type_stock_picking', 'shipment_id/name', 'min_date', 'note', 'move_lines/processed_stock_move', 'move_lines/id', 'move_lines/state','move_lines/original_qty_partial', 'move_lines/line_number', 'move_lines/name', 'move_lines/change_reason', 'move_lines/product_id/id', 'move_lines/product_id/name', 'move_lines/product_qty', 'move_lines/prodlot_id/id','move_lines/prodlot_id/name','move_lines/prodlot_id/life_date', 'move_lines/expired_date', 'move_lines/asset_id/id','move_lines/product_uom/id', 'move_lines/product_uom/name', 'move_lines/date', 'move_lines/date_expected', 'move_lines/note', 'move_lines/dpo_line_id']","['&', '&', '&', ('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'), ('subtype', 'in', ['picking', 'standard']), ('dpo_out', '=', True)]",partner_id,MISSION,stock.picking.partial_shippped_dpo_updates_in_po,stock.picking,Moves from DPO closed at Coordo updates IN at Project,20,Valid
69 dpo_service_lines_update_in_at_project,TRUE,TRUE,"['order_id/name', 'order_id/delivery_confirmed_date', 'fake_id', 'origin', 'confirmed_delivery_date', 'name', 'product_uom/id', 'product_uom/name', 'link_sol_id/line_number', 'notes', 'product_qty', 'product_id/name', 'product_id/id']","[('dest_partner_id.partner_type', '=', 'internal'), ('order_id.order_type', '=', 'direct'), ('order_id.state', 'in', ['approved', 'done']), ('product_id.type', 'in', ['service', 'service_recep'])]",dest_partner_id,MISSION,purchase.order.line.confirmed_dpo_service_lines_update_in_po,purchase.order.line,DPO service lines update IN at Project,21,Valid
70 cancel_out_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin']","['&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('subtype', '=', 'standard'),('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.cancel_out_pick_cancel_in,stock.picking,Canceled OUT at Coordo cancels IN at Project,22,Valid
71 cancel_pick_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('subtype', '=', 'picking'),('backorder_id', '=', False),('do_not_sync', '=', False)]",partner_id,MISSION,stock.picking.cancel_out_pick_cancel_in,stock.picking,Canceled PICK at Coordo cancels IN at Project,23,Valid
72 cancel_stock_move_at_coordo_cancels_in_at_project,TRUE,TRUE,"['name', 'state', 'origin', 'date_cancel']","['&','&','&','&','&','&',('picking_id.partner_type_stock_picking', '!=', 'external'), ('type', '=', 'out'),('state', '=', 'cancel'),('picking_id.state', '=', 'done'),('picking_id.do_not_sync', '=', False),('to_be_sent', '=', True), '&', '|', ('picking_id.subtype', '=', 'picking'), ('picking_id.subtype', '=', 'standard'), ('picking_id.already_shipped', '=', False)]",partner_id,MISSION,stock.picking.cancel_stock_move_of_pick_cancel_in,stock.move,Canceled stock move cancels IN,24,Valid
73 closed_in_validates_delivery_out_ship,TRUE,TRUE,"['name', 'state', 'shipment_ref']","['&','&','&','&','&',('partner_type_stock_picking', '!=', 'external'), ('type', '=', 'in'),('subtype', 'in', ['standard']), ('state', '=', 'done'), ('shipment_ref', '!=', False), ('dpo_incoming', '=', False)]",partner_id,MISSION,stock.picking.closed_in_validates_delivery_out_ship,stock.picking,Closed IN validates delivery of OUT-SHIP,26,Valid
74 closed_in_confirms_dpo_reception,TRUE,TRUE,"['name', 'state', 'dpo_line_id']","['&','&','&','&',('picking_id.partner_type_stock_picking', '!=', 'external'), ('picking_id.type', '=', 'in'), ('picking_id.subtype', 'in', ['standard']), ('state', '=', 'done'), ('dpo_line_id', '!=', 0)]",partner_id,MISSION,stock.picking.closed_in_confirms_dpo_reception,stock.move,Closed IN confirms DPO reception,27,Valid
75-create_batch_object,TRUE,TRUE,"['name', 'xmlid_name', 'prefix', 'product_id/id', 'partner_id/id', 'date', 'ref','life_date','sequence_id','type']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_batch_number,stock.production.lot,Create Batch Object,1001,Valid
76+create_batch_object,FALSE,TRUE,"['name', 'xmlid_name', 'prefix', 'product_id/id', 'partner_id/id', 'date', 'ref','life_date','sequence_id','type']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_batch_number,stock.production.lot,Create Batch Object,1001,Valid
77 create_asset_object,TRUE,TRUE,"['name', 'xmlid_name', 'arrival_date', 'asset_type_id/id', 'partner_id/id', 'brand', 'comment', 'description', 'hq_ref', 'international_po', 'invo_certif_depreciation', 'invo_currency/id', 'invo_date', 'invo_donator_code', 'invo_num', 'invo_supplier', 'invo_value', 'local_ref', 'model', 'orig_mission_code', 'product_id/id', 'project_po', 'receipt_place', 'serial_nb', 'type', 'year']","[('name', '=', False)]",partner_id,MISSION,stock.picking.create_asset,product.asset,Create Asset Object,1002,Valid
78 reset_ref_by_recovery_mode,TRUE,TRUE,['name'],"[('name', '=', False)]",partner_id,MISSION,sale.order.reset_ref_by_recovery_mode,sale.order,Reset Due to Recovery,1003,Valid
79 USB_replicate_po,TRUE,TRUE,"['name', 'analytic_distribution_id/id', 'partner_id/id','pricelist_id/id','delivery_requested_date','details','notes', 'origin', 'categ', 'order_type', 'priority', 'loan_duration', 'is_a_counterpart', 'cross_docking_ok', 'order_line/product_id/id', 'order_line/product_id/name','order_line/id', 'order_line/name', 'order_line/product_qty', 'order_line/product_uom', 'order_line/price_unit', 'order_line/analytic_distribution_id/id','order_line/comment','order_line/have_analytic_distribution_from_header','order_line/line_number', 'order_line/nomen_manda_0/id','order_line/nomen_manda_1/id','order_line/nomen_manda_2/id','order_line/nomen_manda_3/id', 'order_line/sync_order_line_db_id', 'order_line/nomenclature_description','order_line/notes','order_line/default_name','order_line/default_code','order_line/is_line_split','order_line/date_planned','order_line/procurement_id/id']","[('state','in',['approved', 'done'])]",partner_id,USB,purchase.order.usb_replicate_po,purchase.order,USB_replicate_po,2000,Valid
80
81=== modified file 'bin/addons/product_expiry/product_expiry.py'
82--- bin/addons/product_expiry/product_expiry.py 2011-01-14 00:11:01 +0000
83+++ bin/addons/product_expiry/product_expiry.py 2016-05-23 07:50:39 +0000
84@@ -21,9 +21,12 @@
85 import datetime
86 from osv import fields, osv
87 import pooler
88+import logging
89+
90
91 class stock_production_lot(osv.osv):
92 _inherit = 'stock.production.lot'
93+ _logger = logging.getLogger('------US-838: Migrate duplicate BNs')
94
95 def _get_date(dtype):
96 """Return a function to compute the limit date for this type"""
97@@ -54,6 +57,10 @@
98 }
99 # Assign dates according to products data
100 def create(self, cr, uid, vals, context=None):
101+
102+ if self.violate_ed_unique(cr, uid, False, vals, context):
103+ raise osv.except_osv('Error', 'An expiry date with same date for this product exists already!.')
104+
105 newid = super(stock_production_lot, self).create(cr, uid, vals, context=context)
106 obj = self.browse(cr, uid, newid, context=context)
107 towrite = []
108@@ -66,6 +73,175 @@
109 self.write(cr, uid, [obj.id], self.default_get(cr, uid, towrite, context=context))
110 return newid
111
112+ # US-838: this method is to check if the expiry date values are valid
113+ def violate_ed_unique(self, cr, uid, ids, vals, context):
114+ if not('product_id' in vals and 'life_date' in vals):
115+ return False
116+
117+ prod_obj = self.pool.get('product.product')
118+ prod = prod_obj.browse(cr, uid, vals['product_id'], context=context)
119+
120+ # In case it's an EP-only product, then search for date and product, no need to search for batch name
121+ if prod.perishable and not prod.batch_management:
122+ search_arg = [('life_date', '=', vals['life_date']), ('type', '=', 'internal'), ('product_id', '=', prod.id)]
123+
124+ if ids: # in case it's a write call, then exclude the current ids
125+ search_arg.append(('id', 'not in', ids))
126+
127+ lot_ids = self.search(cr, uid, search_arg, context=context)
128+ if lot_ids:
129+ return True
130+ return False
131+
132+ def write(self, cr, uid, ids, vals, context=None):
133+ '''
134+ force writing of expired_date which is readonly for batch management products
135+ '''
136+ if context is None:
137+ context = {}
138+ if isinstance(ids, (int, long)):
139+ ids = [ids]
140+ # US-838: Check if the values are in conflict with the existing data
141+ if self.violate_ed_unique(cr, uid, ids, vals, context):
142+ raise osv.except_osv('Error', 'An expiry date with same date for this product exists already!')
143+
144+ return super(stock_production_lot, self).write(cr, uid, ids, vals, context=context)
145+
146+ #US-838: migrate all duplicated batches into a single batch
147+ '''
148+
149+ US-838: The following 3 methods will be moved to the patch call; they are called only when a patch is applied.
150+ Check the steps to be executed in the description, but basically it will migrate the references to the wrong BN in relevant objects
151+ to the lead BN, then delete these wrong BNs, and finally redefine the unique constraint on the table BN
152+
153+ method to move: migrate_dup_batch, remap_reference_tables, update_table
154+
155+ '''
156+ def us_838_migrate_dup_batch(self, cr, uid, *a, **b):
157+ '''
158+ Steps to do:
159+
160+ 1. Search list of dup batches, that have the same name + product + xmlname values.
161+ 2. Go through this list, for each element do the following:
162+ 2.1. Get the 2 batch id of the same name, order by id ---> the smaller id will be kept, the other will be set as inactive
163+ 2.2. Search all tables that refer to the bigger_id, then map them to the smaller_id
164+ 2.3. Set the non-lead batches to become inactive
165+ 2.4. Update ir_model_data
166+ 3. Modify the unique constraint to be prod + BN + ED, and no more partner_name involved, because we will not use partner_name anymore
167+
168+ 4. For the messages in the pipeline ---> treated in sync message
169+
170+ '''
171+
172+ cr.execute('''select id, name from stock_production_lot where name in
173+ (select name from (select name, product_id, count(name) as amount_bn from stock_production_lot group by name, product_id, life_date) as foo_bn where amount_bn>1) order by name, id;''')
174+ all_dup_batches = cr.dictfetchall()
175+ self._logger.info("__________Start to migrate duplicate batch objects in instance: %s - with total of %s duplicate batches!" % (cr.dbname, len(all_dup_batches)))
176+
177+ context = {}
178+
179+ lead_id = 0 # This id will be used as the main batch id
180+ to_be_deleted = []
181+ same_name = None
182+ for r in all_dup_batches:
183+ if lead_id == 0:
184+ same_name = r['name']
185+ lead_id = r['id']
186+ else:
187+ if same_name == r['name']: # same batch --> replace in all table to the lead_id
188+ # Do step 2.2, search the following tables to replace the link to the
189+ self.remap_reference_tables(cr, uid, r['id'], lead_id, same_name, context)
190+
191+ # 2.3: Add this wrong batch id into the list, then delete them at the end
192+ to_be_deleted.append(r['id'])
193+ else:
194+ lead_id = r['id'] # when the name change --> replace by the new lead_id
195+ same_name = r['name']
196+
197+ # 2.3 call to delete all the wrong batch objects
198+ if to_be_deleted:
199+ self._logger.info("Delete duplicate batch objects (%s batches - keep only the lead batch)" % len(to_be_deleted))
200+ self.unlink(cr, uid, to_be_deleted, context=context)
201+ else:
202+ self._logger.info("No duplicate batch found for this instance %s.", cr.dbname)
203+
204+ self._logger.info("Last step: update the unique constraint for the table stock_production_lot.")
205+ # 3. Now alter the constraint unique of this table: first drop the current constraint, then create a new one with name+prod+life_date
206+ cr.execute('''ALTER TABLE stock_production_lot DROP CONSTRAINT stock_production_lot_batch_name_uniq,
207+ ADD CONSTRAINT stock_production_lot_batch_name_uniq UNIQUE (name, product_id, life_date);''')
208+
209+ self._logger.info("__________Finish the migration task on duplicate batch objects for instance: %s", cr.dbname)
210+ return True
211+
212+ def remap_reference_tables(self, cr, uid, wrong_id, lead_id, batch_name, context=None):
213+ '''
214+ -- with fkey = prodlot_id (total=13)
215+ TABLE "create_picking_move_processor" CONSTRAINT "create_picking_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
216+ TABLE "export_report_stock_inventory" CONSTRAINT "export_report_stock_inventory_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
217+ TABLE "export_report_stock_move" CONSTRAINT "export_report_stock_move_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
218+ TABLE "internal_move_processor" CONSTRAINT "internal_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
219+ TABLE "outgoing_delivery_move_processor" CONSTRAINT "outgoing_delivery_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
220+ TABLE "ppl_move_processor" CONSTRAINT "ppl_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
221+ TABLE "real_average_consumption_line" CONSTRAINT "real_average_consumption_line_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
222+ TABLE "return_ppl_move_processor" CONSTRAINT "return_ppl_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
223+ TABLE "stock_move_in_processor" CONSTRAINT "stock_move_in_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
224+ TABLE "stock_move_processor" CONSTRAINT "stock_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
225+ TABLE "stock_move" CONSTRAINT "stock_move_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
226+ TABLE "unconsistent_stock_report_line" CONSTRAINT "unconsistent_stock_report_line_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE CASCADE
227+ TABLE "validate_move_processor" CONSTRAINT "validate_move_processor_prodlot_id_fkey" FOREIGN KEY (prodlot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
228+
229+ -- with fkey = lot_id (2)
230+ TABLE "stock_production_lot_revision" CONSTRAINT "stock_production_lot_revision_lot_id_fkey" FOREIGN KEY (lot_id) REFERENCES stock_production_lot(id) ON DELETE CASCADE
231+ TABLE "product_likely_expire_report_item_line" CONSTRAINT "product_likely_expire_report_item_line_lot_id_fkey" FOREIGN KEY (lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
232+
233+ -- with fkey = prod_lot_id (2)
234+ TABLE "stock_inventory_line" CONSTRAINT "stock_inventory_line_prod_lot_id_fkey" FOREIGN KEY (prod_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
235+ TABLE "initial_stock_inventory_line" CONSTRAINT "initial_stock_inventory_line_prod_lot_id_fkey" FOREIGN KEY (prod_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
236+
237+ -- with fkey = no common name (3)
238+ TABLE "claim_product_line" CONSTRAINT "claim_product_line_lot_id_claim_product_line_fkey" FOREIGN KEY (lot_id_claim_product_line) REFERENCES stock_production_lot(id) ON DELETE SET NULL
239+ TABLE "composition_kit" CONSTRAINT "composition_kit_composition_lot_id_fkey" FOREIGN KEY (composition_lot_id) REFERENCES stock_production_lot(id) ON DELETE SET NULL
240+ TABLE "wizard_import_in_line_simulation_screen" CONSTRAINT "wizard_import_in_line_simulation_screen_imp_batch_id_fkey" FOREIGN KEY (imp_batch_id) REFERENCES stock_production_lot(id) ON DELETE SET
241+ '''
242+ # Tables with foreign key prodlot_id (total 13 tables)
243+ self._logger.info("__ Migrating batch number: %s", batch_name)
244+ list_table_fields = [
245+ ('create_picking_move_processor', 'prodlot_id'),
246+ ('export_report_stock_inventory', 'prodlot_id'),
247+ ('export_report_stock_move', 'prodlot_id'),
248+ ('internal_move_processor', 'prodlot_id'),
249+ ('outgoing_delivery_move_processor', 'prodlot_id'),
250+ ('ppl_move_processor', 'prodlot_id'),
251+ ('real_average_consumption_line', 'prodlot_id'),
252+ ('return_ppl_move_processor', 'prodlot_id'),
253+ ('stock_move_in_processor', 'prodlot_id'),
254+ ('stock_move_processor', 'prodlot_id'),
255+ ('stock_move', 'prodlot_id'),
256+ ('unconsistent_stock_report_line', 'prodlot_id'),
257+ ('validate_move_processor', 'prodlot_id'),
258+ ('stock_production_lot_revision', 'lot_id'),
259+ ('product_likely_expire_report_item_line', 'lot_id'),
260+ ('stock_inventory_line', 'prod_lot_id'),
261+ ('initial_stock_inventory_line', 'prod_lot_id'),
262+ ('claim_product_line', 'lot_id_claim_product_line'),
263+ ('composition_kit', 'composition_lot_id'),
264+ ('wizard_import_in_line_simulation_screen', 'imp_batch_id')
265+ ]
266+ for element in list_table_fields:
267+ # Remap this table's reference from the wrong batch id to the lead batch id
268+ self.update_table(cr, uid, element[0] , element[1], wrong_id, lead_id, batch_name)
269+
270+
271+ def update_table(self, cr, uid, table_name, field_id, wrong_id, lead_id, batch_name):
272+ cr.execute('select count(*) as amount from ' + table_name + ' where ' + field_id + ' = %s;' %(wrong_id,))
273+ count = cr.fetchone()[0]
274+ if count > 0: # Only update the table if wrong bn exists
275+ self._logger.info("Table %s has %s batch objects (%s) and will be-mapped." %(table_name, count, batch_name,))
276+ sql_update = "update " + table_name + " set " + field_id + "=" + str(lead_id) + " where " + field_id + "=" + str(wrong_id)
277+ cr.execute(sql_update)
278+ else:
279+ self._logger.info("Table %s has NO duplicate batch (%s)." %(table_name, batch_name,))
280+
281 _defaults = {
282 'life_date': _get_date('life_time'),
283 'use_date': _get_date('use_time'),
284
285=== modified file 'bin/addons/specific_rules/specific_rules.py'
286--- bin/addons/specific_rules/specific_rules.py 2016-04-25 09:24:04 +0000
287+++ bin/addons/specific_rules/specific_rules.py 2016-05-23 07:50:39 +0000
288@@ -1365,7 +1365,7 @@
289 if not batch.delete_ok:
290 raise osv.except_osv(_('Error'), _('You cannot remove a batch number which has stock !'))
291
292- return super(stock_production_lot, self).unlink(cr, uid, batch.id, context=context)
293+ return super(stock_production_lot, self).unlink(cr, uid, ids, context=context)
294
295
296 stock_production_lot()
297
298=== modified file 'bin/addons/specific_rules/specific_rules_view.xml'
299--- bin/addons/specific_rules/specific_rules_view.xml 2016-03-17 08:30:04 +0000
300+++ bin/addons/specific_rules/specific_rules_view.xml 2016-05-23 07:50:39 +0000
301@@ -107,7 +107,6 @@
302 <attribute name="colors">red:life_date &lt; current_date</attribute>
303 </xpath>
304 <field name="ref" position="replace">
305- <field name="partner_name"/>
306 <field name="type" />
307 <field name="life_date" />
308 </field>
309@@ -198,7 +197,6 @@
310
311 <field name="name" position="replace">
312 <field name="name" colspan="2" attrs="{'readonly': [('type', '=', 'internal')]}" />
313- <field name="partner_name" colspan="2" readonly='1'/>
314 </field>
315
316 <field name="date" position="replace">
317
318=== modified file 'bin/addons/stock_batch_recall/product_expiry.py'
319--- bin/addons/stock_batch_recall/product_expiry.py 2014-10-07 12:45:45 +0000
320+++ bin/addons/stock_batch_recall/product_expiry.py 2016-05-23 07:50:39 +0000
321@@ -67,29 +67,59 @@
322 return super(stock_production_lot, self).copy_data(cr, uid, id, default, context=context)
323
324 # UF-1617: Handle the instance in the batch number object
325- def create(self, cr, uid, vals, context=None):
326+ # US-838: this method is removed in integration, because the 2 fields, xmlid_name and partner_name, are no longer used
327+
328+
329+# def create(self, cr, uid, vals, context=None):
330 '''
331 override create method to set the instance id to the current instance if it has not been provided
332- '''
333+
334+ # UF-2148: make the xmlid_name from batch name for building xmlid if the value is not given in vals
335+ if 'product_id' in vals and ('xmlid_name' not in vals or not vals['xmlid_name']):
336+ prod_name = self.pool.get('product.product').browse(cr, uid, vals['product_id'], context=context)
337+ #US-838: xmlid_name now takes product code, batch name and expiry date as value, this value must be unique (soft constraint)
338+ vals['xmlid_name'] = '%s_%s_%s' % (prod_name.default_code, vals['name'], vals['life_date'])
339+
340 if 'partner_name' not in vals or not vals['partner_name']:
341 company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
342 if company and company.partner_id:
343 vals['partner_name'] = company.partner_id.name
344-
345-
346- # UF-2148: make the xmlid_name from batch name for building xmlid if the value is not given in vals
347- if 'product_id' in vals and ('xmlid_name' not in vals or not vals['xmlid_name']):
348- prod_name = self.pool.get('product.product').browse(cr, uid, vals['product_id'], context=context)
349- vals['xmlid_name'] = '%s_%s' % (prod_name.default_code, vals['name'])
350-
351- if 'xmlid_name' in vals:
352- exist = self.search(cr, uid, [('xmlid_name', '=', vals['xmlid_name']), ('partner_name', '=', vals['partner_name']), ('product_id', '=', vals['product_id'])], context=context)
353- if exist:
354- # but if the value exist for xmlid_name, then add a suffix to differentiate, no constraint unique required here
355- vals['xmlid_name'] = vals['xmlid_name'] + "_1"
356
357 return super(stock_production_lot, self).create(cr, uid, vals, context)
358
359+ '''
360+
361+ # US-838: This method got moved from addons/msf_outgoing/wizard/incoming_shipment_processor.py
362+ def _get_prodlot_from_expiry_date(self, cr, uid, expiry_date, product_id, context=None):
363+ """
364+ Search if an internal batch exists in the system with this expiry date.
365+ If no, create the batch.
366+ """
367+ # Objects
368+ seq_obj = self.pool.get('ir.sequence')
369+
370+ # Double check to find the corresponding batch
371+ lot_ids = self.search(cr, uid, [
372+ ('life_date', '=', expiry_date),
373+ ('type', '=', 'internal'),
374+ ('product_id', '=', product_id),
375+ ], context=context)
376+
377+ # No batch found, create a new one
378+ if not lot_ids:
379+ seq_ed = seq_obj.get(cr, uid, 'stock.lot.serial')
380+ vals = {
381+ 'product_id': product_id,
382+ 'life_date': expiry_date,
383+ 'name': seq_ed,
384+ 'type': 'internal',
385+ }
386+ lot_id = self.create(cr, uid, vals, context)
387+ else:
388+ lot_id = lot_ids[0]
389+
390+ return lot_id
391+
392 _columns = {
393 # renamed from End of Life Date
394 'life_date': fields.date('Expiry Date',
395@@ -102,8 +132,8 @@
396
397 # UF-1617: field only used for sync purpose
398 'partner_id': fields.many2one('res.partner', string="Supplier", readonly=True, required=False),
399- 'partner_name': fields.char('Partner', size=128, required=True),
400- 'xmlid_name': fields.char('XML Code, hidden field', size=128, required=True), # UF-2148, this field is used only for xml_id
401+ 'partner_name': fields.char('Partner', size=128),
402+ 'xmlid_name': fields.char('XML Code, hidden field', size=128), # UF-2148, this field is used only for xml_id
403 }
404
405 _defaults = {
406
407=== modified file 'bin/addons/sync_so/picking.py'
408--- bin/addons/sync_so/picking.py 2016-01-22 14:29:49 +0000
409+++ bin/addons/sync_so/picking.py 2016-05-23 07:50:39 +0000
410@@ -97,7 +97,7 @@
411
412 # product
413 product_name = data['product_id']['name']
414- product_id = self.pool.get('product.product').find_sd_ref(cr, uid, xmlid_to_sdref(data['product_id']['id']), context=context)
415+ product_id = prod_obj.find_sd_ref(cr, uid, xmlid_to_sdref(data['product_id']['id']), context=context)
416 if not product_id:
417 product_ids = prod_obj.search(cr, uid, [('name', '=', product_name)], context=context)
418 if not product_ids:
419@@ -112,15 +112,54 @@
420 # uom
421 uom_id = uom_obj.find_sd_ref(cr, uid, xmlid_to_sdref(data['product_uom']['id']), context=context)
422 if not uom_id:
423- raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_name
424+ raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_id
425
426 # UF-1617: Handle batch and asset object
427 batch_id = False
428- if data['prodlot_id']:
429- batch_id = self.pool.get('stock.production.lot').find_sd_ref(cr, uid, xmlid_to_sdref(data['prodlot_id']['id']), context=context)
430+ batch_values = data['prodlot_id']
431+ if batch_values and product_id:
432+ # us-838: WORK IN PROGRESS ..................................
433+ # US-838: check first if this product is EP-only? if yes, treat differently, here we treat only for BN
434+ prodlot_obj = self.pool.get('stock.production.lot')
435+ prod = prod_obj.browse(cr, uid,product_id,context=context)
436+
437+ '''
438+ US-838: The following block is for treating the sync message in pipeline!
439+ If the sync message was made with old message rule, then in the message it contains ONLY the xmlid of the batch, NO life_date.
440+ For this case, we have to retrieve the batch name from this xmlid, by using the double product_code in the search.
441+ From this batch name + product_id, we can find the batch object in the system. There should only be one batch name for the same product
442+ since the migration has already done, which merged all dup batch name into one.
443+
444+ The old sync message has the following xmlid format: sd.batch_numer_se_HQ1C1_DORADIDA15T_DORADIDA15T_MSFBN/000005
445+ '''
446+ xmlid = batch_values['id']
447+ if 'life_date' not in batch_values and 'batch_numer' in xmlid: # it must have the 'batch_numer' as prefix
448+ prod_code = "_" + prod.default_code + "_" + prod.default_code + "_" # This is how the old xmlid has been made: using double prod.default_code
449+ indexOfProdCode = xmlid.find(prod_code) + len(prod_code)
450+ batch_name = xmlid[indexOfProdCode:]
451+ existing_bn = prodlot_obj.search(cr, uid, [('name', '=', batch_name), ('product_id', '=', product_id)], context=context)
452+ if existing_bn:
453+ batch_id = existing_bn[0]
454+ else:
455+ if prod.perishable and not prod.batch_management:
456+ # In case it's a EP only product, then search for date and product, no need to search for batch name
457+ if 'life_date' in batch_values:
458+ # If name exists in the sync message, search by name and product, not by xmlid
459+ life_date = batch_values['life_date']
460+ # US-838: use different way to retrieve the EP object
461+ batch_id = prodlot_obj._get_prodlot_from_expiry_date(cr, uid, life_date, product_id, context=context)
462+ if not batch_id:
463+ raise Exception, "Error while retrieving or creating the expiry date %s for the product %s" % (batch_values, prod.name)
464+ else:
465+                                # US-838: for BN, retrieve it or create it, in the following method
466+ batch_id, msg = self.retrieve_batch_number(cr, uid, product_id, batch_values, context) # return False if the batch object is not found, or cannot be created
467+
468+ ################## TODO: Treat the case for Remote Warehouse: WORK IN PROGRESS BELOW!!!!!!!!!!
469+
470+
471 if not batch_id:
472- raise Exception, "Batch Number %s not found for this sync data record" % data['prodlot_id']
473-
474+ raise Exception, "Batch Number %s not found for this sync data record" % batch_values
475+
476 expired_date = data['expired_date']
477
478 # UTP-872: Add also the state into the move line, but if it is done, then change it to assigned (available)
479@@ -697,10 +736,12 @@
480 return message
481
482
483+    #US-838: This method is no longer used; the message will do nothing.
484 def create_batch_number(self, cr, uid, source, out_info, context=None):
485 if not context:
486 context = {}
487- self._logger.info("+++ Create batch number that comes with the SHIP/OUT from %s" % source)
488+ self._logger.info("+++ Create batch number that comes with the SHIP/OUT from %s - This message is deprecated." % source)
489+
490 so_po_common = self.pool.get('so.po.common')
491 batch_obj = self.pool.get('stock.production.lot')
492
493@@ -733,6 +774,32 @@
494 self._logger.info(message)
495 return message
496
# US-838: Retrieve batch object, if not found then create new
def retrieve_batch_number(self, cr, uid, product_id, batch_dict, context=None):
    """Return the id of the stock.production.lot matching *batch_dict*
    for *product_id*, creating the batch when it does not exist yet.

    :param cr: database cursor
    :param uid: id of the calling user
    :param product_id: id of the product.product the batch belongs to
    :param batch_dict: dict coming from the sync message; must contain
        the keys 'name' and 'life_date' to identify the batch
    :param context: standard OpenERP context dict (may be None)
    :return: (batch_id, message) tuple; batch_id is False when the batch
        name or expiry date is missing from *batch_dict*
    """
    # 'if not context' would clobber a caller-supplied (possibly shared)
    # empty dict; only replace a真正 missing context.
    if context is None:
        context = {}
    batch_obj = self.pool.get('stock.production.lot')

    # Both the batch name and its expiry date are mandatory to identify
    # (or create) a batch unambiguously.
    if not ('name' in batch_dict and 'life_date' in batch_dict):
        # Search for the batch object with the given data
        return False, "Batch Number: Missing batch name or expiry date!"

    existing_bn = batch_obj.search(cr, uid, [('name', '=', batch_dict['name']), ('product_id', '=', product_id),
                                             ('life_date', '=', batch_dict['life_date'])], context=context)
    if existing_bn:  # existed already, then don't need to create a new one
        message = "Batch object exists in the current system. No new batch created."
        self._logger.info(message)
        return existing_bn[0], message

    # If not exists, then create this new batch object
    new_bn_vals = {'name': batch_dict['name'], 'product_id': product_id, 'life_date': batch_dict['life_date']}
    message = "The new BN " + batch_dict['name'] + " has been created"
    self._logger.info(message)
    bn_id = batch_obj.create(cr, uid, new_bn_vals, context=context)
    return bn_id, message
523
524 def create_asset(self, cr, uid, source, out_info, context=None):
525 if not context:
526@@ -863,8 +930,10 @@
527
528
529 # for each new batch number object and for each partner, create messages and put into the queue for sending on next sync round
530- for item in list_batch:
531- so_po_common.create_message_with_object_and_partner(cr, uid, 1001, item, partner.name, context)
532+
563+US-838: xmlid of batch number is no longer used --- REMOVE THIS BLOCK OF CODE WHEN INTEGRATING THE TICKET!
534+ #for item in list_batch:
535+ # so_po_common.create_message_with_object_and_partner(cr, uid, 1001, item, partner.name, context)
536
537 # for each new asset object and for each partner, create messages and put into the queue for sending on next sync round
538 for item in list_asset:
539
540=== modified file 'bin/addons/sync_so/picking_rw.py'
541--- bin/addons/sync_so/picking_rw.py 2016-04-25 15:54:40 +0000
542+++ bin/addons/sync_so/picking_rw.py 2016-05-23 07:50:39 +0000
543@@ -588,7 +588,7 @@
544 raise Exception, "The corresponding uom does not exist here. Uom name: %s" % uom_name
545 uom_id = uom_ids[0]
546
547-
548+ # US-838: RW, need to check the new mechanism of the BN and ED object!!!!!!!
549 batch_id = False
550 if data['prodlot_id']:
551 batch_id = self.pool.get('stock.production.lot').find_sd_ref(cr, uid, xmlid_to_sdref(data['prodlot_id']['id']), context=context)
552
553=== modified file 'bin/addons/sync_so/specific_xml_id.py'
554--- bin/addons/sync_so/specific_xml_id.py 2016-01-25 10:53:13 +0000
555+++ bin/addons/sync_so/specific_xml_id.py 2016-05-23 07:50:39 +0000
556@@ -787,6 +787,10 @@
557
558 product_asset()
559
560+
561+'''
562+
563+US-838: xmlid of batch number is no more used --- REMOVE THIS BLOCK OF CODE WHEN INTEGRATE THE TICKET!
564 class batch_number(osv.osv):
565 _inherit = "stock.production.lot"
566
567@@ -794,10 +798,12 @@
568 def get_unique_xml_name(self, cr, uid, uuid, table_name, res_id):
569 batch = self.browse(cr, uid, res_id)
570 #UF-2148: use the xmlid_name for building the xml for this object
571- return get_valid_xml_name('batch_numer', (batch.partner_name or 'no_partner'), (batch.product_id.code or 'noprod'), (batch.xmlid_name or 'noname'))
572+ return get_valid_xml_name('batch_number', (batch.partner_name or 'no_partner'), (batch.product_id.code or 'noprod'), (batch.xmlid_name or 'noname'))
573
574 batch_number()
575
576+'''
577+
578 class ir_model_access(osv.osv):
579 """
580 UF-2146 To allow synchronisation of ir.model.access, must have same sd ref across all instances

Subscribers

People subscribed via source and target branches

to all changes: