Merge lp:~unifield-team/unifield-server/qt-us-2292 into lp:unifield-server

Proposed by Quentin THEURET @Amaris
Status: Merged
Merged at revision: 4263
Proposed branch: lp:~unifield-team/unifield-server/qt-us-2292
Merge into: lp:unifield-server
Diff against target: 116 lines (+37/-16)
2 files modified
bin/addons/consumption_calculation/consumption_calculation.py (+10/-7)
bin/addons/consumption_calculation/history_consumption.py (+27/-9)
To merge this branch: bzr merge lp:~unifield-team/unifield-server/qt-us-2292
Reviewer Review Type Date Requested Status
Jeff Allen Pending
Review via email: mp+318072@code.launchpad.net
To post a comment you must log in.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'bin/addons/consumption_calculation/consumption_calculation.py'
--- bin/addons/consumption_calculation/consumption_calculation.py 2017-01-31 15:03:10 +0000
+++ bin/addons/consumption_calculation/consumption_calculation.py 2017-02-23 08:12:35 +0000
@@ -1654,13 +1654,16 @@
     for id in ids:
         res[id] = 0.00
         if from_date and to_date:
-            rcr_domain = ['&', '&', ('product_id', '=', id), ('rac_id.cons_location_id', 'in', location_ids),
-                # All lines with a report started out the period and finished in the period
-                '|', '&', ('rac_id.period_to', '>=', from_date), ('rac_id.period_to', '<=', to_date),
-                # All lines with a report started in the period and finished out the period
-                '|', '&', ('rac_id.period_from', '<=', to_date), ('rac_id.period_from', '>=', from_date),
-                # All lines with a report started before the period and finished after the period
-                '&', ('rac_id.period_from', '<=', from_date), ('rac_id.period_to', '>=', to_date)]
+            rac_ids = self.pool.get('real.average.consumption').search(cr, uid, [
+                ('cons_location_id', 'in', location_ids),
+                # All lines with a report started out the period and finished in the period
+                '|', '&', ('period_to', '>=', from_date), ('period_to', '<=', to_date),
+                # All lines with a report started in the period and finished out the period
+                '|', '&', ('period_from', '<=', to_date), ('period_from', '>=', from_date),
+                # All lines with a report started before the period and finished after the period
+                '&', ('period_from', '<=', from_date), ('period_to', '>=', to_date)
+            ])
+            rcr_domain = [('product_id', '=', id), ('rac_id', 'in', rac_ids)]

     rcr_line_ids = self.pool.get('real.average.consumption.line').search(cr, uid, rcr_domain, context=context)
     for line in self.pool.get('real.average.consumption.line').browse(cr, uid, rcr_line_ids, context=context):
=== modified file 'bin/addons/consumption_calculation/history_consumption.py'
--- bin/addons/consumption_calculation/history_consumption.py 2016-10-12 10:07:50 +0000
+++ bin/addons/consumption_calculation/history_consumption.py 2017-02-23 08:12:35 +0000
@@ -148,8 +148,8 @@
         'location_id',
         'id',
         'nomen_manda_0',
-        'sublist_id']
-        , context=context)
+        'sublist_id'],
+        context=context)
     product_ids = []

     # Update the locations in context
@@ -242,6 +242,23 @@
     import pooler
     new_cr = pooler.get_db(cr.dbname).cursor()

+    res = self.browse(cr, uid, ids[0], context=context)
+    if res.consumption_type == 'rac':
+        cr.execute('''
+            SELECT distinct(product_id)
+            FROM real_average_consumption_line
+            WHERE move_id IS NOT NULL
+        ''')
+    else:
+        cr.execute('''
+            SELECT distinct(product_id)
+            FROM stock_move
+            WHERE state = 'done'
+            AND
+            (location_id IN %s OR location_dest_id IN %s)
+        ''', (tuple(context.get('location_id', [])), tuple(context.get('location_id', []))))
+    product_ids = [x[0] for x in cr.fetchall()]
+
     # split ids into slices to not read a lot record in the same time (memory)
     ids_len = len(product_ids)
     slice_len = 500
@@ -260,6 +277,7 @@
     except Exception, e:
         logging.getLogger('history.consumption').warn('Exception in read average', exc_info=True)
         new_cr.rollback()
+
     self.write(new_cr, uid, ids, {'status': 'ready'}, context=context)

     new_cr.commit()
@@ -549,7 +567,7 @@
         ('consumption_id', '=', obj_id)]
     if context.get('amc') == 'AMC':
         cons_prod_domain.append(('cons_type', '=', 'amc'))
-        cons_id = cons_prod_obj.search(cr, uid, cons_prod_domain, context=context)
+        cons_id = cons_prod_obj.search(cr, uid, cons_prod_domain, order='NO_ORDER', limit=1, context=context)
         if cons_id:
             consumption = cons_prod_obj.browse(cr, uid, cons_id[0], context=context).value
         else:
@@ -561,7 +579,7 @@
             'value': consumption}, context=context)
     else:
         cons_prod_domain.append(('cons_type', '=', 'fmc'))
-        cons_id = cons_prod_obj.search(cr, uid, cons_prod_domain, context=context)
+        cons_id = cons_prod_obj.search(cr, uid, cons_prod_domain, order='NO_ORDER', limit=1, context=context)
         if cons_id:
             consumption = cons_prod_obj.browse(cr, uid, cons_id[0], context=context).value
         else:
@@ -581,7 +599,7 @@
         ('consumption_id', '=', obj_id),
         ('cons_type', '=', context.get('amc') == 'AMC' and 'amc' or 'fmc')]
     r.update({'average': round(total_consumption/float(len(context.get('months'))),2)})
-    cons_id = cons_prod_obj.search(cr, uid, cons_prod_domain, context=context)
+    cons_id = cons_prod_obj.search(cr, uid, cons_prod_domain, order='NO_ORDER', limit=1, context=context)
     if cons_id:
         cons_prod_obj.write(cr, uid, cons_id, {'value': r['average']}, context=context)
     else:
@@ -664,10 +682,10 @@

     _columns = {
         'consumption_id': fields.many2one('product.history.consumption', string='Consumption id', select=1, ondelete='cascade'),
-        'product_id': fields.many2one('product.product', string='Product'),
-        'name': fields.char(size=64, string='Name'),
-        'value': fields.float(digits=(16,2), string='Value', select=1),
-        'cons_type': fields.selection([('amc', 'AMC'), ('fmc', 'FMC')], string='Consumption type'),
+        'product_id': fields.many2one('product.product', string='Product', select=1),
+        'name': fields.char(size=64, string='Name', select=1),
+        'value': fields.float(digits=(16,2), string='Value'),
+        'cons_type': fields.selection([('amc', 'AMC'), ('fmc', 'FMC')], string='Consumption type', select=1),
     }

     def read(self, cr, uid, ids, fields, context=None, load='_classic_read'):

Subscribers

People subscribed via source and target branches

to all changes: