Merge lp:~jfb-tempo-consulting/unifield-server/US-5341 into lp:unifield-server

Proposed by jftempo
Status: Merged
Merged at revision: 5315
Proposed branch: lp:~jfb-tempo-consulting/unifield-server/US-5341
Merge into: lp:unifield-server
Diff against target: 1657 lines (+715/-347) (has conflicts)
12 files modified
bin/addons/msf_doc_import/purchase_order.py (+84/-8)
bin/addons/msf_doc_import/wizard/wizard_in_simulation_screen.py (+2/-1)
bin/addons/msf_doc_import/wizard/wizard_po_simulation_screen.py (+185/-82)
bin/addons/msf_doc_import/wizard/wizard_po_simulation_screen_view.xml (+1/-0)
bin/addons/msf_profile/i18n/fr_MF.po (+138/-4)
bin/addons/msf_tools/automated_export.py (+1/-1)
bin/addons/msf_tools/automated_export_job.py (+13/-12)
bin/addons/msf_tools/automated_import.py (+1/-1)
bin/addons/msf_tools/automated_import_job.py (+247/-202)
bin/addons/purchase/purchase_order_line.py (+33/-30)
bin/addons/purchase/purchase_view.xml (+8/-4)
bin/addons/purchase/purchase_workflow.py (+2/-2)
Text conflict in bin/addons/msf_profile/i18n/fr_MF.po
To merge this branch: bzr merge lp:~jfb-tempo-consulting/unifield-server/US-5341
Reviewer: UniField Reviewer Team
Status: Pending
Review via email: mp+366464@code.launchpad.net

Preview Diff

1=== modified file 'bin/addons/msf_doc_import/purchase_order.py'
2--- bin/addons/msf_doc_import/purchase_order.py 2018-09-11 12:56:23 +0000
3+++ bin/addons/msf_doc_import/purchase_order.py 2019-04-25 15:27:08 +0000
4@@ -35,6 +35,51 @@
5 import xml.etree.ElementTree as ET
6 from service.web_services import report_spool
7
8+class purchase_order_manual_export(osv.osv_memory):
9+ _name = 'purchase.order.manual.export'
10+
11+ _columns = {
12+ 'purchase_id': fields.many2one('purchase.order', 'Purchase Order'),
13+
14+ }
15+
16+ def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
17+ result = {'fields': {}, 'model': 'purchase.order.manual.export', 'type': 'form'}
18+
19+ msg = _('Manually run export of PO')
20+
21+ if self.pool.get('purchase.order').search_exist(cr, uid, [('id', '=', context.get('purchase_order')), ('auto_exported_ok', '=', True)], context=context):
22+ msg = _('The PO was already exported, do you want to export it again?')
23+ result['arch'] = '''<form string="%(title)s">
24+ <separator colspan="4" string="%(msg)s" />
25+ <button special="cancel" string="%(cancel)s" icon="gtk-cancel" colspan="2"/>
26+ <button name="export_po" string="%(ok)s" icon="gtk-ok" colspan="2" type="object"/>
27+ </form>''' % {
28+ 'title': msg,
29+ 'msg': msg,
30+ 'cancel': _('Cancel'),
31+ 'ok': _('OK'),
32+ }
33+ return result
34+
35+ def export_po(self, cr, uid, ids, context=None):
36+ auto_job_ids = self.pool.get('automated.export').search(cr, uid, [('function_id.method_to_call', '=', 'auto_export_validated_purchase_order'), ('active', '=', True)], context=context)
37+ if not auto_job_ids:
38+ raise osv.except_osv(_('Warning'), _('The job to export PO is not active.'))
39+
40+ auto_job = self.pool.get('automated.export').browse(cr, uid, auto_job_ids[0], context=context)
41+ wiz = self.browse(cr, uid, ids[0], context)
42+
43+
44+ processed, rejected, trash = self.pool.get('purchase.order').auto_export_validated_purchase_order(cr, uid, auto_job, [wiz.purchase_id.id], context=context)
45+ if not rejected:
46+ self.log(cr, uid, wiz.purchase_id.id, _('PO %s successfully exported') % wiz.purchase_id.name)
47+ else:
48+ self.log(cr, uid, wiz.purchase_id.id, _('PO %s: %d lines rejected') % (wiz.purchase_id.name, len(rejected)))
49+
50+ return {'type': 'ir.actions.act_window_close'}
51+
52+purchase_order_manual_export()
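
The wizard above follows the codebase's usual pattern for on-the-fly confirmation popups: an osv_memory whose fields_view_get builds the form arch in Python so the message can depend on context. A minimal sketch of the pattern (model name, message and button names are illustrative, not part of this branch):

    from osv import osv
    from tools.translate import _

    class confirm_sketch(osv.osv_memory):
        _name = 'confirm.sketch'

        def fields_view_get(self, cr, uid, view_id=None, view_type='form',
                            context=None, toolbar=False, submenu=False):
            # Build the view dynamically instead of declaring it in XML,
            # so the message can depend on what the caller put in context.
            msg = _('Do you want to proceed?')
            return {
                'fields': {},
                'model': self._name,
                'type': 'form',
                'arch': '''<form string="%(t)s">
                    <separator colspan="4" string="%(t)s"/>
                    <button special="cancel" string="%(c)s" icon="gtk-cancel" colspan="2"/>
                    <button name="proceed" string="%(o)s" icon="gtk-ok" colspan="2" type="object"/>
                </form>''' % {'t': msg, 'c': _('Cancel'), 'o': _('OK')},
            }

        def proceed(self, cr, uid, ids, context=None):
            # Returning this action closes the popup.
            return {'type': 'ir.actions.act_window_close'}

    confirm_sketch()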
53
54 class purchase_order(osv.osv):
55 _inherit = 'purchase.order'
56@@ -73,6 +118,19 @@
57
58 return res
59
60+
61+ def _can_be_auto_exported(self, cr, uid, ids, field_name, args, context=None):
62+ ret = {}
63+ for id in ids:
64+ ret[id] = False
65+
66+ if not self.pool.get('automated.export').search_exist(cr, uid, [('function_id.method_to_call', '=', 'auto_export_validated_purchase_order'), ('active', '=', True)], context=context):
67+ return ret
68+
69+ for x in self.search(cr, uid, [('id', 'in', ids), ('partner_type', '=', 'esc'), ('state', 'in', ['validated', 'validated_p'])], context=context):
70+ ret[x] = True
71+ return ret
72+
73 _columns = {
74 'import_in_progress': fields.function(
75 _get_import_progress,
76@@ -83,12 +141,26 @@
77 ),
78 'import_filenames': fields.one2many('purchase.order.simu.import.file', 'order_id', string='Imported files', readonly=True),
79 'auto_exported_ok': fields.boolean('PO exported to ESC'),
80+ 'can_be_auto_exported': fields.function(_can_be_auto_exported, method=True, type='boolean', string='Can be auto exported ?'),
81 }
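
can_be_auto_exported is a plain non-stored function field: it is recomputed on every read and must return a value for every id it was asked about, which is why _can_be_auto_exported seeds ret with False before flipping the matching records to True. A minimal sketch of that pattern under the same osv API (field and method names are illustrative):

    from osv import osv, fields

    class order_flag_sketch(osv.osv):
        _inherit = 'purchase.order'

        def _compute_flag(self, cr, uid, ids, field_name, args, context=None):
            # A function field must return one value per requested id.
            res = dict.fromkeys(ids, False)
            for oid in self.search(cr, uid, [('id', 'in', ids),
                                             ('state', '=', 'validated')], context=context):
                res[oid] = True
            return res

        _columns = {
            'flag_ok': fields.function(_compute_flag, method=True,
                                       type='boolean', string='Flag ?'),
        }

    order_flag_sketch()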
82
83 _defaults = {
84 'import_in_progress': lambda *a: False,
85 }
86
87+ def auto_export_manual(self, cr, uid, ids, context=None):
88+ wiz_id = self.pool.get('purchase.order.manual.export').create(cr, uid, {'purchase_id': ids[0]}, context=context)
89+ ctx = context.copy()
90+ ctx['purchase_order'] = ids[0]
91+ return {
92+ 'view_type': 'form',
93+ 'view_mode': 'form',
94+ 'res_id': wiz_id,
95+ 'context': ctx,
96+ 'type': 'ir.actions.act_window',
97+ 'target': 'new',
98+ 'res_model': 'purchase.order.manual.export',
99+ }
100
101 def get_file_content(self, cr, uid, file_path, context=None):
102 if context is None:
103@@ -201,8 +273,8 @@
104 rejected.append( (key, values[key]) )
105 else:
106 values = self.pool.get('wizard.import.po.simulation.screen').get_values_from_xml(cr, uid, base64.encodestring(file_content), context=context)
107- header = [x.replace('_', ' ').title() for x in values.get(21)]
108- for key in sorted([k for k in values.keys() if k > 21]):
109+ header = [x.replace('_', ' ').title() for x in values.get(23)]
110+ for key in sorted([k for k in values.keys() if k > 23]):
111 if import_success:
112 processed.append( (key, values[key]) )
113 else:
114@@ -291,18 +363,20 @@
115 return res
116
117
118- def auto_export_validated_purchase_order(self, cr, uid, export_wiz, context=None):
119+ def auto_export_validated_purchase_order(self, cr, uid, export_wiz, po_ids=False, context=None):
120 '''
121 Method called by obj automated.export
122 '''
123 if context is None:
124 context = {}
125
126- po_ids = self.search(cr, uid, [
127- ('partner_type', '=', 'esc'),
128- ('state', 'in', ['validated', 'validated_p']),
129- ('auto_exported_ok', '=', False),
130- ], context= context)
131+ # any change in domain must also be changed in _can_be_auto_exported
132+ if not po_ids:
133+ po_ids = self.search(cr, uid, [
134+ ('partner_type', '=', 'esc'),
135+ ('state', 'in', ['validated', 'validated_p']),
136+ ('auto_exported_ok', '=', False),
137+ ], context= context)
138
139 if not po_ids:
140 msg = _('No PO to export !')
141@@ -387,6 +461,8 @@
142
143 if 'import_in_progress' not in defaults:
144 defaults.update({'import_in_progress': False})
145+ if 'auto_exported_ok' not in defaults:
146+ defaults.update({'auto_exported_ok': False})
147
148 return super(purchase_order, self).copy(cr, uid, id, defaults, context=context)
149
150
151=== modified file 'bin/addons/msf_doc_import/wizard/wizard_in_simulation_screen.py'
152--- bin/addons/msf_doc_import/wizard/wizard_in_simulation_screen.py 2019-02-11 09:38:43 +0000
153+++ bin/addons/msf_doc_import/wizard/wizard_in_simulation_screen.py 2019-04-25 15:27:08 +0000
154@@ -1117,9 +1117,10 @@
155
156 context['from_simu_screen'] = True
157
158- if simu_id.with_pack:
159+ if simu_id.with_pack or context.get('do_not_import_with_thread'):
160 cr.commit()
161 if context.get('do_not_import_with_thread'):
162+ # Auto VI IN import: run the import synchronously (no thread)
163 self._import_with_thread(cr, uid, [partial_id], simu_id.id, context=context)
164 else:
165 new_thread = threading.Thread(target=self._import_with_thread, args=(cr, uid, [partial_id], simu_id.id, context))
166
167=== modified file 'bin/addons/msf_doc_import/wizard/wizard_po_simulation_screen.py'
168--- bin/addons/msf_doc_import/wizard/wizard_po_simulation_screen.py 2019-01-30 16:53:05 +0000
169+++ bin/addons/msf_doc_import/wizard/wizard_po_simulation_screen.py 2019-04-25 15:27:08 +0000
170@@ -33,7 +33,6 @@
171
172 from mx import DateTime
173
174-
175 # Server imports
176 from osv import osv
177 from osv import fields
178@@ -268,6 +267,7 @@
179 readonly=True),
180 'simu_line_ids': fields.one2many('wizard.import.po.simulation.screen.line',
181 'simu_id', string='Lines', readonly=True),
182+ 'ad_info': fields.text(string='New Header AD', readonly=1),
183 }
184
185 _defaults = {
186@@ -488,19 +488,29 @@
187 'message_esc1', 'message_esc2']
188 for line in field:
189 rec_lines.append(line)
190+ elif field.attrib['name'] == 'analytic_distribution_id':
191+ index += 1
192+ values[index] = []
193+ index += 1
194+ ad_info = ['']
195+ for ad_node in field:
196+ if ad_node.text:
197+ ad_info.append(ad_node.text)
198+ values[index] = ad_info
199
200 for line in rec_lines:
201 index += 1
202 values[index] = []
203 for fl in line:
204- if fl.attrib['name'] in ad_field_names:
205- continue
206- if not fl.getchildren():
207+ if fl.attrib['name'] == 'analytic_distribution_id':
208+ for ad_node in fl:
209+ if ad_node.text:
210+ values[index].append(ad_node.text or '')
211+ elif not fl.getchildren():
212 values[index].append(fl.text or '')
213 else:
214 for sfl in fl:
215 values[index].append(sfl.text or '')
216-
217 return values
218
219
220@@ -528,6 +538,96 @@
221
222 return values
223
224+ def create_ad(self, cr, uid, ad_info, partner_type, currency_id, context):
225+ ad_infos = tools.safe_eval(ad_info)
226+ cc_lines = []
227+ for ad_info in ad_infos:
228+ info = ad_info.split('-')
229+ cc_lines.append((0, 0, {
230+ 'partner_type': partner_type,
231+ 'destination_id': int(info[0]),
232+ 'analytic_id': int(info[1]),
233+ 'percentage': float(info[2]),
234+ 'currency_id': currency_id,
235+ }
236+ ))
237+ distrib_id = self.pool.get('analytic.distribution').create(cr, uid, {'partner_type': partner_type, 'cost_center_lines': cc_lines}, context=context)
238+ self.pool.get('analytic.distribution').create_funding_pool_lines(cr, uid, [distrib_id], context=context)
239+ return distrib_id
240+
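
create_ad receives ad_info as text holding a list of 'destination_id-analytic_id-percentage' strings (decoded with tools.safe_eval) and turns each entry into a one2many create command. The (0, 0, vals) tuples are the ORM's standard one2many write commands; for reference:

    # One2many command tuples understood by create()/write() in this ORM:
    #   (0, 0, vals)   create a new line from vals
    #   (1, id, vals)  update the existing line id with vals
    #   (2, id)        delete the line id
    cc_lines = [
        (0, 0, {'destination_id': 4, 'analytic_id': 7, 'percentage': 100.0}),
    ]
    # passed as the value of 'cost_center_lines' when creating the
    # analytic.distribution record (the ids above are illustrative)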
241+ def check_ad(self, cr, uid, values, existing_ad, product_id=False, po_type=False, cc_cache=None, context=None):
242+ errors = []
243+
244+ if context is None:
245+ context = {}
246+ if cc_cache is None:
247+ cc_cache = {}
248+
249+ cc_cache.setdefault('aa_ko', {'DEST': {}, 'OC': {}})
250+ cc_cache.setdefault('aa_ok', {'DEST': {}, 'OC': {}})
251+ existing_ad_set = set()
252+ if existing_ad:
253+ for cc_line in existing_ad.cost_center_lines:
254+ existing_ad_set.add('%s-%s-%s'%(cc_line.destination_id.id, cc_line.analytic_id.id, round(cc_line.percentage,2)))
255+ ad = []
256+ if len(values) < 4 or len(values) % 4 != 0:
257+ errors.append(_('Invalid AD format: %d columns found, multiple of 4 expected') % (len(values), ))
258+ else:
259+ idx = 0
260+ sum_percent = 0
261+ while idx < len(values):
262+ if not values[idx]:
263+ break
264+ try:
265+ percent = float(values[idx+2])
266+ except (TypeError, ValueError):
267+ errors.append(_('%% in AD must be a number (value found %s), AD in file ignored') % (values[idx+2]))
268+ ad = []
269+ break
270+ ad.append([values[idx], values[idx+1], percent])
271+ sum_percent += percent
272+ idx += 4
273+ if ad and abs(100-sum_percent) > 0.001:
274+ ad = []
275+ errors.append(_('Sum of AD %% must be 100 (value found %s), AD in file ignored') % (sum_percent))
276+
277+ valid_ad = True
278+ data_ad_set = set()
279+ aa_ko = cc_cache['aa_ko']
280+ aa_ok = cc_cache['aa_ok']
281+
282+ gl_account_id = False
283+ if product_id:
284+ product_record = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
285+ gl_account_id = self.pool.get('purchase.order.line').get_distribution_account(cr, uid, product_record, False, po_type, context=context)
286+
287+ for ad_value in ad:
288+ ad_codes = {}
289+ for x in [(0, 'DEST', _('Destination')), (1, 'OC', _('Cost Center'))]:
290+ account = ad_value[x[0]].strip()
291+ if account not in aa_ko[x[1]] and account not in aa_ok[x[1]]:
292+ dom = [('category', '=', x[1]), ('type','!=', 'view'), ('code', '=ilike', account), ('filter_active', '=', True)]
293+ if gl_account_id and x[1] == 'DEST':
294+ dom += [('destination_ids', '=', gl_account_id)]
295+ account_ids = self.pool.get('account.analytic.account').search(cr, uid, dom, context=context)
296+ if not account_ids:
297+ valid_ad = False
298+ aa_ko[x[1]][account] = True
299+ errors.append(_('%s %s not found or inactive, AD in file ignored') % (x[2], account))
300+ else:
301+ aa_ok[x[1]][account] = account_ids[0]
302+ ad_codes[x[1]] = aa_ok[x[1]].get(account)
303+ if valid_ad:
304+ data_ad_set.add('%s-%s-%s' % (ad_codes['DEST'], ad_codes['OC'], round(ad_value[2], 2)))
305+
306+ if valid_ad and existing_ad_set:
307+ if data_ad_set != existing_ad_set:
308+ errors.append(_('Already has a valid Analytical Distribution'))
309+ else:
310+ data_ad_set = set()
311+
312+ return errors, list(data_ad_set)
313+
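
check_ad expects the AD cells flattened into groups of four columns per distribution line (destination code, cost center code, percentage, plus a fourth column that the wizard skips), with the percentages summing to 100 within a 0.001 tolerance. A standalone sketch of that parsing contract (pure Python, no ORM):

    def parse_ad_cells(cells):
        # cells: [dest, cc, percent, extra, dest, cc, percent, extra, ...]
        if len(cells) < 4 or len(cells) % 4 != 0:
            raise ValueError('multiple of 4 columns expected, got %d' % len(cells))
        ad, total = [], 0.0
        for i in range(0, len(cells), 4):
            if not cells[i]:
                break
            percent = float(cells[i + 2])
            ad.append((cells[i], cells[i + 1], percent))
            total += percent
        if ad and abs(100 - total) > 0.001:
            raise ValueError('percentages must sum to 100, got %s' % total)
        return ad

    # parse_ad_cells(['OPS', 'CC1', '60', '', 'OPS', 'CC2', '40', ''])
    # -> [('OPS', 'CC1', 60.0), ('OPS', 'CC2', 40.0)]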
314
315 '''
316 Simulate routine
317@@ -570,7 +670,7 @@
318 nb_file_header_lines = NB_OF_HEADER_LINES
319 nb_file_lines_columns = NB_LINES_COLUMNS
320 first_line_index = nb_file_header_lines + 1
321- if wiz.with_ad == 'yes' and wiz.filetype != 'xml':
322+ if wiz.with_ad == 'yes':
323 nb_file_header_lines += 2
324 first_line_index += 2
325
326@@ -799,6 +899,17 @@
327 # Line 20: Sourcing group
328 # Nothing to do
329
330+ cc_cache = {}
331+ # Line 22: AD
332+ if values.get(22) and len(values[22]) > 1:
333+ errors, ad_info = self.check_ad(cr, uid, values[22][1:], wiz.order_id.analytic_distribution_id, cc_cache=cc_cache, context=context)
334+ if errors:
335+ values_header_errors.append(_('Line 22 of the file: Analytical Distribution ignored: \n - %s') % ("\n - ".join(errors)))
336+ elif ad_info:
337+ header_values['ad_info'] = ad_info
338+
341 '''
342 The header values have been imported, start the importation of
343 lines
344@@ -884,7 +995,8 @@
345 if vals[4]:
346 qty = float(vals[4])
347
348- file_lines[x] = (line_number, product_id, uom_id, qty, ext_ref)
349+ # AD on line
350+ file_lines[x] = (line_number, product_id, uom_id, qty, ext_ref, vals[20:])
351
352 '''
353 Get the best matching line :
354@@ -992,7 +1104,7 @@
355 'percent_completed': percent_completed}, context=context)
356 vals = values.get(file_line[0], [])
357 if file_line[1] == 'match':
358- err_msg = wl_obj.import_line(cr, uid, po_line, vals, context=context)
359+ err_msg = wl_obj.import_line(cr, uid, po_line, vals, cc_cache, context=context)
360 if file_line[0] in not_ok_file_lines:
361 wl_obj.write(cr, uid, [po_line], {'type_change': 'error', 'error_msg': not_ok_file_lines[file_line[0]]}, context=context)
362 elif file_line[1] == 'split':
363@@ -1002,7 +1114,7 @@
364 'parent_line_id': po_line,
365 'imp_dcd': False,
366 'po_line_id': False}, context=context)
367- err_msg = wl_obj.import_line(cr, uid, new_wl_id, vals, context=context)
368+ err_msg = wl_obj.import_line(cr, uid, new_wl_id, vals, cc_cache, context=context)
369 if file_line[0] in not_ok_file_lines:
370 wl_obj.write(cr, uid, [new_wl_id], {'type_change': 'error', 'error_msg': not_ok_file_lines[file_line[0]]}, context=context)
371 # Commit modifications
372@@ -1028,7 +1140,7 @@
373 'in_line_number': values.get(po_line, [])[0] and int(values.get(po_line, [])[0]) or False,
374 'in_ext_ref': values.get(po_line, [])[1] or False,
375 'simu_id': wiz.id}, context=context)
376- err_msg = wl_obj.import_line(cr, uid, new_wl_id, vals, context=context)
377+ err_msg = wl_obj.import_line(cr, uid, new_wl_id, vals, cc_cache, context=context)
378 if po_line in not_ok_file_lines:
379 wl_obj.write(cr, uid, [new_wl_id], {'type_change': 'error', 'error_msg': not_ok_file_lines[po_line]}, context=context)
380
381@@ -1154,17 +1266,14 @@
382 w_vals = {'state': 'import_progress',}
383 self.write(cr, uid, [wiz.id], w_vals, context=context)
384
385- """
386- UFTP-59: import PO header
387- 20/Mar/14 2:51 PM was asked to import only:
388- 1)Supplier Ref, 2) RTS date, 3) Shipment date
389- just uncomment the 3 other fields if asked later
390- """
391 po_vals = {
392 'partner_ref': wiz.imp_supplier_ref or wiz.in_supplier_ref,
393 'ready_to_ship_date': wiz.imp_ready_to_ship_date or wiz.in_ready_to_ship_date,
394 'shipment_date': wiz.imp_shipment_date or wiz.in_shipment_date,
395 }
396+ if wiz.ad_info:
397+ po_vals['analytic_distribution_id'] = self.create_ad(cr, uid, wiz.ad_info, wiz.order_id.partner_id.partner_type, wiz.order_id.currency_id.id, context)
398+
399 self.pool.get('purchase.order').write(cr, uid, [wiz.order_id.id], po_vals, context=context)
400
401 lines = [x.id for x in wiz.simu_line_ids]
402@@ -1266,17 +1375,21 @@
403 if not line.chg_text and (line.imp_product_id or (not line.imp_product_id and line.in_comment)) \
404 and price_change:
405 res[line.id]['chg_text'] += _('\nPRICE')
406+ if not line.chg_text and line.ad_info:
407+ res[line.id]['chg_text'] += _('\nAD')
408 drd_change = not(res[line.id]['in_drd'] == line.imp_drd)
409 dcd_change = not(res[line.id]['in_dcd'] == line.imp_dcd)
410 to_delete = line.imp_comment == '[DELETE]'
411 if to_delete:
412 delete_line_numbers.add(line.in_line_number)
413
414- if line.simu_id.state != 'draft' and (prod_change or qty_change or price_change or drd_change or dcd_change or to_delete):
415+ if line.simu_id.state != 'draft' and (prod_change or qty_change or price_change or drd_change or dcd_change or to_delete or line.ad_info):
416 res[line.id]['change_ok'] = True
417 elif line.type_change == 'del':
418 res[line.id]['imp_discrepancy'] = -(line.in_qty*line.in_price)
419 else:
420+ if line.ad_info:
421+ res[line.id]['chg_text'] = "%s%s" % (line.chg_text, _("\nAD"))
422 res[line.id]['imp_discrepancy'] = line.imp_qty*line.imp_price
423 if line.imp_uom:
424 res[line.id]['in_uom'] = line.imp_uom.id
425@@ -1379,6 +1492,7 @@
426 readonly=True),
427 'chg_text': fields.function(_get_line_info, method=True, multi='line', type='char', size=216, string='CHG',
428 readonly=True, store=True),
429+ 'ad_info': fields.text(string='New AD', readonly=True),
430 }
431
432 def get_error_msg(self, cr, uid, ids, context=None):
433@@ -1394,7 +1508,7 @@
434
435 return True
436
437- def import_line(self, cr, uid, ids, values, context=None):
438+ def import_line(self, cr, uid, ids, values, cc_cache, context=None):
439 '''
440 Write the line with the values
441 '''
442@@ -1472,6 +1586,17 @@
443 if not prod_id:
444 write_vals['type_change'] = 'error'
445
446+ write_vals['ad_info'] = False
447+ if not write_vals.get('type_change') and len(values) > 20:
448+ existing_ad = line.po_line_id and line.po_line_id.analytic_distribution_id or line.simu_id.order_id.analytic_distribution_id
449+ if line.po_line_id.analytic_distribution_state != 'valid':
450+ existing_ad = False
451+ errors_ad, ad_info = self.pool.get('wizard.import.po.simulation.screen').check_ad(cr, uid, values[20:], existing_ad, product_id=write_vals.get('imp_product_id'), po_type=line.simu_id.order_id.order_type, cc_cache=cc_cache, context=context)
452+ if errors_ad:
453+ errors += errors_ad
454+ elif ad_info:
455+ write_vals['ad_info'] = ad_info
456+
457 # Qty
458 err_msg = _('Incorrect float value for field \'Product Qty\'')
459 try:
460@@ -1668,7 +1793,33 @@
461
462 if line.type_change == 'del' and line.po_line_id:
463 wf_service.trg_validate(uid, 'purchase.order.line', line.po_line_id.id, 'cancel', cr)
464- elif line.type_change == 'split' and line.parent_line_id:
465+ simu_obj.write(cr, uid, [line.simu_id.id], {'percent_completed': percent_completed}, context=context)
466+ cr.commit()
467+ continue
468+
469+ line_vals = {
470+ 'product_id': line.imp_product_id.id,
471+ 'product_uom': line.imp_uom.id,
472+ 'price_unit': line.imp_price,
473+ 'product_qty': line.imp_qty,
474+ }
475+ if line.imp_drd:
476+ line_vals['date_planned'] = line.imp_drd
477+ if line.imp_project_ref:
478+ line_vals['project_ref'] = line.imp_project_ref
479+ if line.imp_origin:
480+ line_vals['origin'] = line.imp_origin
481+ if line.imp_external_ref:
482+ line_vals['external_ref'] = line.imp_external_ref
483+ if line.imp_dcd:
484+ line_vals['confirmed_delivery_date'] = line.imp_dcd
485+ if line.imp_stock_take_date:
486+ line_vals['stock_take_date'] = line.imp_stock_take_date
487+
488+ if line.ad_info:
489+ line_vals['analytic_distribution_id'] = simu_obj.create_ad(cr, uid, line.ad_info, line.simu_id.order_id.partner_id.partner_type, line.simu_id.order_id.currency_id.id, context)
490+
491+ if line.type_change == 'split' and line.parent_line_id:
492 # Call the split line wizard
493 po_line_id = False
494 if line.parent_line_id and line.parent_line_id.po_line_id:
495@@ -1699,36 +1850,19 @@
496 if not new_product_split and not new_po_line_id:
497 continue # split line has failed or case not to be done
498
499- line_vals = {'product_uom': line.imp_uom.id,
500- 'product_id': line.imp_product_id.id,
501- 'price_unit': line.imp_price,
502- 'is_line_split': True,
503- }
504- if line.imp_drd:
505- line_vals['date_planned'] = line.imp_drd
506- if line.imp_project_ref:
507- line_vals['project_ref'] = line.imp_project_ref
508- if line.imp_origin:
509- line_vals['origin'] = line.imp_origin
510- if line.imp_external_ref:
511- line_vals['external_ref'] = line.imp_external_ref
512- if line.imp_stock_take_date:
513- line_vals['stock_take_date'] = line.imp_stock_take_date,
514-
515- # UF-2537 after split reinject import qty computed in
516- # simu for import consistency versus simu
517- # (or set qty of a new product split line)
518- line_vals['product_qty'] = line.imp_qty
519+ line_vals['is_line_split'] = True
520
521 if new_product_split:
522 line_vals.update({
523 'order_id': line.simu_id.order_id.id,
524 'line_number': line.in_line_number,
525- 'confirmed_delivery_date': line.imp_dcd or False,
526 'esc_confirmed': True if line.imp_dcd else False,
527 'original_line_id': line.parent_line_id.po_line_id.id,
528 })
529- if line.parent_line_id.po_line_id.analytic_distribution_id:
530+ if 'confirmed_delivery_date' not in line_vals:
531+ line_vals['confirmed_delivery_date'] = False
532+
533+ if not line_vals.get('analytic_distribution_id') and line.parent_line_id.po_line_id.analytic_distribution_id:
534 line_vals.update({
535 'analytic_distribution_id': self.pool.get('analytic.distribution').copy(cr, uid, line.parent_line_id.po_line_id.analytic_distribution_id.id, {}, context=context),
536 })
537@@ -1740,7 +1874,6 @@
538 line_obj.update_fo_lines(cr, uid, line.parent_line_id.po_line_id.id, context=context)
539 else:
540 if line.imp_dcd:
541- line_vals['confirmed_delivery_date'] = line.imp_dcd
542 line_vals['esc_confirmed'] = line.esc_conf
543 line_obj.write(cr, uid, [new_po_line_id], line_vals,
544 context=context)
545@@ -1750,11 +1883,8 @@
546 # note: if total qty of splited lines is > to original qty
547 # the original line qty was truncated in term of qty
548 # (never be greater than line.parent_line_id.in_qty)
549- line_vals = {
550- 'product_qty': line.parent_line_id.imp_qty,
551- }
552 line_obj.write(cr, uid, [line.parent_line_id.po_line_id.id],
553- line_vals, context=context)
554+ {'product_qty': line.parent_line_id.imp_qty}, context=context)
555
556 job_comment = context.get('job_comment', [])
557 job_comment.append({
558@@ -1764,26 +1894,15 @@
559 })
560 context['job_comment'] = job_comment
561 elif line.type_change == 'new':
562- line_vals = {'order_id': line.simu_id.order_id.id,
563- 'product_id': line.imp_product_id.id,
564- 'product_uom': line.imp_uom.id,
565- 'price_unit': line.imp_price,
566- 'product_qty': line.imp_qty,
567- 'date_planned': line.imp_drd or line.simu_id.order_id.delivery_requested_date,
568- 'set_as_validated_n': True,
569- 'display_sync_ref': True,
570- 'created_by_vi_import': True,
571- }
572- if line.imp_stock_take_date:
573- line_vals['stock_take_date'] = line.imp_stock_take_date,
574- if line.imp_dcd:
575- line_vals['confirmed_delivery_date'] = line.imp_dcd
576- if line.imp_project_ref:
577- line_vals['project_ref'] = line.imp_project_ref
578- if line.imp_origin:
579- line_vals['origin'] = line.imp_origin
580- if line.imp_external_ref:
581- line_vals['external_ref'] = line.imp_external_ref
582+ line_vals.update({
583+ 'order_id': line.simu_id.order_id.id,
584+ 'set_as_validated_n': True,
585+ 'display_sync_ref': True,
586+ 'created_by_vi_import': True,
587+ })
588+ if not line_vals.get('date_planned'):
589+ line_vals['date_planned'] = line.simu_id.order_id.delivery_requested_date
590+
591 if line.esc_conf:
592 line_vals['esc_confirmed'] = line.esc_conf
593 new_line_id = line_obj.create(cr, uid, line_vals, context=context)
594@@ -1796,21 +1915,6 @@
595 })
596 context['job_comment'] = job_comment
597 elif line.po_line_id:
598- line_vals = {'product_id': line.imp_product_id.id,
599- 'product_uom': line.imp_uom.id,
600- 'price_unit': line.imp_price,
601- 'product_qty': line.imp_qty,
602- }
603- if line.imp_drd:
604- line_vals['date_planned'] = line.imp_drd
605- if line.imp_dcd:
606- line_vals['confirmed_delivery_date'] = line.imp_dcd
607- if line.imp_project_ref:
608- line_vals['project_ref'] = line.imp_project_ref
609- if line.imp_origin:
610- line_vals['origin'] = line.imp_origin
611- if line.imp_external_ref:
612- line_vals['external_ref'] = line.imp_external_ref
613 if line.esc_conf:
614 line_vals['esc_confirmed'] = line.esc_conf
615 if context.get('auto_import_ok') and line.simu_id.order_id.stock_take_date:
616@@ -1818,7 +1922,6 @@
617
618 line_obj.write(cr, uid, [line.po_line_id.id], line_vals, context=context)
619 simu_obj.write(cr, uid, [line.simu_id.id], {'percent_completed': percent_completed}, context=context)
620- # Commit modifications
621 cr.commit()
622
623 if ids:
624
625=== modified file 'bin/addons/msf_doc_import/wizard/wizard_po_simulation_screen_view.xml'
626--- bin/addons/msf_doc_import/wizard/wizard_po_simulation_screen_view.xml 2018-08-14 13:52:14 +0000
627+++ bin/addons/msf_doc_import/wizard/wizard_po_simulation_screen_view.xml 2019-04-25 15:27:08 +0000
628@@ -54,6 +54,7 @@
629 <field name="in_shipment_date" />
630 <field name="imp_shipment_date" nolabel="1" />
631 <label> </label>
632+ <field name="ad_info" widget="boolean" />
633 <group colspan="4" string="Header notes">
634 <field name="imp_notes" colspan="4" nolabel="1" />
635 </group>
636
637=== modified file 'bin/addons/msf_profile/i18n/fr_MF.po'
638--- bin/addons/msf_profile/i18n/fr_MF.po 2019-04-08 09:30:07 +0000
639+++ bin/addons/msf_profile/i18n/fr_MF.po 2019-04-25 15:27:08 +0000
640@@ -13212,6 +13212,7 @@
641
642 #. module: msf_tools
643 #: code:addons/msf_tools/automated_import.py:204
644+#: code:addons/msf_tools/automated_export.py:197
645 #, python-format
646 msgid "Unable to connect with given login and password"
647 msgstr "Impossible de se connecter avec le login/mot de passe donné"
648@@ -31708,6 +31709,8 @@
649 #: field:wizard.import.threshold.value.line,data:0
650 #: field:wizard.import.threshold.value.line,filename:0
651 #: field:internal.request.import,error_filename:0
652+#: field:wizard.import.ppl.to.create.ship,data:0
653+#: field:wizard.import.ppl.to.create.ship,filename:0
654 msgid "Lines with errors"
655 msgstr "Lignes avec des erreurs"
656
657@@ -37894,6 +37897,7 @@
658 #: report:addons/tender_flow/report/tender_rfq_comparison_xls.mako:658
659 #: report:empty.picking.ticket:0
660 #: report:picking.ticket:0
661+#: code:addons/msf_doc_import/wizard/wizard_import_ppl_to_create_ship.py:71
662 #, python-format
663 msgid "Date"
664 msgstr "Date"
665@@ -39269,7 +39273,7 @@
666 msgid "Is inactive for destination given in context"
667 msgstr "Is inactive for destination given in context"
668
669-#. modules: sales_followup, account, account_payment, documents_done, consumption_calculation, sale, specific_rules, transport_mgmt, account_override, purchase_allocation_report, product_list, tender_flow, msf_doc_import, stock, product_attributes
670+#. modules: sales_followup, account, account_payment, documents_done, consumption_calculation, sale, specific_rules, transport_mgmt, account_override, purchase_allocation_report, product_list, tender_flow, msf_doc_import, stock, product_attributes, purchase
671 #: field:account.move.line,date_created:0
672 #: field:account.move.reconcile,create_date:0
673 #: report:allocation.invoices.report:0
674@@ -39293,6 +39297,7 @@
675 #: selection:account.journal.column,field:0
676 #: code:addons/product_attributes/product_attributes.py:908
677 #: field:product.product,uf_create_date:0
678+#: field:purchase.order.line,create_date:0
679 #, python-format
680 msgid "Creation date"
681 msgstr "Date de création"
682@@ -59229,6 +59234,7 @@
683 #: code:addons/msf_instance/msf_instance.py:768
684 #: code:addons/msf_instance/msf_instance.py:775
685 #: code:addons/product/product.py:771
686+#: code:addons/msf_doc_import/purchase_order.py:68
687 #, python-format
688 msgid "Warning"
689 msgstr "Avertissement"
690@@ -62165,6 +62171,13 @@
691 msgid "Not able to connect to FTP server at location %s"
692 msgstr "Impossible de se connecter au serveur FTP à l'adresse %s"
693
694+#. module: msf_tools
695+#: code:addons/msf_tools/automated_export.py:192
696+#: code:addons/msf_tools/automated_import.py:199
697+#, python-format
698+msgid "Not able to connect to SFTP server at location %s"
699+msgstr "Impossible de se connecter au serveur SFTP à l'adresse %s"
700+
701 #. module: msf_doc_import
702 #: code:addons/msf_doc_import/check_line.py:397
703 #, python-format
704@@ -64386,6 +64399,7 @@
705 #: code:addons/msf_doc_import/wizard/wizard_import_supplier_catalogue.py:443
706 #: code:addons/msf_doc_import/wizard/wizard_import_tender_line.py:250
707 #: code:addons/msf_doc_import/wizard/wizard_import_threshold_value_line.py:267
708+#: code:addons/msf_doc_import/wizard/wizard_import_ppl_to_create_ship.py:505
709 #, python-format
710 msgid "Nothing to import"
711 msgstr "Rien à importer"
712@@ -66830,6 +66844,7 @@
713 #: field:wizard.import.ir.line,import_error_ok:0
714 #: field:wizard.import.po.line,import_error_ok:0
715 #: field:wizard.import.tender.line,import_error_ok:0
716+#: field:wizard.import.ppl.to.create.ship,import_error_ok:0
717 msgid "Error at import"
718 msgstr "Erreur lors de l'import"
719
720@@ -74643,6 +74658,11 @@
721 #: code:addons/sale/wizard/internal_request_import.py:166
722 #: code:addons/msf_outgoing/wizard/create_picking_processor.py:55
723 #: selection:internal.request.import,state:0
724+#: code:addons/msf_doc_import/wizard/wizard_import_ppl_to_create_ship.py:98
725+#: code:addons/msf_doc_import/wizard/wizard_import_ppl_to_create_ship.py:119
726+#: code:addons/msf_doc_import/wizard/wizard_import_ppl_to_create_ship.py:129
727+#: code:addons/msf_doc_import/wizard/wizard_import_ppl_to_create_ship.py:392
728+#: code:addons/msf_doc_import/wizard/wizard_import_ppl_to_create_ship.py:517
729 #, python-format, python-format
730 msgid "Error"
731 msgstr "Erreur"
732@@ -82207,6 +82227,7 @@
733 #: field:account.invoice.line,cost_center_id:0
734 #: code:addons/register_accounting/wizard/wizard_register_import.py:570
735 #: report:addons/account/report/free_allocation_report.mako:205
736+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:606
737 #, python-format
738 msgid "Cost Center"
739 msgstr "Centre de Coût"
740@@ -84319,6 +84340,7 @@
741 #: field:account.analytic.line,purchase_order_id:0
742 #: selection:procurement.request.sourcing.document,linked_model:0
743 #: report:addons/stock/report/stock_reception_report_xls.mako:227
744+#: field:purchase.order.manual.export,purchase_id:0
745 msgid "Purchase Order"
746 msgstr "Bon de Commande"
747
748@@ -87629,8 +87651,9 @@
749 msgid "Display analytic periods?"
750 msgstr "Display analytic periods?"
751
752-#. module: sale
753+#. module: sale, purchase
754 #: field:sale.order,date_confirm:0
755+#: field:purchase.order.line,confirmation_date:0
756 msgid "Confirmation Date"
757 msgstr "Date de Confirmation"
758
759@@ -94178,9 +94201,10 @@
760 msgid "Real project amount (value)"
761 msgstr "Montant réel projet (valeur)"
762
763-#. module: msf_outgoing
764+#. module: msf_outgoing, msf_doc_import
765 #: code:addons/msf_outgoing/wizard/picking_processor.py:820
766 #: code:addons/msf_outgoing/wizard/ppl_processor.py:779
767+#: code:addons/msf_doc_import/wizard/wizard_import_ppl_to_create_ship.py:130
768 #, python-format
769 msgid "No line to split !"
770 msgstr "Pas de ligne à diviser!"
771@@ -96457,6 +96481,7 @@
772 #: view:sync.client.message_to_send:0
773 #: report:addons/account/report/free_allocation_report.mako:202
774 #: report:addons/stock_override/report/report_stock_move_xls.mako:143
775+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:606
776 #, python-format
777 msgid "Destination"
778 msgstr "Destination"
779@@ -98080,6 +98105,7 @@
780 #: view:stock.move.cancel.wizard:0
781 #: view:stock.picking.cancel.wizard:0
782 #: view:wizard.pick.import:0
783+#: code:addons/msf_doc_import/purchase_order.py:60
784 #, python-format
785 msgid "Cancel"
786 msgstr "Annuler"
787@@ -98752,7 +98778,7 @@
788 msgid "Removal date"
789 msgstr "Date de Retrait"
790
791-#. modules: msf_budget, sync_client, res_currency_tables, financing_contract, msf_homere_interface, msf_instance, export_import_lang
792+#. modules: msf_budget, sync_client, res_currency_tables, financing_contract, msf_homere_interface, msf_instance, export_import_lang, msf_doc_import
793 #: view:base.language.export:0
794 #: view:base.language.import:0
795 #: view:msf.language.import:0
796@@ -98763,6 +98789,7 @@
797 #: view:confirm.import.currencies:0
798 #: view:warning.import.currencies:0
799 #: code:addons/sync_client/backup.py:124
800+#: code:addons/msf_doc_import/purchase_order.py:61
801 #, python-format
802 msgid "OK"
803 msgstr "OK"
804@@ -105336,6 +105363,7 @@
805 msgstr "\n"
806 " Attention: Les lignes suivantes ont des qté ou des montants sur plus de 15 chiffres. Veuillez vérifier la qté et le prix unitaire pour éviter des pertes d'information:\n"
807 " "
808+<<<<<<< TREE
809
810 #. module: msf_homere_interface
811 #: code:addons/msf_homere_interface/wizard/hr_payroll_import.py:580
812@@ -105388,3 +105416,109 @@
813 #, python-format
814 msgid "Product %s, BN: %s not enough stock to process quantity %s %s (stock level: %s)"
815 msgstr "Produit %s, Lot: %s, pas assez de stock pour traiter la qantité %s %s (quantité en stock: %s)"
816+=======
817+
818+#. module: msf_doc_import
819+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:1379
820+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:1392
821+#, python-format
822+msgid "\n"
823+"AD"
824+msgstr "\n"
825+"DA"
826+
827+#. module: msf_doc_import
828+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:584
829+#, python-format
830+msgid "%% in AD must be a number (value found %s), AD in file ignored"
831+msgstr "%% de la DA doit être un nombre (valeur trouvé %s), DA du fichier ignorée"
832+
833+#. module: msf_doc_import
834+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:616
835+#, python-format
836+msgid "%s %s not found or inactive , AD in file ignored"
837+msgstr "%s %s non trouvé ou inactif, DA du fichier ignoré"
838+
839+#. module: msf_doc_import
840+#: code:addons/msf_doc_import/wizard/wizard_in_simulation_screen.py:1542
841+#: code:addons/msf_doc_import/wizard/wizard_in_simulation_screen.py:1550
842+#, python-format
843+msgid "A batch number is defined on the imported file but the product doesn't require batch number - Batch ignored"
844+msgstr "Un numéro de lot est défini dans le fichier d'import mais le produit ne nécessite de pas lot - Numéro de Lot ignoré"
845+
846+#. module: msf_doc_import
847+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:625
848+#, python-format
849+msgid "Already has a valid Analytical Distribution"
850+msgstr "Possède déjà une DA valide"
851+
852+#. module: msf_doc_import
853+#: field:purchase.order,can_be_auto_exported:0
854+msgid "Can be auto exported ?"
855+msgstr "Peut être auto exporté ?"
856+
857+#. module: msf_doc_import
858+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:574
859+#, python-format
860+msgid "Invalid AD format: %d columns found, multiple of 4 expected"
861+msgstr "Format DA invalide: %d colonnes trouvés, une multiple de 4 est attendu"
862+
863+#. module: msf_doc_import
864+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:907
865+#, python-format
866+msgid "Line 22 of the file: Analytical Distribution ignored: \n"
867+" - %s"
868+msgstr "Ligne 22 du fichier: DA ignorée: \n"
869+" - %s"
870+
871+#. module: msf_doc_import
872+#: code:addons/msf_doc_import/purchase_order.py:49
873+#, python-format
874+msgid "Manually run export of PO"
875+msgstr "Exécuter manuellement l'export du BdC"
876+
877+#. module: msf_doc_import
878+#: field:wizard.import.po.simulation.screen.line,ad_info:0
879+msgid "New AD"
880+msgstr "Nouvelle AD"
881+
882+#. module: msf_doc_import
883+#: field:wizard.import.po.simulation.screen,ad_info:0
884+msgid "New Header AD"
885+msgstr "Nouvelle DA en en-tête"
886+
887+#. module: msf_doc_import
888+#: code:addons/msf_doc_import/purchase_order.py:78
889+#, python-format
890+msgid "PO %s %d lines rejected"
891+msgstr "BdC %s %d lignes rejetées"
892+
893+#. module: msf_doc_import
894+#: code:addons/msf_doc_import/purchase_order.py:76
895+#, python-format
896+msgid "PO %s successfully exported"
897+msgstr "BdC %s exporté avec succès"
898+
899+#. module: purchase
900+#: view:purchase.order:0
901+msgid "Run PO export job"
902+msgstr "Exécuter la tâche d'export du BdC"
903+
904+#. module: msf_doc_import
905+#: code:addons/msf_doc_import/wizard/wizard_po_simulation_screen.py:592
906+#, python-format
907+msgid "Sum of AD %% must be 100 (value found %s), AD in file ignored"
908+msgstr "La somme des %% de la DA doit être de 100 (valeur du fichier %s), DA du fichier ignorée"
909+
910+#. module: msf_doc_import
911+#: code:addons/msf_doc_import/purchase_order.py:52
912+#, python-format
913+msgid "The PO was already exported, do you want to export it again ?"
914+msgstr "Le BdC a déjà été exporté, voulez le ré-exporter ?"
915+
916+#. module: msf_doc_import
917+#: code:addons/msf_doc_import/purchase_order.py:68
918+#, python-format
919+msgid "The job to export PO is not active."
920+msgstr "La tâche d'export du PO n'est pas active."
921+>>>>>>> MERGE-SOURCE
922
923=== modified file 'bin/addons/msf_tools/automated_export.py'
924--- bin/addons/msf_tools/automated_export.py 2018-06-08 09:33:50 +0000
925+++ bin/addons/msf_tools/automated_export.py 2019-04-25 15:27:08 +0000
926@@ -219,7 +219,7 @@
927 sftp = pysftp.Connection(obj.ftp_url, username=obj.ftp_login, password=obj.ftp_password, cnopts=cnopts)
928 except:
929 self.infolog(cr, uid, _('%s :: SFTP connection failed') % obj.name)
930- raise osv.except_osv(_('Error'), _('Not able to connect to FTP server at location %s') % obj.ftp_url)
931+ raise osv.except_osv(_('Error'), _('Not able to connect to SFTP server at location %s') % obj.ftp_url)
932
933 if not context.get('no_raise_if_ok'):
934 raise osv.except_osv(_('Info'), _('Connection succeeded'))
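
Both automated_export and automated_import open their sessions through pysftp; the hunks in these two files only fix the raised message, which wrongly said FTP on the SFTP code path. For reference, a minimal sketch of the connection step (host and credentials are placeholders; relaxing host key checking via cnopts is an assumption about how cnopts is built elsewhere in the module):

    import pysftp

    cnopts = pysftp.CnOpts()
    cnopts.hostkeys = None  # assumption: host key verification relaxed
    sftp = pysftp.Connection('sftp.example.org', username='user',
                             password='secret', cnopts=cnopts)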
935
936=== modified file 'bin/addons/msf_tools/automated_export_job.py'
937--- bin/addons/msf_tools/automated_export_job.py 2018-07-16 16:08:08 +0000
938+++ bin/addons/msf_tools/automated_export_job.py 2019-04-25 15:27:08 +0000
939@@ -29,12 +29,12 @@
940 from osv import fields
941
942 from tools.translate import _
943-
944+import logging
945
946
947 class automated_export_job(osv.osv):
948 _name = 'automated.export.job'
949-
950+ logger = logging.getLogger('automated.export.job')
951 def _get_name(self, cr, uid, ids, field_name, args, context=None):
952 """
953 Build the name of the job by using the function_id and the date and time
954@@ -136,17 +136,17 @@
955
956 ftp_connec = None
957 sftp = None
958- context.update({'no_raise_if_ok': True})
959- if job.export_id.ftp_ok and job.export_id.ftp_protocol == 'ftp':
960- ftp_connec = self.pool.get('automated.export').ftp_test_connection(cr, uid, job.export_id.id, context=context)
961- elif job.export_id.ftp_ok and job.export_id.ftp_protocol == 'sftp':
962- sftp = self.pool.get('automated.export').sftp_test_connection(cr, uid, job.export_id.id, context=context)
963- context.pop('no_raise_if_ok')
964+ try:
965+ context.update({'no_raise_if_ok': True})
966+ if job.export_id.ftp_ok and job.export_id.ftp_protocol == 'ftp':
967+ ftp_connec = self.pool.get('automated.export').ftp_test_connection(cr, uid, job.export_id.id, context=context)
968+ elif job.export_id.ftp_ok and job.export_id.ftp_protocol == 'sftp':
969+ sftp = self.pool.get('automated.export').sftp_test_connection(cr, uid, job.export_id.id, context=context)
970+ context.pop('no_raise_if_ok')
971+ # Process export
972+ error_message = []
973+ state = 'done'
974
975- # Process export
976- error_message = []
977- state = 'done'
978- try:
979 processed, rejected, headers = getattr(
980 self.pool.get(job.export_id.function_id.model_id.model),
981 job.export_id.function_id.method_to_call
982@@ -175,6 +175,7 @@
983 'state': state,
984 }, context=context)
985 except Exception as e:
986+ self.logger.error('Unable to process export Job %s (%s)' % (job.id, job.name), exc_info=True)
987 self.write(cr, uid, [job.id], {
988 'start_time': start_time,
989 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
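
The widened try block now also logs the full traceback before the job is written as failed; exc_info=True is what attaches the current exception's traceback to the log record. Minimal sketch of the pattern:

    import logging

    logger = logging.getLogger('automated.export.job')

    def run_export_job():
        raise RuntimeError('boom')  # stand-in for the job body

    try:
        run_export_job()
    except Exception:
        # exc_info=True appends the active traceback to the message
        logger.error('Unable to process export Job', exc_info=True)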
990
991=== modified file 'bin/addons/msf_tools/automated_import.py'
992--- bin/addons/msf_tools/automated_import.py 2019-02-05 10:58:40 +0000
993+++ bin/addons/msf_tools/automated_import.py 2019-04-25 15:27:08 +0000
994@@ -224,7 +224,7 @@
995 sftp = pysftp.Connection(obj.ftp_url, username=obj.ftp_login, password=obj.ftp_password, cnopts=cnopts)
996 except:
997 self.infolog(cr, uid, _('%s :: SFTP connection failed') % obj.name)
998- raise osv.except_osv(_('Error'), _('Not able to connect to FTP server at location %s') % obj.ftp_url)
999+ raise osv.except_osv(_('Error'), _('Not able to connect to SFTP server at location %s') % obj.ftp_url)
1000
1001 if not context.get('no_raise_if_ok'):
1002 raise osv.except_osv(_('Info'), _('Connection succeeded'))
1003
1004=== modified file 'bin/addons/msf_tools/automated_import_job.py'
1005--- bin/addons/msf_tools/automated_import_job.py 2019-02-05 13:34:53 +0000
1006+++ bin/addons/msf_tools/automated_import_job.py 2019-04-25 15:27:08 +0000
1007@@ -38,59 +38,27 @@
1008 from StringIO import StringIO
1009 from mission_stock.mission_stock import UnicodeWriter
1010
1011+from threading import RLock
1012
1013-def all_files_under(path, startswith=False):
1014+def all_files_under(path, startswith=False, already=None):
1015 """
1016 Iterates through all files that are under the given path.
1017 :param path: Path on which we want to iterate
1018 """
1019- res = []
1020+ if already is None:
1021+ already = []
1022+
1023 for cur_path, dirnames, filenames in os.walk(path):
1024 if startswith:
1025 filenames = [fn for fn in filenames if fn.startswith(startswith)]
1026- res.extend([os.path.join(cur_path, fn) for fn in filenames])
1027- break # don't parse children
1028- return res
1029-
1030-def get_oldest_filename(job, ftp_connec=None, sftp=None, already=None):
1031- '''
1032- Get the oldest file in local or on FTP server
1033- '''
1034- if already is None:
1035- already = []
1036- logging.getLogger('automated.import').info(_('Getting the oldest file at location %s') % job.import_id.src_path)
1037- if not job.import_id.ftp_source_ok:
1038- return min(all_files_under(job.import_id.src_path, job.import_id.function_id.startswith), key=os.path.getmtime)
1039- elif job.import_id.ftp_protocol == 'ftp':
1040- files = []
1041- ftp_connec.dir(job.import_id.src_path, files.append)
1042- file_names = []
1043- for file in files:
1044- if file.startswith('d'): # directory
1045- continue
1046- if job.import_id.function_id.startswith and not file.split(' ')[-1].startswith(job.import_id.function_id.startswith):
1047- continue
1048- file_names.append( posixpath.join(job.import_id.src_path, file.split(' ')[-1]) )
1049 res = []
1050- for file in file_names:
1051- if file not in already:
1052- dt = ftp_connec.sendcmd('MDTM %s' % file).split(' ')[-1]
1053- dt = time.strptime(dt, '%Y%m%d%H%M%S') # '20180228170748'
1054- res.append((dt, file))
1055- return min(res, key=lambda x:x[1])[1] if res else False
1056- elif job.import_id.ftp_protocol == 'sftp':
1057- latest = 0
1058- latestfile = False
1059- with sftp.cd(job.import_id.src_path):
1060- for fileattr in sftp.listdir_attr():
1061- if sftp.isfile(fileattr.filename) and fileattr.st_mtime > latest:
1062- if job.import_id.function_id.startswith and not fileattr.filename.startswith(job.import_id.function_id.startswith):
1063- continue
1064- posix_name = posixpath.join(job.import_id.src_path, fileattr.filename)
1065- if posix_name not in already:
1066- latest = fileattr.st_mtime
1067- latestfile = posix_name
1068- return latestfile
1069+ for fn in filenames:
1070+ full_name = os.path.join(cur_path, fn)
1071+ if full_name not in already:
1072+ res.append((os.stat(full_name).st_ctime, full_name))
1073+ return res
1074+ return []
1075+
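
The rewritten helper now returns (st_ctime, full_path) pairs for files directly under path (returning from inside the first os.walk iteration preserves the old "don't descend into children" behaviour) and leaves ordering to the caller. Usage sketch:

    # pick the oldest not-yet-handled file; tuples sort by ctime first
    candidates = all_files_under('/var/imports', startswith='PO_', already=[])
    oldest = min(candidates)[1] if candidates else False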
1076
1077
1078 def get_file_content(file, from_ftp=False, ftp_connec=None, sftp=None):
1079@@ -191,7 +159,10 @@
1080
1081 class automated_import_job(osv.osv):
1082 _name = 'automated.import.job'
1083+ _order = 'id desc'
1084
1085+ _processing = {}
1086+ _lock = RLock()
1087 def _get_name(self, cr, uid, ids, field_name, args, context=None):
1088 """
1089 Build the name of the job by using the function_id and the date and time
1090@@ -215,6 +186,22 @@
1091
1092 return res
1093
1094+ def is_processing_filename(self, filename):
1095+ with self._lock:
1096+ if filename not in self._processing:
1097+ self._processing[filename] = True
1098+ return False
1099+ logging.getLogger('automated.import').info(_('Ignoring %s: already being processed') % filename)
1100+ return True
1101+
1102+ def end_processing_filename(self, filename):
1103+ with self._lock:
1104+ try:
1105+ del(self._processing[filename])
1106+ except KeyError:
1107+ pass
1108+
1109+
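
_processing is shared by every import thread of the server process, so both helpers take the re-entrant lock; is_processing_filename returns False when the caller has just claimed the file, and True when another job already holds it. Sketch of the intended pairing (the try/finally discipline is an assumption about callers, not shown in this hunk):

    if not job_obj.is_processing_filename(fname):  # job_obj: an automated.import.job osv
        try:
            process_file(fname)  # stand-in for the import body
        finally:
            job_obj.end_processing_filename(fname)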
1110 _columns = {
1111 'name': fields.function(
1112 _get_name,
1113@@ -279,7 +266,47 @@
1114 'state': lambda *a: 'draft',
1115 }
1116
1117- _order = 'id desc'
1118+
1119+ def get_oldest_filename(self, job, ftp_connec=None, sftp=None, already=None):
1120+ '''
1121+ Get the oldest file in local or on FTP server
1122+ '''
1123+ if already is None:
1124+ already = []
1125+ logging.getLogger('automated.import').info(_('Getting the oldest file at location %s') % job.import_id.src_path)
1126+
1127+ res = []
1128+ if not job.import_id.ftp_source_ok:
1129+ res = all_files_under(job.import_id.src_path, job.import_id.function_id.startswith, already)
1130+ elif job.import_id.ftp_protocol == 'ftp':
1131+ files = []
1132+ ftp_connec.dir(job.import_id.src_path, files.append)
1133+ file_names = []
1134+ for file in files:
1135+ if file.startswith('d'): # directory
1136+ continue
1137+ if job.import_id.function_id.startswith and not file.split(' ')[-1].startswith(job.import_id.function_id.startswith):
1138+ continue
1139+ file_names.append( posixpath.join(job.import_id.src_path, file.split(' ')[-1]) )
1140+ for file in file_names:
1141+ if file not in already:
1142+ dt = ftp_connec.sendcmd('MDTM %s' % file).split(' ')[-1]
1143+ dt = time.strptime(dt, '%Y%m%d%H%M%S') # '20180228170748'
1144+ res.append((dt, file))
1145+
1146+ elif job.import_id.ftp_protocol == 'sftp':
1147+ with sftp.cd(job.import_id.src_path):
1148+ for fileattr in sftp.listdir_attr():
1149+ if sftp.isfile(fileattr.filename):
1150+ if job.import_id.function_id.startswith and not fileattr.filename.startswith(job.import_id.function_id.startswith):
1151+ continue
1152+ posix_name = posixpath.join(job.import_id.src_path, fileattr.filename)
1153+ if posix_name not in already:
1154+ res.append((fileattr.st_mtime, posix_name))
1155+ for x in sorted(res, key=lambda x:x[0]):
1156+ if not self.is_processing_filename(x[1]):
1157+ return x[1]
1158+ return False
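
On plain FTP the modification time comes from the MDTM command, whose reply payload is a YYYYMMDDHHMMSS stamp parsed with time.strptime; sorting the candidates on that timestamp (x[0]) also fixes the old FTP branch, which picked its minimum by file name. Standalone sketch of the MDTM step (host and path illustrative):

    import time
    from ftplib import FTP

    ftp = FTP('ftp.example.org')
    ftp.login('user', 'secret')
    # the reply looks like '213 20180228170748'; keep the timestamp part
    reply = ftp.sendcmd('MDTM /incoming/PO_0001.xml')
    mtime = time.strptime(reply.split(' ')[-1], '%Y%m%d%H%M%S')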
1159
1160 def manual_process_import(self, cr, uid, ids, context=None):
1161 if isinstance(ids, (int, long)):
1162@@ -332,10 +359,20 @@
1163 ftp_connec = None
1164 sftp = None
1165 context.update({'no_raise_if_ok': True, 'auto_import_ok': True})
1166- if import_data.ftp_ok and import_data.ftp_protocol == 'ftp':
1167- ftp_connec = self.pool.get('automated.import').ftp_test_connection(cr, uid, import_data.id, context=context)
1168- elif import_data.ftp_ok and import_data.ftp_protocol == 'sftp':
1169- sftp = self.pool.get('automated.import').sftp_test_connection(cr, uid, import_data.id, context=context)
1170+ try:
1171+ if import_data.ftp_ok and import_data.ftp_protocol == 'ftp':
1172+ ftp_connec = self.pool.get('automated.import').ftp_test_connection(cr, uid, import_data.id, context=context)
1173+ elif import_data.ftp_ok and import_data.ftp_protocol == 'sftp':
1174+ sftp = self.pool.get('automated.import').sftp_test_connection(cr, uid, import_data.id, context=context)
1175+ except Exception as e:
1176+ if job.id:
1177+ if isinstance(e, osv.except_osv):
1178+ msg = e.value
1179+ else:
1180+ msg = e
1181+ self.write(cr, uid, job_id, {'state': 'error', 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'), 'start_time': start_time, 'comment': tools.ustr(msg)}, context=context)
1182+ cr.commit()
1183+ raise
1184
1185 try:
1186 for path in [('src_path', 'r', 'ftp_source_ok'), ('dest_path', 'w', 'ftp_dest_ok'), ('dest_path_failure', 'w', 'ftp_dest_fail_ok'), ('report_path', 'w', 'ftp_report_ok')]:
1187@@ -348,168 +385,176 @@
1188 if job.file_to_import:
1189 raise e
1190
1191- if not job.file_to_import:
1192+ try:
1193+ oldest_file = False
1194+ orig_file_name = False
1195+ if not job.file_to_import:
1196+ try:
1197+ oldest_file = self.get_oldest_filename(job, ftp_connec, sftp, already_done)
1198+ orig_file_name = oldest_file
1199+ already_done.append(oldest_file)
1200+ if not oldest_file:
1201+ raise ValueError()
1202+ filename = os.path.split(oldest_file)[1]
1203+ file_content = get_file_content(oldest_file, import_data.ftp_source_ok, ftp_connec, sftp)
1204+ md5 = hashlib.md5(file_content).hexdigest()
1205+ data64 = base64.encodestring(file_content)
1206+ except ValueError:
1207+ no_file = True
1208+ except Exception as e:
1209+ no_file = True
1210+ error = tools.ustr(traceback.format_exc())
1211+
1212+ if not error:
1213+ if no_file:
1214+ if not prev_job_id:
1215+ error = _('No file to import in %s !') % import_data.src_path
1216+ else:
1217+ # files already processed in previous loop: delete the in_progress job
1218+ self.unlink(cr, 1, [job_id], context=context)
1219+ job_id = prev_job_id
1220+ break
1221+
1222+ elif md5 and self.search_exist(cr, uid, [('import_id', '=', import_data.id), ('file_sum', '=', md5)], context=context):
1223+ error = _('A file with same checksum has been already imported !')
1224+ move_to_process_path(import_data, ftp_connec, sftp, filename, success=False)
1225+ self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
1226+
1227+ if error:
1228+ self.infolog(cr, uid, '%s :: %s' % (import_data.name , error))
1229+ self.write(cr, uid, [job.id], {
1230+ 'filename': filename,
1231+ 'file_to_import': data64,
1232+ 'start_time': start_time,
1233+ 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
1234+ 'nb_processed_records': 0,
1235+ 'nb_rejected_records': 0,
1236+ 'comment': error,
1237+ 'file_sum': md5,
1238+ 'state': 'done' if no_file else 'error',
1239+ }, context=context)
1240+ continue
1241+ else: # file to import given
1242+ no_file = True
1243+ if job.import_id.ftp_source_ok:
1244+ raise osv.except_osv(_('Error'), _('You cannot manually select a file to import if given source path is set on FTP server'))
1245+ oldest_file = open(os.path.join(job.import_id.src_path, job.filename), 'wb+')
1246+ oldest_file.write(base64.decodestring(job.file_to_import))
1247+ oldest_file.close()
1248+ md5 = hashlib.md5(job.file_to_import).hexdigest()
1249+
1250+ if job.file_sum != md5:
1251+ if self.search_exist(cr, uid, [('file_sum', '=', md5), ('id', '!=', job.id)], context=context):
1252+ self.write(cr, uid, [job.id], {'file_sum': md5}, context=context)
1253+ return {
1254+ 'type': 'ir.actions.act_window',
1255+ 'res_model': self._name,
1256+ 'res_id': job_id,
1257+ 'view_type': 'form',
1258+ 'view_mode': 'form,tree',
1259+ 'target': 'new',
1260+ 'view_id': [data_obj.get_object_reference(cr, uid, 'msf_tools', 'automated_import_job_file_view')[1]],
1261+ 'context': context,
1262+ }
1263+
1264+ oldest_file = os.path.join(job.import_id.src_path, job.filename)
1265+ filename = job.filename
1266+ data64 = base64.encodestring(job.file_to_import)
1267+
1268+ # Process import
1269+ error_message = []
1270+ state = 'done'
1271 try:
1272- oldest_file = get_oldest_filename(job, ftp_connec, sftp, already_done)
1273- already_done.append(oldest_file)
1274- if not oldest_file:
1275- raise ValueError()
1276- filename = os.path.split(oldest_file)[1]
1277- md5 = hashlib.md5(get_file_content(oldest_file, import_data.ftp_source_ok, ftp_connec, sftp)).hexdigest()
1278- data64 = base64.encodestring(get_file_content(oldest_file, import_data.ftp_source_ok, ftp_connec, sftp))
1279- except ValueError:
1280- no_file = True
1281+ if import_data.ftp_source_ok and import_data.ftp_protocol == 'ftp':
1282+ prefix = '%s_' % filename.split('.')[0]
1283+ suffix = '.xls' if self.pool.get('stock.picking').get_import_filetype(cr, uid, filename) == 'excel' else '.xml'
1284+ temp_file = tempfile.NamedTemporaryFile(delete=False, prefix=prefix, suffix=suffix)
1285+ ftp_connec.retrbinary('RETR %s' % oldest_file, temp_file.write)
1286+ temp_file.close()
1287+ oldest_file = temp_file.name
1288+ elif import_data.ftp_source_ok and import_data.ftp_protocol == 'sftp':
1289+ tmp_dest_path = os.path.join(tempfile.gettempdir(), filename)
1290+ sftp.get(oldest_file, tmp_dest_path)
1291+ oldest_file = tmp_dest_path
1292+
1293+ processed, rejected, headers = getattr(
1294+ self.pool.get(import_data.function_id.model_id.model),
1295+ import_data.function_id.method_to_call
1296+ )(cr, uid, oldest_file, context=context)
1297+ if processed:
1298+ nb_processed += self.generate_file_report(cr, uid, job, processed, headers, ftp_connec=ftp_connec, sftp=sftp)
1299+
1300+ if rejected:
1301+ nb_rejected += self.generate_file_report(cr, uid, job, rejected, headers, rejected=True, ftp_connec=ftp_connec, sftp=sftp)
1302+ state = 'error'
1303+ for rejected_line in rejected:
1304+ line_message = ''
1305+ if rejected_line[0]:
1306+ line_message = _('Line %s: ') % rejected_line[0]
1307+ line_message += rejected_line[2]
1308+ error_message.append(line_message)
1309+
1310+ if context.get('rejected_confirmation'):
1311+ nb_rejected += context.get('rejected_confirmation')
1312+ state = 'error'
1313+
1314+ self.infolog(cr, uid, _('%s :: Import job done with %s records processed and %s rejected') % (import_data.name, len(processed), nb_rejected))
1315+
1316+ if import_data.function_id.model_id.model == 'purchase.order':
1317+ po_id = self.pool.get('purchase.order').get_po_id_from_file(cr, uid, oldest_file, context=context)
1318+ if po_id and (nb_processed or nb_rejected):
1319+ po_name = self.pool.get('purchase.order').read(cr, uid, po_id, ['name'], context=context)['name']
1320+ nb_total_pol = self.pool.get('purchase.order.line').search(cr, uid, [('order_id', '=', po_id)], count=True, context=context)
1321+ msg = _('%s: ') % po_name
1322+ if nb_processed:
1323+ msg += _('%s out of %s lines have been updated') % (nb_processed, nb_total_pol)
1324+ if nb_rejected:
1325+ msg += _(' and ')
1326+ if nb_rejected:
1327+ msg += _('%s out of %s lines have been rejected') % (nb_rejected, nb_total_pol)
1328+ if nb_processed or nb_rejected:
1329+ self.pool.get('purchase.order').log(cr, uid, po_id, msg)
1330+
1331+ if context.get('job_comment'):
1332+ for msg_dict in context['job_comment']:
1333+ self.pool.get(msg_dict['res_model']).log(cr, uid, msg_dict['res_id'], msg_dict['msg'])
1334+ error_message.append(msg_dict['msg'])
1335+
1336+ self.write(cr, uid, [job.id], {
1337+ 'filename': filename,
1338+ 'start_time': start_time,
1339+ 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
1340+ 'nb_processed_records': nb_processed,
1341+ 'nb_rejected_records': nb_rejected,
1342+ 'comment': '\n'.join(error_message),
1343+ 'file_sum': md5,
1344+ 'file_to_import': data64,
1345+ 'state': state,
1346+ }, context=context)
1347+ is_success = True if not rejected else False
1348+ move_to_process_path(import_data, ftp_connec, sftp, filename, success=is_success)
1349+ self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
1350+ cr.commit()
1351 except Exception as e:
1352- no_file = True
1353- error = tools.ustr(traceback.format_exc())
1354-
1355- if not error:
1356- if no_file:
1357- if not prev_job_id:
1358- error = _('No file to import in %s !') % import_data.src_path
1359- else:
1360- # files already processed in previous loop: delete the in_progress job
1361- self.unlink(cr, 1, [job_id], context=context)
1362- job_id = prev_job_id
1363- break
1364-
1365- elif md5 and self.search_exist(cr, uid, [('import_id', '=', import_data.id), ('file_sum', '=', md5)], context=context):
1366- error = _('A file with same checksum has been already imported !')
1367- move_to_process_path(import_data, ftp_connec, sftp, filename, success=False)
1368- self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
1369-
1370- if error:
1371- self.infolog(cr, uid, '%s :: %s' % (import_data.name , error))
1372+ cr.rollback()
1373+ trace_b = tools.ustr(traceback.format_exc())
1374+ self.infolog(cr, uid, '%s :: %s' % (import_data.name, trace_b))
1375 self.write(cr, uid, [job.id], {
1376- 'filename': filename,
1377- 'file_to_import': data64,
1378+ 'filename': False,
1379 'start_time': start_time,
1380 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
1381 'nb_processed_records': 0,
1382 'nb_rejected_records': 0,
1383- 'comment': error,
1384+ 'comment': trace_b,
1385 'file_sum': md5,
1386- 'state': 'done' if no_file else 'error',
1387+ 'file_to_import': data64,
1388+ 'state': 'error',
1389 }, context=context)
1390- continue
1391- else: # file to import given
1392- no_file = True
1393- if job.import_id.ftp_source_ok:
1394- raise osv.except_osv(_('Error'), _('You cannot manually select a file to import if given source path is set on FTP server'))
1395- oldest_file = open(os.path.join(job.import_id.src_path, job.filename), 'wb+')
1396- oldest_file.write(base64.decodestring(job.file_to_import))
1397- oldest_file.close()
1398- md5 = hashlib.md5(job.file_to_import).hexdigest()
1399-
1400- if job.file_sum != md5:
1401- if self.search_exist(cr, uid, [('file_sum', '=', md5), ('id', '!=', job.id)], context=context):
1402- self.write(cr, uid, [job.id], {'file_sum': md5}, context=context)
1403- return {
1404- 'type': 'ir.actions.act_window',
1405- 'res_model': self._name,
1406- 'res_id': job_id,
1407- 'view_type': 'form',
1408- 'view_mode': 'form,tree',
1409- 'target': 'new',
1410- 'view_id': [data_obj.get_object_reference(cr, uid, 'msf_tools', 'automated_import_job_file_view')[1]],
1411- 'context': context,
1412- }
1413-
1414- oldest_file = os.path.join(job.import_id.src_path, job.filename)
1415- filename = job.filename
1416- data64 = base64.encodestring(job.file_to_import)
1417-
1418- # Process import
1419- error_message = []
1420- state = 'done'
1421- try:
1422- if import_data.ftp_source_ok and import_data.ftp_protocol == 'ftp':
1423- prefix = '%s_' % filename.split('.')[0]
1424- suffix = '.xls' if self.pool.get('stock.picking').get_import_filetype(cr, uid, filename) == 'excel' else '.xml'
1425- temp_file = tempfile.NamedTemporaryFile(delete=False, prefix=prefix, suffix=suffix)
1426- ftp_connec.retrbinary('RETR %s' % oldest_file, temp_file.write)
1427- temp_file.close()
1428- oldest_file = temp_file.name
1429- elif import_data.ftp_source_ok and import_data.ftp_protocol == 'sftp':
1430- tmp_dest_path = os.path.join(tempfile.gettempdir(), filename)
1431- sftp.get(oldest_file, tmp_dest_path)
1432- oldest_file = tmp_dest_path
1433-
1434- processed, rejected, headers = getattr(
1435- self.pool.get(import_data.function_id.model_id.model),
1436- import_data.function_id.method_to_call
1437- )(cr, uid, oldest_file, context=context)
1438- if processed:
1439- nb_processed += self.generate_file_report(cr, uid, job, processed, headers, ftp_connec=ftp_connec, sftp=sftp)
1440-
1441- if rejected:
1442- nb_rejected += self.generate_file_report(cr, uid, job, rejected, headers, rejected=True, ftp_connec=ftp_connec, sftp=sftp)
1443- state = 'error'
1444- for resjected_line in rejected:
1445- line_message = ''
1446- if resjected_line[0]:
1447- line_message = _('Line %s: ') % resjected_line[0]
1448- line_message += resjected_line[2]
1449- error_message.append(line_message)
1450-
1451- if context.get('rejected_confirmation'):
1452- nb_rejected += context.get('rejected_confirmation')
1453- state = 'error'
1454-
1455- self.infolog(cr, uid, _('%s :: Import job done with %s records processed and %s rejected') % (import_data.name, len(processed), nb_rejected))
1456-
1457- if import_data.function_id.model_id.model == 'purchase.order':
1458- po_id = self.pool.get('purchase.order').get_po_id_from_file(cr, uid, oldest_file, context=context)
1459- if po_id and (nb_processed or nb_rejected):
1460- po_name = self.pool.get('purchase.order').read(cr, uid, po_id, ['name'], context=context)['name']
1461- nb_total_pol = self.pool.get('purchase.order.line').search(cr, uid, [('order_id', '=', po_id)], count=True, context=context)
1462- msg = _('%s: ') % po_name
1463- if nb_processed:
1464- msg += _('%s out of %s lines have been updated') % (nb_processed, nb_total_pol)
1465- if nb_rejected:
1466- msg += _(' and ')
1467- if nb_rejected:
1468- msg += _('%s out of %s lines have been rejected') % (nb_rejected, nb_total_pol)
1469- if nb_processed or nb_rejected:
1470- self.pool.get('purchase.order').log(cr, uid, po_id, msg)
1471-
1472- if context.get('job_comment'):
1473- for msg_dict in context['job_comment']:
1474- self.pool.get(msg_dict['res_model']).log(cr, uid, msg_dict['res_id'], msg_dict['msg'])
1475- error_message.append(msg_dict['msg'])
1476-
1477- self.write(cr, uid, [job.id], {
1478- 'filename': filename,
1479- 'start_time': start_time,
1480- 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
1481- 'nb_processed_records': nb_processed,
1482- 'nb_rejected_records': nb_rejected,
1483- 'comment': '\n'.join(error_message),
1484- 'file_sum': md5,
1485- 'file_to_import': data64,
1486- 'state': state,
1487- }, context=context)
1488- is_success = True if not rejected else False
1489- move_to_process_path(import_data, ftp_connec, sftp, filename, success=is_success)
1490- self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
1491- cr.commit()
1492- except Exception as e:
1493- cr.rollback()
1494- trace_b = tools.ustr(traceback.format_exc())
1495- self.infolog(cr, uid, '%s :: %s' % (import_data.name, trace_b))
1496- self.write(cr, uid, [job.id], {
1497- 'filename': False,
1498- 'start_time': start_time,
1499- 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
1500- 'nb_processed_records': 0,
1501- 'nb_rejected_records': 0,
1502- 'comment': trace_b,
1503- 'file_sum': md5,
1504- 'file_to_import': data64,
1505- 'state': 'error',
1506- }, context=context)
1507- move_to_process_path(import_data, ftp_connec, sftp, filename, success=False)
1508- self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
1509+ move_to_process_path(import_data, ftp_connec, sftp, filename, success=False)
1510+ self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
1511+ finally:
1512+ if orig_file_name:
1513+ self.end_processing_filename(orig_file_name)
1514
1515 if 'row' in context:
1516 # causing LmF when running job manually
1517
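The hunk above always stages the remote file locally before import: retrbinary for plain FTP, sftp.get for SFTP, then the local path is handed to the configured import method inside one try/except/finally, with cr.commit() on success and cr.rollback() plus an error-state job write on failure. A minimal standalone sketch of the FTP branch of that staging pattern, using only the standard library (host, credentials and file name below are placeholders, not values from this branch):

import os
import tempfile
from ftplib import FTP

def stage_remote_file(ftp, remote_name):
    """Download remote_name through an open FTP connection into a
    NamedTemporaryFile and return the local path (caller removes it)."""
    prefix = '%s_' % remote_name.split('.')[0]
    suffix = os.path.splitext(remote_name)[1] or '.xml'
    tmp = tempfile.NamedTemporaryFile(delete=False, prefix=prefix, suffix=suffix)
    # retrbinary streams the remote file chunk by chunk into the callback
    ftp.retrbinary('RETR %s' % remote_name, tmp.write)
    tmp.close()
    return tmp.name

# Usage (placeholder connection details):
# ftp = FTP('ftp.example.com')
# ftp.login('user', 'secret')
# local_path = stage_remote_file(ftp, 'PO_00042.xml')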
1518=== modified file 'bin/addons/purchase/purchase_order_line.py'
1519--- bin/addons/purchase/purchase_order_line.py 2019-03-29 18:03:01 +0000
1520+++ bin/addons/purchase/purchase_order_line.py 2019-04-25 15:27:08 +0000
1521@@ -368,6 +368,38 @@
1522 res[pol['id']] = "%s%s"%(d_state, pol['have_analytic_distribution_from_header'] and _(" (from header)") or "")
1523 return res
1524
1525+ def get_distribution_account(self, cr, uid, product_record, nomen_record, po_type, product_cache=None, categ_cache=None, context=None):
1526+ if product_cache is None:
1527+ product_cache = {}
1528+ if categ_cache is None:
1529+ categ_cache = {}
1530+
1531+ a = False
1532+ # There are 4 cases for a PO line (2 criteria affect the account: whether the PO is in-kind, and whether the line has a product or only a nomenclature):
1533+ # - PO is an in-kind donation AND the line has a product: take the donation expense account on the product OR on the product category, else raise an error
1534+ # - PO is NOT in-kind and the line has a product: take the product expense account OR the category expense account
1535+ # - PO is in-kind but the line has no product => this should not happen! It should raise an error, but return False instead (otherwise the PO line could not be written)
1536+ # - any other case: take the expense account of the family attached to the nomenclature
1537+ if product_record and po_type == 'in_kind':
1538+ a = product_record.donation_expense_account and product_record.donation_expense_account.id or False
1539+ if not a:
1540+ a = product_record.categ_id.donation_expense_account and product_record.categ_id.donation_expense_account.id or False
1541+ elif product_record:
1542+ if product_record.product_tmpl_id in product_cache:
1543+ a = product_cache[product_record.product_tmpl_id]
1544+ else:
1545+ a = product_record.product_tmpl_id.property_account_expense.id or False
1546+ product_cache[product_record.product_tmpl_id] = a
1547+ if not a:
1548+ if product_record.categ_id in categ_cache:
1549+ a = categ_cache[product_record.categ_id]
1550+ else:
1551+ a = product_record.categ_id.property_account_expense_categ.id or False
1552+ categ_cache[product_record.categ_id] = a
1553+ else:
1554+ a = nomen_record and nomen_record.category_id and nomen_record.category_id.property_account_expense_categ and nomen_record.category_id.property_account_expense_categ.id or False
1555+ return a
1556+
1557 def _get_distribution_account(self, cr, uid, ids, name, arg, context=None):
1558 """
1559 Get account for given lines regarding:
1560@@ -384,36 +416,7 @@
1561 categ_dict = {}
1562 for line in self.browse(cr, uid, ids):
1563 # Prepare some values
1564- res[line.id] = False
1565- a = False
1566- # Check if PO is inkind
1567- is_inkind = False
1568- if line.order_id and line.order_id.order_type == 'in_kind':
1569- is_inkind = True
1570- # To my mind there is 4 cases for a PO line (because of 2 criteria that affect account: "PO is inkind or not" and "line have a product or a nomenclature"):
1571- # - PO is an inkind donation AND PO line have a product: take donation expense account on product OR on product category, else raise an error
1572- # - PO is NOT inkind and PO line have a product: take product expense account OR category expense account
1573- # - PO is inkind but not PO Line product => this should not happens ! Should be raise an error but return False (if not we could'nt write a PO line)
1574- # - other case: take expense account on family that's attached to nomenclature
1575- if line.product_id and is_inkind:
1576- a = line.product_id.donation_expense_account and line.product_id.donation_expense_account.id or False
1577- if not a:
1578- a = line.product_id.categ_id.donation_expense_account and line.product_id.categ_id.donation_expense_account.id or False
1579- elif line.product_id:
1580- if line.product_id.product_tmpl_id in product_tmpl_dict:
1581- a = product_tmpl_dict[line.product_id.product_tmpl_id]
1582- else:
1583- a = line.product_id.product_tmpl_id.property_account_expense.id or False
1584- product_tmpl_dict[line.product_id.product_tmpl_id] = a
1585- if not a:
1586- if line.product_id.categ_id in categ_dict:
1587- a = categ_dict[line.product_id.categ_id]
1588- else:
1589- a = line.product_id.categ_id.property_account_expense_categ.id or False
1590- categ_dict[line.product_id.categ_id] = a
1591- else:
1592- a = line.nomen_manda_2 and line.nomen_manda_2.category_id and line.nomen_manda_2.category_id.property_account_expense_categ and line.nomen_manda_2.category_id.property_account_expense_categ.id or False
1593- res[line.id] = a
1594+ res[line.id] = self.get_distribution_account(cr, uid, line.product_id, line.nomen_manda_2, line.order_id.order_type, product_cache=product_tmpl_dict, categ_cache=categ_dict, context=context)
1595 return res
1596
1597 def _get_product_info(self, cr, uid, ids, field_name=None, arg=None, context=None):
1598
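The refactor above lifts the account lookup out of the function field computation into get_distribution_account, so any caller can resolve an account for a product/nomenclature pair while sharing caches keyed by product template and category across many lines. A framework-free sketch of the same fallback order (the plain dicts below are hypothetical stand-ins for browse records, not the real ORM objects):

def pick_expense_account(product, nomen_categ, po_type,
                         product_cache=None, categ_cache=None):
    """Resolve an expense account id with the precedence used above:
    in-kind PO + product -> donation account on product, then on category;
    regular PO + product -> template expense account, then category account;
    no product           -> expense account on the nomenclature category."""
    if product_cache is None:
        product_cache = {}
    if categ_cache is None:
        categ_cache = {}
    account = False
    if product and po_type == 'in_kind':
        account = product.get('donation_account') or \
            product['categ'].get('donation_account') or False
    elif product:
        tmpl = product['tmpl_id']
        if tmpl not in product_cache:
            product_cache[tmpl] = product.get('expense_account') or False
        account = product_cache[tmpl]
        if not account:
            categ = product['categ_id']
            if categ not in categ_cache:
                categ_cache[categ] = product['categ'].get('expense_account') or False
            account = categ_cache[categ]
    elif nomen_categ:
        account = nomen_categ.get('expense_account') or False
    return account

# Hypothetical usage: no template account set, so the category one is used.
# prod = {'tmpl_id': 'T1', 'categ_id': 'C1', 'expense_account': None,
#         'donation_account': None, 'categ': {'expense_account': 600}}
# pick_expense_account(prod, None, 'regular')  # -> 600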
1599=== modified file 'bin/addons/purchase/purchase_view.xml'
1600--- bin/addons/purchase/purchase_view.xml 2019-02-27 14:15:35 +0000
1601+++ bin/addons/purchase/purchase_view.xml 2019-04-25 15:27:08 +0000
1602@@ -1,13 +1,13 @@
1603 <?xml version="1.0" encoding="utf-8"?>
1604 <openerp>
1605 <data>
1606- <menuitem icon="terp-purchase" id="base.menu_purchase_root" name="Purchases" sequence="3" groups="group_purchase_manager,group_purchase_user" web_icon="images/purchases.png" web_icon_hover="images/purchases-hover.png"/>
1607+ <menuitem icon="terp-purchase" id="base.menu_purchase_root" name="Purchases" sequence="3" web_icon="images/purchases.png" web_icon_hover="images/purchases-hover.png"/>
1608 <menuitem id="menu_procurement_management" name="Purchase Management" parent="base.menu_purchase_root" sequence="1"/>
1609- <menuitem id="menu_purchase_config_purchase" name="Configuration" groups="group_purchase_manager" parent="base.menu_purchase_root" sequence="100"/>
1610+ <menuitem id="menu_purchase_config_purchase" name="Configuration" parent="base.menu_purchase_root" sequence="100"/>
1611 <menuitem id="menu_purchase_config_pricelist" name="Pricelists" parent="menu_purchase_config_purchase" sequence="50"/>
1612 <menuitem action="product.product_pricelist_action" id="menu_product_pricelist_action_purhase" parent="menu_purchase_config_pricelist" sequence="20"/>
1613 <menuitem action="product.product_pricelist_action2" id="menu_product_pricelist_action2_purchase" parent="menu_purchase_config_pricelist" sequence="10"/>
1614- <menuitem action="product.product_pricelist_type_action" id="menu_purchase_product_pricelist_type" parent="menu_purchase_config_pricelist" sequence="2" groups="base.group_extended"/>
1615+ <menuitem action="product.product_pricelist_type_action" id="menu_purchase_product_pricelist_type" parent="menu_purchase_config_pricelist" sequence="2" />
1616 <menuitem id="menu_product_in_config_purchase" name="Product" parent="menu_purchase_config_purchase" sequence="30"/>
1617 <menuitem action="product.product_category_action_form" id="menu_product_category_config_purchase" parent="purchase.menu_product_in_config_purchase" sequence="10"/>
1618 <menuitem id="menu_purchase_unit_measure_purchase" name="Units of Measure" parent="purchase.menu_product_in_config_purchase" sequence="20"/>
1619@@ -45,7 +45,7 @@
1620
1621 <!--Inventory control-->
1622 <menuitem id="menu_procurement_management_inventory" name="Receive Products" parent="base.menu_purchase_root" sequence="4"/>
1623- <menuitem action="stock.action_picking_tree4" id="menu_action_picking_tree4" parent="menu_procurement_management_inventory" name="Incoming Shipments" sequence="9" groups="base.group_extended"/>
1624+ <menuitem action="stock.action_picking_tree4" id="menu_action_picking_tree4" parent="menu_procurement_management_inventory" name="Incoming Shipments" sequence="9" />
1625 <menuitem action="stock.action_reception_picking_move" id="menu_action_picking_tree_in_move" parent="menu_procurement_management_inventory" sequence="11"/>
1626
1627 <!--Invoice control-->
1628@@ -364,8 +364,12 @@
1629 </field>
1630 </page>
1631 <page string="Notes">
1632+ <group colspan="4" col="6">
1633 <field name="import_in_progress" readonly="1" invisible="0"/>
1634 <field name="auto_exported_ok" readonly="1"/>
1635+ <field name="can_be_auto_exported" invisible="1" />
1636+ <button name="auto_export_manual" string="Run PO export job" attrs="{'invisible': [('can_be_auto_exported', '!=', True)]}" type="object" icon="terp-camera_test" colspan="2"/>
1637+ </group>
1638 <field colspan="4" name="notes" nolabel="1"/>
1639 <separator colspan="4" string="Message ESC"/>
1640 <field name="message_esc" colspan="4" nolabel="1" readonly="1" attrs="{'invisible': [('partner_type', '!=', 'esc')]}"/>
1641
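The new button on the Notes page is shown only when can_be_auto_exported is set (the invisible field exists solely to feed the attrs expression) and calls auto_export_manual on purchase.order, which is defined elsewhere in this branch. A hypothetical sketch of the usual shape of such a handler in this codebase, returning a popup action in the act_window form visible earlier in this diff (the wizard model name is an illustrative assumption, not taken from this branch):

def auto_export_manual(self, cr, uid, ids, context=None):
    # Open a confirmation popup before re-running the export job.
    # 'po.manual.export.confirm' is an illustrative model name only.
    if context is None:
        context = {}
    context = dict(context, purchase_order=ids[0])
    return {
        'type': 'ir.actions.act_window',
        'res_model': 'po.manual.export.confirm',
        'view_type': 'form',
        'view_mode': 'form',
        'target': 'new',
        'context': context,
    }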
1642=== modified file 'bin/addons/purchase/purchase_workflow.py'
1643--- bin/addons/purchase/purchase_workflow.py 2019-03-04 11:09:31 +0000
1644+++ bin/addons/purchase/purchase_workflow.py 2019-04-25 15:27:08 +0000
1645@@ -657,10 +657,10 @@
1646 raise osv.except_osv(_('Error'), _('Line %s: Please choose a product before confirming the line') % pol.line_number)
1647
1648 if pol.order_type != 'direct' and not pol.from_synchro_return_goods:
1649- # create incoming shipment (IN):
1650+ # create incoming shipment (IN):
1651 in_id = self.pool.get('stock.picking').search(cr, uid, [
1652 ('purchase_id', '=', pol.order_id.id),
1653- ('state', 'not in', ['done', 'cancel', 'shipped']),
1654+ ('state', 'not in', ['done', 'cancel', 'shipped', 'updated', 'import']),
1655 ('type', '=', 'in'),
1656 ])
1657 created = False
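For context on the domain change above: when a PO line is confirmed, the workflow first looks for an open incoming shipment on the same order to attach to, and creates a new one only when none qualifies; the hunk adds 'updated' and 'import' to the states that disqualify a picking from reuse. A condensed sketch of that reuse-or-create pattern in the codebase's idiom (the create values are illustrative, not the full set the workflow passes):

def _get_or_create_in(self, cr, uid, pol, context=None):
    """Return (picking_id, created) for the PO line's incoming shipment."""
    pick_obj = self.pool.get('stock.picking')
    in_ids = pick_obj.search(cr, uid, [
        ('purchase_id', '=', pol.order_id.id),
        # 'updated' and 'import' now also count as closed for reuse
        ('state', 'not in', ['done', 'cancel', 'shipped', 'updated', 'import']),
        ('type', '=', 'in'),
    ], context=context)
    if in_ids:
        return in_ids[0], False
    # illustrative minimal values; the real workflow supplies many more
    in_id = pick_obj.create(cr, uid, {
        'purchase_id': pol.order_id.id,
        'type': 'in',
    }, context=context)
    return in_id, True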

Subscribers

People subscribed via source and target branches