Merge lp:~dorian-kemps/unifield-server/US-6448 into lp:unifield-server

Proposed by jftempo
Status: Needs review
Proposed branch: lp:~dorian-kemps/unifield-server/US-6448
Merge into: lp:unifield-server
Diff against target: 280 lines (+58/-56)
2 files modified
bin/addons/msf_tools/automated_import.py (+0/-1)
bin/addons/msf_tools/automated_import_job.py (+58/-55)
To merge this branch: bzr merge lp:~dorian-kemps/unifield-server/US-6448
Reviewer: UniField Reviewer Team
Status: Pending
Review via email: mp+388477@code.launchpad.net

Unmerged revisions

5562. By Dorian

US-6448 [FIX] Automated import: fixed the manual job

5561. By Dorian

US-6448 [MERGE] Merge with trunk

5560. By Dorian

US-6448 [PROGRESS] Auto import: manual import is done in background

5559. By Dorian

US-6448 [PROGRESS]
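
In substance, the branch changes manual_process_import so that it no longer runs the import inside the web request: it starts a thread and returns an act_window on the job immediately, while process_import opens its own database cursor so the background work can commit independently of the request. A condensed sketch of the pattern, based on the hunks below (simplified, not the verbatim code; the body of the import loop is elided):

    import threading
    import pooler

    def manual_process_import(self, cr, uid, ids, context=None):
        if isinstance(ids, (int, long)):
            ids = [ids]
        wiz = self.read(cr, uid, ids[0], ['import_id'], context)

        # Run the import in the background so the client gets the job form
        # back at once instead of waiting for the import to finish.
        thread = threading.Thread(
            target=self.process_import,
            args=(cr, uid, wiz['import_id'][0], ids[0], None))
        thread.start()

        # Reopen the job form; it stays 'in_progress' until the thread commits.
        return {
            'type': 'ir.actions.act_window',
            'res_model': self._name,
            'res_id': ids[0],
            'view_type': 'form',
            'view_mode': 'form,tree',
            'context': context,
        }

    def process_import(self, cr, uid, import_id, started_job_id=False, context=None):
        # The request cursor may be gone once the action has returned, so the
        # thread only uses it to resolve the database name and then works on
        # a cursor of its own, committing and closing it itself.
        new_cr = pooler.get_db(cr.dbname).cursor()
        try:
            # ... import loop: all reads and job-state writes use new_cr ...
            new_cr.commit()
        finally:
            new_cr.close(True)
        return True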

Preview Diff

=== modified file 'bin/addons/msf_tools/automated_import.py'
--- bin/addons/msf_tools/automated_import.py 2019-08-02 14:52:35 +0000
+++ bin/addons/msf_tools/automated_import.py 2020-07-31 12:01:18 +0000
@@ -357,7 +357,6 @@
 
         return res
 
-
     def _generate_ir_cron(self, import_brw):
         """
         Returns the values for the ir.cron to create according to automated.import values
 
=== modified file 'bin/addons/msf_tools/automated_import_job.py'
--- bin/addons/msf_tools/automated_import_job.py 2019-11-13 16:23:09 +0000
+++ bin/addons/msf_tools/automated_import_job.py 2020-07-31 12:01:18 +0000
@@ -30,6 +30,8 @@
 import logging
 import posixpath
 import traceback
+import threading
+import pooler
 
 from osv import osv
 from osv import fields
@@ -311,8 +313,22 @@
     def manual_process_import(self, cr, uid, ids, context=None):
         if isinstance(ids, (int, long)):
             ids = [ids]
+        data_obj = self.pool.get('ir.model.data')
+
         wiz = self.read(cr, uid, ids[0], ['import_id'], context)
-        return self.process_import(cr, uid, wiz['import_id'][0], started_job_id=ids[0], context=None)
+
+        # Background import
+        thread = threading.Thread(target=self.process_import, args=(cr, uid, wiz['import_id'][0], ids[0], None))
+        thread.start()
+
+        return {
+            'type': 'ir.actions.act_window',
+            'res_model': self._name,
+            'res_id': ids[0],
+            'view_type': 'form',
+            'view_mode': 'form,tree',
+            'context': context,
+        }
 
     def process_import(self, cr, uid, import_id, started_job_id=False, context=None):
         """
@@ -326,7 +342,6 @@
         :return: True
         """
         import_obj = self.pool.get('automated.import')
-        data_obj = self.pool.get('ir.model.data')
 
         if context is None:
             context = {}
@@ -334,7 +349,9 @@
         if isinstance(import_id, (int, long)):
             import_id = [import_id]
 
-        import_data = import_obj.browse(cr, uid, import_id[0], context=context)
+        new_cr = pooler.get_db(cr.dbname).cursor()
+
+        import_data = import_obj.browse(new_cr, uid, import_id[0], context=context)
         no_file = False
         already_done = []
         job_id = False
@@ -344,12 +361,14 @@
             nb_processed = 0
             if started_job_id:
                 job_id = started_job_id
+                self.write(new_cr, uid, job_id, {'state': 'in_progress'}, context=context)
                 prev_job_id = False
             else:
                 prev_job_id = job_id
-                job_id = self.create(cr, uid, {'import_id': import_data.id, 'state': 'in_progress'}, context=context)
-                cr.commit() # keep trace of the job in case of error
-            job = self.browse(cr, uid, job_id, context=context)
+                job_id = self.create(new_cr, uid, {'import_id': import_data.id, 'state': 'in_progress'}, context=context)
+                new_cr.commit() # keep trace of the job in case of error
+
+            job = self.browse(new_cr, uid, job_id, context=context)
             started_job_id = False
             md5 = False
             error = None
@@ -361,17 +380,18 @@
             context.update({'no_raise_if_ok': True, 'auto_import_ok': True})
             try:
                 if import_data.ftp_ok and import_data.ftp_protocol == 'ftp':
-                    ftp_connec = self.pool.get('automated.import').ftp_test_connection(cr, uid, import_data.id, context=context)
+                    ftp_connec = self.pool.get('automated.import').ftp_test_connection(new_cr, uid, import_data.id, context=context)
                 elif import_data.ftp_ok and import_data.ftp_protocol == 'sftp':
-                    sftp = self.pool.get('automated.import').sftp_test_connection(cr, uid, import_data.id, context=context)
+                    sftp = self.pool.get('automated.import').sftp_test_connection(new_cr, uid, import_data.id, context=context)
             except Exception, e:
                 if job.id:
                     if isinstance(e, osv.except_osv):
                         msg = e.value
                     else:
                         msg = e
-                    self.write(cr, uid, job_id, {'state': 'error', 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'), 'start_time': start_time, 'comment': tools.ustr(msg)}, context=context)
-                    cr.commit()
+                    self.write(new_cr, uid, job_id, {'state': 'error', 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'), 'start_time': start_time, 'comment': tools.ustr(msg)}, context=context)
+                    new_cr.commit()
+                    new_cr.close(True)
                 raise
 
             try:
@@ -381,6 +401,7 @@
             except osv.except_osv as e:
                 error = tools.ustr(e)
                 no_file = True
+                new_cr.close(True)
                 # In case of manual processing, raise the error
                 if job.file_to_import:
                     raise e
@@ -411,18 +432,18 @@
                         error = _('No file to import in %s !') % import_data.src_path
                     else:
                         # files already processed in previous loop: delete the in_progress job
-                        self.unlink(cr, 1, [job_id], context=context)
+                        self.unlink(new_cr, 1, [job_id], context=context)
                         job_id = prev_job_id
                         break
 
-                elif md5 and self.search_exist(cr, uid, [('import_id', '=', import_data.id), ('file_sum', '=', md5)], context=context):
+                elif md5 and self.search_exist(new_cr, uid, [('import_id', '=', import_data.id), ('file_sum', '=', md5)], context=context):
                     error = _('A file with same checksum has been already imported !')
                     move_to_process_path(import_data, ftp_connec, sftp, filename, success=False)
-                    self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
+                    self.infolog(new_cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
 
                 if error:
-                    self.infolog(cr, uid, '%s :: %s' % (import_data.name , error))
-                    self.write(cr, uid, [job.id], {
+                    self.infolog(new_cr, uid, '%s :: %s' % (import_data.name , error))
+                    self.write(new_cr, uid, [job.id], {
                         'filename': filename,
                         'file_to_import': data64,
                         'start_time': start_time,
@@ -434,7 +455,7 @@
                         'state': 'done' if no_file else 'error',
                     }, context=context)
                     continue
             else: # file to import given
                 no_file = True
                 if job.import_id.ftp_source_ok:
                     raise osv.except_osv(_('Error'), _('You cannot manually select a file to import if given source path is set on FTP server'))
@@ -444,18 +465,8 @@
                 md5 = hashlib.md5(job.file_to_import).hexdigest()
 
                 if job.file_sum != md5:
-                    if self.search_exist(cr, uid, [('file_sum', '=', md5), ('id', '!=', job.id)], context=context):
-                        self.write(cr, uid, [job.id], {'file_sum': md5}, context=context)
-                        return {
-                            'type': 'ir.actions.act_window',
-                            'res_model': self._name,
-                            'res_id': job_id,
-                            'view_type': 'form',
-                            'view_mode': 'form,tree',
-                            'target': 'new',
-                            'view_id': [data_obj.get_object_reference(cr, uid, 'msf_tools', 'automated_import_job_file_view')[1]],
-                            'context': context,
-                        }
+                    if self.search_exist(new_cr, uid, [('file_sum', '=', md5), ('id', '!=', job.id)], context=context):
+                        self.write(new_cr, uid, [job.id], {'file_sum': md5}, context=context)
 
                 oldest_file = os.path.join(job.import_id.src_path, job.filename)
                 filename = job.filename
@@ -467,7 +478,7 @@
             try:
                 if import_data.ftp_source_ok and import_data.ftp_protocol == 'ftp':
                     prefix = '%s_' % filename.split('.')[0]
-                    suffix = '.xls' if self.pool.get('stock.picking').get_import_filetype(cr, uid, filename) == 'excel' else '.xml'
+                    suffix = '.xls' if self.pool.get('stock.picking').get_import_filetype(new_cr, uid, filename) == 'excel' else '.xml'
                     temp_file = tempfile.NamedTemporaryFile(delete=False, prefix=prefix, suffix=suffix)
                     ftp_connec.retrbinary('RETR %s' % oldest_file, temp_file.write)
                     temp_file.close()
@@ -480,12 +491,12 @@
                 processed, rejected, headers = getattr(
                     self.pool.get(import_data.function_id.model_id.model),
                     import_data.function_id.method_to_call
-                )(cr, uid, oldest_file, context=context)
+                )(new_cr, uid, oldest_file, context=context)
                 if processed:
-                    nb_processed += self.generate_file_report(cr, uid, job, processed, headers, ftp_connec=ftp_connec, sftp=sftp)
+                    nb_processed += self.generate_file_report(new_cr, uid, job, processed, headers, ftp_connec=ftp_connec, sftp=sftp)
 
                 if rejected:
-                    nb_rejected += self.generate_file_report(cr, uid, job, rejected, headers, rejected=True, ftp_connec=ftp_connec, sftp=sftp)
+                    nb_rejected += self.generate_file_report(new_cr, uid, job, rejected, headers, rejected=True, ftp_connec=ftp_connec, sftp=sftp)
                     state = 'error'
                     for resjected_line in rejected:
                         line_message = ''
@@ -498,13 +509,13 @@
                     nb_rejected += context.get('rejected_confirmation')
                     state = 'error'
 
-                self.infolog(cr, uid, _('%s :: Import job done with %s records processed and %s rejected') % (import_data.name, len(processed), nb_rejected))
+                self.infolog(new_cr, uid, _('%s :: Import job done with %s records processed and %s rejected') % (import_data.name, len(processed), nb_rejected))
 
                 if import_data.function_id.model_id.model == 'purchase.order':
                     po_id = context.get('po_id', False) or self.pool.get('purchase.order').get_po_id_from_file(cr, uid, oldest_file, context=context) or False
                     if po_id and (nb_processed or nb_rejected):
-                        po_name = self.pool.get('purchase.order').read(cr, uid, po_id, ['name'], context=context)['name']
-                        nb_total_pol = self.pool.get('purchase.order.line').search(cr, uid, [('order_id', '=', po_id)], count=True, context=context)
+                        po_name = self.pool.get('purchase.order').read(new_cr, uid, po_id, ['name'], context=context)['name']
+                        nb_total_pol = self.pool.get('purchase.order.line').search(new_cr, uid, [('order_id', '=', po_id)], count=True, context=context)
                         msg = _('%s: ') % po_name
                         if nb_processed:
                             msg += _('%s out of %s lines have been updated') % (nb_processed, nb_total_pol)
@@ -513,14 +524,14 @@
                         if nb_rejected:
                             msg += _('%s out of %s lines have been rejected') % (nb_rejected, nb_total_pol)
                         if nb_processed or nb_rejected:
-                            self.pool.get('purchase.order').log(cr, uid, po_id, msg)
+                            self.pool.get('purchase.order').log(new_cr, uid, po_id, msg)
 
                 if context.get('job_comment'):
                     for msg_dict in context['job_comment']:
-                        self.pool.get(msg_dict['res_model']).log(cr, uid, msg_dict['res_id'], msg_dict['msg'])
+                        self.pool.get(msg_dict['res_model']).log(new_cr, uid, msg_dict['res_id'], msg_dict['msg'])
                         error_message.append(msg_dict['msg'])
 
-                self.write(cr, uid, [job.id], {
+                self.write(new_cr, uid, [job.id], {
                     'filename': filename,
                     'start_time': start_time,
                     'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
@@ -533,13 +544,13 @@
                 }, context=context)
                 is_success = True if not rejected else False
                 move_to_process_path(import_data, ftp_connec, sftp, filename, success=is_success)
-                self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
-                cr.commit()
+                self.infolog(new_cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
+                new_cr.commit()
             except Exception as e:
-                cr.rollback()
+                new_cr.rollback()
                 trace_b = tools.ustr(traceback.format_exc())
-                self.infolog(cr, uid, '%s :: %s' % (import_data.name, trace_b))
-                self.write(cr, uid, [job.id], {
+                self.infolog(new_cr, uid, '%s :: %s' % (import_data.name, trace_b))
+                self.write(new_cr, uid, [job.id], {
                     'filename': False,
                     'start_time': start_time,
                     'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
@@ -551,26 +562,18 @@
                     'state': 'error',
                 }, context=context)
                 move_to_process_path(import_data, ftp_connec, sftp, filename, success=False)
-                self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
+                self.infolog(new_cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
             finally:
                 if orig_file_name:
                     self.end_processing_filename(orig_file_name)
 
+        new_cr.commit()
+        new_cr.close(True)
+
         if 'row' in context:
             # causing LmF when running job manually
             context.pop('row')
 
-        return {
-            'type': 'ir.actions.act_window',
-            'res_model': self._name,
-            'res_id': job_id,
-            'view_type': 'form',
-            'view_mode': 'form,tree',
-            'target': 'current',
-            'context': context,
-        }
-
-
     def generate_file_report(self, cr, uid, job_brw, data_lines, headers, rejected=False, ftp_connec=None, sftp=None):
         """
         Create a csv file that contains the processed lines and put this csv file
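
Taken together, the cursor handling across the hunks maintains one invariant: every path out of the background work either commits or rolls back new_cr and then closes it with close(True), so the thread never leaks a cursor. As a summary of that invariant (not the literal control flow, which commits and closes separately on each exit path):

    new_cr = pooler.get_db(cr.dbname).cursor()
    try:
        # ... process the files, writing job rows and logs via new_cr ...
        new_cr.commit()
    except Exception:
        new_cr.rollback()  # the job row is then rewritten with state 'error'
    finally:
        new_cr.close(True)

One context line may deserve a second look in review: the purchase order lookup get_po_id_from_file is still called with the request cursor cr although it now executes inside the thread, where every other call goes through new_cr.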
