Merge lp:~dorian-kemps/unifield-server/US-6448 into lp:unifield-server

Proposed by jftempo
Status: Needs review
Proposed branch: lp:~dorian-kemps/unifield-server/US-6448
Merge into: lp:unifield-server
Diff against target: 280 lines (+58/-56)
2 files modified
bin/addons/msf_tools/automated_import.py (+0/-1)
bin/addons/msf_tools/automated_import_job.py (+58/-55)
To merge this branch: bzr merge lp:~dorian-kemps/unifield-server/US-6448
Reviewer: UniField Reviewer Team (status: Pending)
Review via email: mp+388477@code.launchpad.net

Unmerged revisions

5562. By Dorian

US-6448 [FIX] Automated import: fixed the manual job

5561. By Dorian

US-6448 [MERGE] Merge with trunk

5560. By Dorian

US-6448 [PROGRESS] Auto import: manual import is done in background

5559. By Dorian

US-6448 [PROGRESS]
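
Revision 5560 above moves the manual import into the background. The diff below implements this by having manual_process_import spawn a threading.Thread and return an act_window immediately, while process_import does its work on a dedicated cursor. A minimal sketch of that pattern (simplified from the diff; the reduced method body and omitted error handling are illustrative, not the exact branch code):

import threading

import pooler
from osv import osv


class automated_import_job(osv.osv):
    _name = 'automated.import.job'

    def manual_process_import(self, cr, uid, ids, context=None):
        # Sketch: simplified from the diff below, not the exact branch code.
        if isinstance(ids, (int, long)):
            ids = [ids]
        wiz = self.read(cr, uid, ids[0], ['import_id'], context)
        # Run the import in a background thread; process_import opens its own
        # cursor (see the second sketch after the diff), so the request cursor
        # is only used here to read the wizard data.
        thread = threading.Thread(target=self.process_import,
                                  args=(cr, uid, wiz['import_id'][0], ids[0], None))
        thread.start()
        # Return immediately and reopen the job form so the user can follow
        # the job state ('in_progress', then 'done' or 'error').
        return {
            'type': 'ir.actions.act_window',
            'res_model': self._name,
            'res_id': ids[0],
            'view_type': 'form',
            'view_mode': 'form,tree',
            'context': context,
        }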

Preview Diff

=== modified file 'bin/addons/msf_tools/automated_import.py'
--- bin/addons/msf_tools/automated_import.py 2019-08-02 14:52:35 +0000
+++ bin/addons/msf_tools/automated_import.py 2020-07-31 12:01:18 +0000
@@ -357,7 +357,6 @@

 return res

-
 def _generate_ir_cron(self, import_brw):
 """
 Returns the values for the ir.cron to create according to automated.import values

=== modified file 'bin/addons/msf_tools/automated_import_job.py'
--- bin/addons/msf_tools/automated_import_job.py 2019-11-13 16:23:09 +0000
+++ bin/addons/msf_tools/automated_import_job.py 2020-07-31 12:01:18 +0000
@@ -30,6 +30,8 @@
 import logging
 import posixpath
 import traceback
+import threading
+import pooler

 from osv import osv
 from osv import fields
@@ -311,8 +313,22 @@
 def manual_process_import(self, cr, uid, ids, context=None):
 if isinstance(ids, (int, long)):
 ids = [ids]
+ data_obj = self.pool.get('ir.model.data')
+
 wiz = self.read(cr, uid, ids[0], ['import_id'], context)
- return self.process_import(cr, uid, wiz['import_id'][0], started_job_id=ids[0], context=None)
+
+ # Background import
+ thread = threading.Thread(target=self.process_import, args=(cr, uid, wiz['import_id'][0], ids[0], None))
+ thread.start()
+
+ return {
+ 'type': 'ir.actions.act_window',
+ 'res_model': self._name,
+ 'res_id': ids[0],
+ 'view_type': 'form',
+ 'view_mode': 'form,tree',
+ 'context': context,
+ }

 def process_import(self, cr, uid, import_id, started_job_id=False, context=None):
 """
@@ -326,7 +342,6 @@
 :return: True
 """
 import_obj = self.pool.get('automated.import')
- data_obj = self.pool.get('ir.model.data')

 if context is None:
 context = {}
@@ -334,7 +349,9 @@
 if isinstance(import_id, (int, long)):
 import_id = [import_id]

- import_data = import_obj.browse(cr, uid, import_id[0], context=context)
+ new_cr = pooler.get_db(cr.dbname).cursor()
+
+ import_data = import_obj.browse(new_cr, uid, import_id[0], context=context)
 no_file = False
 already_done = []
 job_id = False
@@ -344,12 +361,14 @@
 nb_processed = 0
 if started_job_id:
 job_id = started_job_id
+ self.write(new_cr, uid, job_id, {'state': 'in_progress'}, context=context)
 prev_job_id = False
 else:
 prev_job_id = job_id
- job_id = self.create(cr, uid, {'import_id': import_data.id, 'state': 'in_progress'}, context=context)
- cr.commit() # keep trace of the job in case of error
- job = self.browse(cr, uid, job_id, context=context)
+ job_id = self.create(new_cr, uid, {'import_id': import_data.id, 'state': 'in_progress'}, context=context)
+ new_cr.commit() # keep trace of the job in case of error
+
+ job = self.browse(new_cr, uid, job_id, context=context)
 started_job_id = False
 md5 = False
 error = None
@@ -361,17 +380,18 @@
 context.update({'no_raise_if_ok': True, 'auto_import_ok': True})
 try:
 if import_data.ftp_ok and import_data.ftp_protocol == 'ftp':
- ftp_connec = self.pool.get('automated.import').ftp_test_connection(cr, uid, import_data.id, context=context)
+ ftp_connec = self.pool.get('automated.import').ftp_test_connection(new_cr, uid, import_data.id, context=context)
 elif import_data.ftp_ok and import_data.ftp_protocol == 'sftp':
- sftp = self.pool.get('automated.import').sftp_test_connection(cr, uid, import_data.id, context=context)
+ sftp = self.pool.get('automated.import').sftp_test_connection(new_cr, uid, import_data.id, context=context)
 except Exception, e:
 if job.id:
 if isinstance(e, osv.except_osv):
 msg = e.value
 else:
 msg = e
- self.write(cr, uid, job_id, {'state': 'error', 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'), 'start_time': start_time, 'comment': tools.ustr(msg)}, context=context)
- cr.commit()
+ self.write(new_cr, uid, job_id, {'state': 'error', 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'), 'start_time': start_time, 'comment': tools.ustr(msg)}, context=context)
+ new_cr.commit()
+ new_cr.close(True)
 raise

 try:
@@ -381,6 +401,7 @@
 except osv.except_osv as e:
 error = tools.ustr(e)
 no_file = True
+ new_cr.close(True)
 # In case of manual processing, raise the error
 if job.file_to_import:
 raise e
@@ -411,18 +432,18 @@
 error = _('No file to import in %s !') % import_data.src_path
 else:
 # files already processed in previous loop: delete the in_progress job
- self.unlink(cr, 1, [job_id], context=context)
+ self.unlink(new_cr, 1, [job_id], context=context)
 job_id = prev_job_id
 break

- elif md5 and self.search_exist(cr, uid, [('import_id', '=', import_data.id), ('file_sum', '=', md5)], context=context):
+ elif md5 and self.search_exist(new_cr, uid, [('import_id', '=', import_data.id), ('file_sum', '=', md5)], context=context):
 error = _('A file with same checksum has been already imported !')
 move_to_process_path(import_data, ftp_connec, sftp, filename, success=False)
- self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
+ self.infolog(new_cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))

 if error:
- self.infolog(cr, uid, '%s :: %s' % (import_data.name , error))
- self.write(cr, uid, [job.id], {
+ self.infolog(new_cr, uid, '%s :: %s' % (import_data.name , error))
+ self.write(new_cr, uid, [job.id], {
 'filename': filename,
 'file_to_import': data64,
 'start_time': start_time,
@@ -434,7 +455,7 @@
 'state': 'done' if no_file else 'error',
 }, context=context)
 continue
- else: # file to import given
+ else: # file to import given
 no_file = True
 if job.import_id.ftp_source_ok:
 raise osv.except_osv(_('Error'), _('You cannot manually select a file to import if given source path is set on FTP server'))
@@ -444,18 +465,8 @@
 md5 = hashlib.md5(job.file_to_import).hexdigest()

 if job.file_sum != md5:
- if self.search_exist(cr, uid, [('file_sum', '=', md5), ('id', '!=', job.id)], context=context):
- self.write(cr, uid, [job.id], {'file_sum': md5}, context=context)
- return {
- 'type': 'ir.actions.act_window',
- 'res_model': self._name,
- 'res_id': job_id,
- 'view_type': 'form',
- 'view_mode': 'form,tree',
- 'target': 'new',
- 'view_id': [data_obj.get_object_reference(cr, uid, 'msf_tools', 'automated_import_job_file_view')[1]],
- 'context': context,
- }
+ if self.search_exist(new_cr, uid, [('file_sum', '=', md5), ('id', '!=', job.id)], context=context):
+ self.write(new_cr, uid, [job.id], {'file_sum': md5}, context=context)

 oldest_file = os.path.join(job.import_id.src_path, job.filename)
 filename = job.filename
@@ -467,7 +478,7 @@
 try:
 if import_data.ftp_source_ok and import_data.ftp_protocol == 'ftp':
 prefix = '%s_' % filename.split('.')[0]
- suffix = '.xls' if self.pool.get('stock.picking').get_import_filetype(cr, uid, filename) == 'excel' else '.xml'
+ suffix = '.xls' if self.pool.get('stock.picking').get_import_filetype(new_cr, uid, filename) == 'excel' else '.xml'
 temp_file = tempfile.NamedTemporaryFile(delete=False, prefix=prefix, suffix=suffix)
 ftp_connec.retrbinary('RETR %s' % oldest_file, temp_file.write)
 temp_file.close()
@@ -480,12 +491,12 @@
 processed, rejected, headers = getattr(
 self.pool.get(import_data.function_id.model_id.model),
 import_data.function_id.method_to_call
- )(cr, uid, oldest_file, context=context)
+ )(new_cr, uid, oldest_file, context=context)
 if processed:
- nb_processed += self.generate_file_report(cr, uid, job, processed, headers, ftp_connec=ftp_connec, sftp=sftp)
+ nb_processed += self.generate_file_report(new_cr, uid, job, processed, headers, ftp_connec=ftp_connec, sftp=sftp)

 if rejected:
- nb_rejected += self.generate_file_report(cr, uid, job, rejected, headers, rejected=True, ftp_connec=ftp_connec, sftp=sftp)
+ nb_rejected += self.generate_file_report(new_cr, uid, job, rejected, headers, rejected=True, ftp_connec=ftp_connec, sftp=sftp)
 state = 'error'
 for resjected_line in rejected:
 line_message = ''
@@ -498,13 +509,13 @@
 nb_rejected += context.get('rejected_confirmation')
 state = 'error'

- self.infolog(cr, uid, _('%s :: Import job done with %s records processed and %s rejected') % (import_data.name, len(processed), nb_rejected))
+ self.infolog(new_cr, uid, _('%s :: Import job done with %s records processed and %s rejected') % (import_data.name, len(processed), nb_rejected))

 if import_data.function_id.model_id.model == 'purchase.order':
 po_id = context.get('po_id', False) or self.pool.get('purchase.order').get_po_id_from_file(cr, uid, oldest_file, context=context) or False
 if po_id and (nb_processed or nb_rejected):
- po_name = self.pool.get('purchase.order').read(cr, uid, po_id, ['name'], context=context)['name']
- nb_total_pol = self.pool.get('purchase.order.line').search(cr, uid, [('order_id', '=', po_id)], count=True, context=context)
+ po_name = self.pool.get('purchase.order').read(new_cr, uid, po_id, ['name'], context=context)['name']
+ nb_total_pol = self.pool.get('purchase.order.line').search(new_cr, uid, [('order_id', '=', po_id)], count=True, context=context)
 msg = _('%s: ') % po_name
 if nb_processed:
 msg += _('%s out of %s lines have been updated') % (nb_processed, nb_total_pol)
@@ -513,14 +524,14 @@
 if nb_rejected:
 msg += _('%s out of %s lines have been rejected') % (nb_rejected, nb_total_pol)
 if nb_processed or nb_rejected:
- self.pool.get('purchase.order').log(cr, uid, po_id, msg)
+ self.pool.get('purchase.order').log(new_cr, uid, po_id, msg)

 if context.get('job_comment'):
 for msg_dict in context['job_comment']:
- self.pool.get(msg_dict['res_model']).log(cr, uid, msg_dict['res_id'], msg_dict['msg'])
+ self.pool.get(msg_dict['res_model']).log(new_cr, uid, msg_dict['res_id'], msg_dict['msg'])
 error_message.append(msg_dict['msg'])

- self.write(cr, uid, [job.id], {
+ self.write(new_cr, uid, [job.id], {
 'filename': filename,
 'start_time': start_time,
 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
@@ -533,13 +544,13 @@
 }, context=context)
 is_success = True if not rejected else False
 move_to_process_path(import_data, ftp_connec, sftp, filename, success=is_success)
- self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
- cr.commit()
+ self.infolog(new_cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
+ new_cr.commit()
 except Exception as e:
- cr.rollback()
+ new_cr.rollback()
 trace_b = tools.ustr(traceback.format_exc())
- self.infolog(cr, uid, '%s :: %s' % (import_data.name, trace_b))
- self.write(cr, uid, [job.id], {
+ self.infolog(new_cr, uid, '%s :: %s' % (import_data.name, trace_b))
+ self.write(new_cr, uid, [job.id], {
 'filename': False,
 'start_time': start_time,
 'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
@@ -551,26 +562,18 @@
 'state': 'error',
 }, context=context)
 move_to_process_path(import_data, ftp_connec, sftp, filename, success=False)
- self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
+ self.infolog(new_cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
 finally:
 if orig_file_name:
 self.end_processing_filename(orig_file_name)

+ new_cr.commit()
+ new_cr.close(True)
+
 if 'row' in context:
 # causing LmF when running job manually
 context.pop('row')

- return {
- 'type': 'ir.actions.act_window',
- 'res_model': self._name,
- 'res_id': job_id,
- 'view_type': 'form',
- 'view_mode': 'form,tree',
- 'target': 'current',
- 'context': context,
- }
-
-
 def generate_file_report(self, cr, uid, job_brw, data_lines, headers, rejected=False, ftp_connec=None, sftp=None):
 """
 Create a csv file that contains the processed lines and put this csv file
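
A note on the cursor handling added to process_import above: because the method may now run in a thread that outlives the web request, it works on a dedicated cursor obtained from pooler.get_db(cr.dbname).cursor() and must commit, roll back and close that cursor itself (the diff closes it on the connection-error path, on the no-file path and at the end of the loop). A minimal sketch of that lifecycle, under the assumption of the same OpenERP-style pooler/cursor API used in this codebase; run_with_own_cursor and work are hypothetical names for illustration:

import pooler


def run_with_own_cursor(dbname, uid, work, context=None):
    # Sketch only: the cursor lifecycle used by the background import job.
    new_cr = pooler.get_db(dbname).cursor()   # cursor owned by the worker thread
    try:
        work(new_cr, uid, context=context)    # e.g. the body of process_import
        new_cr.commit()                       # persist the job record and its result
    except Exception:
        new_cr.rollback()                     # keep job/database state consistent on error
        raise
    finally:
        new_cr.close(True)                    # always release the cursor when the thread ends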
