Merge lp:~jfb-tempo-consulting/unifield-server/US-11601 into lp:unifield-server
Proposed by: jftempo
Status: Merged
Merged at revision: 6535
Proposed branch: lp:~jfb-tempo-consulting/unifield-server/US-11601
Merge into: lp:unifield-server
Diff against target: 428 lines (+185/-61), 6 files modified:
- bin/addons/account_hq_entries/wizard/esc_lines_import.py (+19/-3)
- bin/addons/msf_instance/msf_instance.py (+1/-1)
- bin/addons/msf_tools/automated_import.py (+24/-14)
- bin/addons/msf_tools/automated_import_data.xml (+6/-0)
- bin/addons/msf_tools/automated_import_job.py (+86/-27)
- bin/tools/webdav.py (+49/-16)
To merge this branch: bzr merge lp:~jfb-tempo-consulting/unifield-server/US-11601
Related bugs: none
Reviewer: UniField Reviewer Team (Pending)
Review via email: mp+447509@code.launchpad.net
Preview Diff
=== modified file 'bin/addons/account_hq_entries/wizard/esc_lines_import.py'
--- bin/addons/account_hq_entries/wizard/esc_lines_import.py	2023-06-23 11:55:48 +0000
+++ bin/addons/account_hq_entries/wizard/esc_lines_import.py	2023-08-04 14:37:16 +0000
@@ -9,6 +9,7 @@
 from tools.translate import _
 import time
 import base64
+import os
 import re
 from psycopg2 import IntegrityError

@@ -47,6 +48,7 @@

     _defaults = {
         'state': 'draft',
+        'start_date': lambda *x: fields.datetime.now()
     }

     def __init__(self, pool, cr):
@@ -126,7 +128,7 @@
         if not self.browse(cr, uid, ids[0], context=context).file:
             raise osv.except_osv(_('Warning'), _('No file to import'))

-        threading.Thread(target=self.load_bg, args=(cr.dbname, uid, ids[0], context)).start()
+        threading.Thread(target=self.load_bg, args=(cr.dbname, uid, ids[0], False, context)).start()
         self.write(cr, uid, ids[0], {'state': 'inprogress', 'progress': 0}, context=context)

         view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_hq_entries', 'esc_line_import_progress_wizard')[1]
@@ -142,8 +144,21 @@
             'context': context,
         }

-
-    def load_bg(self, dbname, uid, wiz_id, context=None):
+    def auto_import(self, cr, uid, file_to_import, context=None):
+
+        import_id = self.create(cr, uid, {
+            'file': base64.encodestring(open(file_to_import, 'r').read()),
+            'filename': os.path.split(file_to_import)[1],
+            'state': 'inprogress',
+        })
+        cr.commit()
+        self.load_bg(cr.dbname, uid, import_id, auto_import=True, context=context)
+        curr_state = self.browse(cr, uid, import_id, fields_to_fetch=['state'], context=context).state
+        self.write(cr, uid, import_id, {'state': 'ack'}, context=context)
+        return import_id, curr_state
+
+
+    def load_bg(self, dbname, uid, wiz_id, auto_import=False, context=None):
         def manage_error(line_index, msg, row):
             errors.append(_('Line %s, %s') % (line_index, _(msg)))
             line_data = []
@@ -162,6 +177,7 @@

         created_ids = {}
         consignee_instances = {}
+
         try:
             cr = pooler.get_db(dbname).cursor()
             cr2 = pooler.get_db(dbname).cursor()

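For orientation, the new auto_import entry point differs from the interactive wizard path above: it runs load_bg synchronously instead of spawning a thread, and hands the final wizard state back to the caller before acknowledging the record. A minimal sketch of the expected call, mirroring the dispatch in automated_import_job.py further down (the file path is illustrative):

    # Hypothetical call site; the automated-import framework resolves the
    # registered function by model + method name and passes the file path.
    esc_import = self.pool.get('esc.line.import')
    import_wiz_id, import_state = esc_import.auto_import(
        cr, uid, '/path/to/incoming_iil_file.xls', context=context)
    # import_state is the state load_bg left the wizard in; the record is
    # then set to 'ack' so it no longer appears as pending.
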
=== modified file 'bin/addons/msf_instance/msf_instance.py'
--- bin/addons/msf_instance/msf_instance.py	2023-07-12 14:08:51 +0000
+++ bin/addons/msf_instance/msf_instance.py	2023-08-04 14:37:16 +0000
@@ -823,7 +823,7 @@
                 temp_create = True

             if not upload_ok:
-                upload_ok, error = dav.upload(temp_fileobj, temp_drive_file, buffer_size=buffer_size, log=True, progress_obj=progress_obj)
+                upload_ok, error = dav.upload(temp_fileobj, temp_drive_file, buffer_size=buffer_size, log=True, progress_obj=progress_obj, continuation=True)

             # please don't change the following to else:
             if upload_ok:

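Context for this one-line change: the call already sits under "if not upload_ok:", i.e. on a retry path, so passing continuation=True lets a re-attempted cloud-backup upload resume from the last offset recorded in the WebDAV session rather than re-sending the whole file. The matching session_offset handling is in the webdav.py hunks at the end of this diff, with a caller-side sketch after them.
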
=== modified file 'bin/addons/msf_tools/automated_import.py'
--- bin/addons/msf_tools/automated_import.py	2022-08-19 12:59:43 +0000
+++ bin/addons/msf_tools/automated_import.py	2023-08-04 14:37:16 +0000
@@ -372,10 +372,13 @@
             return self.local_connection.get_file_content(path)
         return self.connection.get_file_content(path)

-    def move_to_process_path(self, filename, success):
+    def move_to_process_path(self, filename, success, local_src=False):
         """
         Move the file `file` from `src_path` to `dest_path`
+        local_src : if empty use the source_path + filename
+        else local_src + filename
         :return: return True
+
         """

@@ -388,25 +391,32 @@

         logging.getLogger('automated.import').info(_('Moving %s to %s') % (filename, dest_path))

-        if self.source_is_remote and dest_is_remote:
-            # from remote to remote (rename)
-            src_file_name = posixpath.join(self.source, filename)
-            dest_file_name = posixpath.join(dest_path, '%s_%s' % (time.strftime('%Y%m%d_%H%M%S'), filename))
-            self.connection.rename(src_file_name, dest_file_name)
-        elif not self.source_is_remote and dest_is_remote:
-            # from local to remote
-            local_file = os.path.join(self.source, filename)
-            remote_file = posixpath.join(dest_path, '%s_%s' % (time.strftime('%Y%m%d_%H%M%S'), filename))
-            self.connection.push(local_file, remote_file)
-            os.remove(local_file)
-        elif self.source_is_remote and not dest_is_remote:
+        if dest_is_remote:
+            if self.source_is_remote and not local_src:
+                # from remote to remote (rename)
+                src_file_name = posixpath.join(self.source, filename)
+                dest_file_name = posixpath.join(dest_path, '%s_%s' % (time.strftime('%Y%m%d_%H%M%S'), filename))
+                self.connection.rename(src_file_name, dest_file_name)
+            else:
+                # from local to remote
+                if local_src:
+                    local_file = os.path.join(local_src, filename)
+                else:
+                    local_file = os.path.join(self.source, filename)
+                remote_file = posixpath.join(dest_path, '%s_%s' % (time.strftime('%Y%m%d_%H%M%S'), filename))
+                self.connection.push(local_file, remote_file)
+                os.remove(local_file)
+        elif self.source_is_remote and not local_src:
             # from remote to local
             src_file_name = posixpath.join(self.source, filename)
             destfile = os.path.join(dest_path, '%s_%s' % (time.strftime('%Y%m%d_%H%M%S'), self.connection.remove_special_chars(filename)))
             self.connection.get(src_file_name, destfile, delete=True)
         else:
             # from local to local
-            src_file_name = os.path.join(self.source, filename)
+            if local_src:
+                src_file_name = os.path.join(local_src, filename)
+            else:
+                src_file_name = os.path.join(self.source, filename)
             destfile = os.path.join(dest_path, '%s_%s' % (time.strftime('%Y%m%d_%H%M%S'), self.connection.remove_special_chars(filename)))
             shutil.move(src_file_name, destfile)

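The practical effect of the new local_src parameter is that a file generated locally (such as the rejected-lines export written to a temporary directory by the import job below) can still be archived to the configured destination path even when the import source itself is remote. The call as it appears in automated_import_job.py:

    # tmp_dir holds the locally generated rejected-lines file; local_src
    # forces the local-source branch regardless of where the configured
    # import source lives.
    remote.move_to_process_path(rejected_fn, success=False, local_src=tmp_dir)
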
=== modified file 'bin/addons/msf_tools/automated_import_data.xml'
--- bin/addons/msf_tools/automated_import_data.xml	2022-08-16 16:02:19 +0000
+++ bin/addons/msf_tools/automated_import_data.xml	2023-08-04 14:37:16 +0000
@@ -155,5 +155,11 @@
         <field name="method_to_call">import_data_from_csv</field>
     </record>

+    <record id="auto_import_fnct_iil" model="automated.import.function">
+        <field name="name">Import IIL</field>
+        <field name="model_id" model="ir.model" search="[('model', '=', 'esc.line.import')]" />
+        <field name="method_to_call">auto_import</field>
+    </record>
+
 </data>
</openerp>

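This data record is what wires the new wizard method into the generic framework: an automated.import.function row names a model and a method, and the job resolves and invokes them dynamically, as in this excerpt from automated_import_job.py below:

    # Generic dispatch: resolve ('esc.line.import', 'auto_import') by name
    # and hand it the path of the oldest pending file.
    import_results = getattr(
        self.pool.get(import_data.function_id.model_id.model),
        import_data.function_id.method_to_call
    )(cr, uid, oldest_file, context=context)
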
=== modified file 'bin/addons/msf_tools/automated_import_job.py'
--- bin/addons/msf_tools/automated_import_job.py	2023-04-21 09:37:00 +0000
+++ bin/addons/msf_tools/automated_import_job.py	2023-08-04 14:37:16 +0000
@@ -38,6 +38,7 @@
 from mission_stock.mission_stock import UnicodeWriter

 from threading import RLock
+from service.web_services import report_spool


 class automated_import_job(osv.osv):
@@ -347,35 +348,93 @@
                 remote.connection.get(oldest_file, tmp_dest_file)
                 oldest_file = tmp_dest_file

-                processed, rejected, headers = getattr(
+                import_results = getattr(
                     self.pool.get(import_data.function_id.model_id.model),
                     import_data.function_id.method_to_call
                 )(cr, uid, oldest_file, context=context)
-                if processed:
-                    nb_processed += self.generate_file_report(cr, uid, job, processed, headers, remote=remote)
-
-                if rejected:
-                    nb_rejected = nb_processed  # US-7624 If one row is not correct, all processed rows are rejected
-                    self.generate_file_report(cr, uid, job, rejected, headers, remote=remote, rejected=True)
-                    state = 'error'
-                    for resjected_line in rejected:
-                        line_message = ''
-                        if resjected_line[0]:
-                            line_message = _('Line %s: ') % resjected_line[0]
-                        line_message += resjected_line[2]
-                        error_message.append(line_message)
-
-                    if import_data.function_id.method_to_call == 'auto_import_destination':
-                        error_message.append(_("no data will be imported until all the error messages are corrected"))
-                        tools.cache.clean_caches_for_db(cr.dbname)
-                        tools.read_cache.clean_caches_for_db(cr.dbname)
-
-
-                if context.get('rejected_confirmation'):
-                    nb_rejected += context.get('rejected_confirmation')
-                    state = 'error'
-
-                self.infolog(cr, uid, _('%s :: Import job done with %s records processed and %s rejected') % (import_data.name, len(processed), nb_rejected))
+
+                if import_data.function_id.model_id.model == 'esc.line.import':
+                    import_wiz_id, import_state = import_results
+                    import_wiz_obj = self.pool.get('esc.line.import').browse(cr, uid, import_wiz_id, context=context)
+                    nb_processed = import_wiz_obj.created
+                    nb_rejected = import_wiz_obj.nberrors
+                    is_success = bool(nb_processed)  # if at least one IIL created
+                    import_error = ''
+                    if nb_rejected:
+                        import_error = import_wiz_obj.error
+
+                    # Generate Report
+                    r_filename = '%s_iil_%s_%s.txt' % (
+                        time.strftime('%Y%m%d_%H%M%S'),
+                        import_wiz_id,
+                        nb_rejected and 'with_rejected_lines' or 'ok'
+                    )
+
+                    report_file_name = remote.get_report_file_name(r_filename)
+                    fp_file = open(report_file_name, 'wb')
+                    fp_file.write("File: %s\r\nStart: %s\r\nEnd: %s\r\nState: %s\r\nFile lines processed: %s\r\nIIL Created: %s\r\nLines rejected: %s\r\n\r\n%s" % (
+                        import_wiz_obj.filename,
+                        import_wiz_obj.start_date,
+                        import_wiz_obj.end_date,
+                        import_state,
+                        import_wiz_obj.total,
+                        import_wiz_obj.created,
+                        import_wiz_obj.nberrors,
+                        import_error
+                    ))
+                    fp_file.close()
+                    remote.push_report(report_file_name, r_filename)
+
+                    if nb_rejected:
+                        # export datas :
+                        report_name = "esc_line_import_rejected"
+                        rp_spool = report_spool()
+                        res_export = rp_spool.exp_report(cr.dbname, uid, report_name, [import_wiz_id], {}, context)
+                        file_res = {'state': False}
+                        while not file_res.get('state'):
+                            file_res = rp_spool.exp_report_get(cr.dbname, uid, res_export)
+                            time.sleep(0.5)
+
+                        if file_res.get('result'):
+                            tmp_dir = tempfile.mkdtemp()
+                            rejected_fn = 'rejected_lines_%s' % filename
+                            rejected_full_filename = os.path.join(tmp_dir, rejected_fn)
+                            fp_rejected = open(rejected_full_filename, 'wb')
+                            fp_rejected.write(base64.decodestring(file_res.get('result')))
+                            fp_rejected.close()
+                            remote.move_to_process_path(rejected_fn, success=False, local_src=tmp_dir)
+
+                else:
+                    processed, rejected, headers = import_results
+
+                    is_success = True
+                    if processed:
+                        nb_processed += self.generate_file_report(cr, uid, job, processed, headers, remote=remote)
+
+                    if rejected:
+                        is_success = False
+                        nb_rejected = self.generate_file_report(cr, uid, job, rejected, headers, remote=remote, rejected=True)
+                        if import_data.function_id.model_id.model == 'hr.employee':
+                            nb_rejected = nb_processed  # US-7624 If one row is not correct, all processed rows are rejected
+                        state = 'error'
+                        for resjected_line in rejected:
+                            line_message = ''
+                            if resjected_line[0]:
+                                line_message = _('Line %s: ') % resjected_line[0]
+                            line_message += resjected_line[2]
+                            error_message.append(line_message)

+                        if import_data.function_id.method_to_call == 'auto_import_destination':
+                            error_message.append(_("no data will be imported until all the error messages are corrected"))
+                            tools.cache.clean_caches_for_db(cr.dbname)
+                            tools.read_cache.clean_caches_for_db(cr.dbname)
+
+
+                    if context.get('rejected_confirmation'):
+                        nb_rejected += context.get('rejected_confirmation')
+                        state = 'error'
+
+                self.infolog(cr, uid, _('%s :: Import job done with %s records processed and %s rejected') % (import_data.name, nb_processed, nb_rejected))

                 if import_data.function_id.model_id.model == 'purchase.order':
                     po_id = context.get('po_id', False) or self.pool.get('purchase.order').get_po_id_from_file(cr, uid, oldest_file, context=context) or False
@@ -407,7 +466,7 @@
                     'file_to_import': data64,
                     'state': state,
                 }, context=context)
-                is_success = True if not rejected else False
+
                 remote.move_to_process_path(filename, success=is_success)
                 self.infolog(cr, uid, _('%s :: Import file (%s) moved to destination path') % (import_data.name, filename))
                 cr.commit()

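One detail worth noting in the esc.line.import branch above: the rejected-lines export goes through the server's report spooler, which renders asynchronously, so the job polls exp_report_get until the report is ready. A compact sketch of that round-trip (the deadline guard is an illustrative addition, not part of the change, which loops without one):

    # Start the report and poll until the spooler marks it done.
    rp_spool = report_spool()
    res_export = rp_spool.exp_report(cr.dbname, uid, 'esc_line_import_rejected',
                                     [import_wiz_id], {}, context)
    deadline = time.time() + 300  # hypothetical 5-minute cap
    file_res = {'state': False}
    while not file_res.get('state') and time.time() < deadline:
        file_res = rp_spool.exp_report_get(cr.dbname, uid, res_export)
        time.sleep(0.5)
    # file_res['result'] then carries the base64-encoded report content.
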
=== modified file 'bin/tools/webdav.py'
--- bin/tools/webdav.py	2022-08-18 09:24:04 +0000
+++ bin/tools/webdav.py	2023-08-04 14:37:16 +0000
@@ -13,6 +13,7 @@
 import os
 import posixpath
 import time
+import urlparse

 class ConnectionFailed(Exception):
     pass
@@ -33,28 +34,50 @@
         if not port:
             port = 443 if protocol == 'https' else 80
         self.path = path or ''
-        if not self.path.endswith('/'):
-            self.path = '%s/' % self.path
-
-        # oneDrive: need to split /site/ and path
-        # in our config site is /personal/unifield_xxx_yyy/
-        # path is /Documents/Unifield/
-        self.baseurl = '{0}://{1}:{2}/{3}/'.format(protocol, host, port, '/'.join(self.path.split('/')[0:3]) )
-
-        if len(self.path.split('/')) < 5:
-            self.path = '%sDocuments/' % self.path
+
+        self.url = '{0}://{1}:{2}'.format(protocol, host, port)

         self.login()

     def login(self):
-        ctx_auth = AuthenticationContext(self.baseurl)
+        ctx_auth = AuthenticationContext(self.url)

         if ctx_auth.acquire_token_for_user(self.username, cgi.escape(self.password)):
             self.request = ClientRequest(ctx_auth)
-            self.request.context = ClientContext(self.baseurl, ctx_auth)
-
+            self.request.context = ClientContext(self.url, ctx_auth)
             if not ctx_auth.provider.FedAuth or not ctx_auth.provider.rtFa:
                 raise ConnectionFailed(ctx_auth.get_last_error())
+
+
+            # get the server_site url
+            if not self.path.startswith('/'):
+                self.path = '/%s' % self.path
+            options = RequestOptions(self.url)
+            options.method = HttpMethod.Get
+            options.set_header("X-HTTP-Method", "GET")
+            options.set_header('accept', 'application/json;odata=verbose')
+            self.request.context.authenticate_request(options)
+            self.request.context.ensure_form_digest(options)
+
+            result = requests.post(url="%s/%s/_api/contextinfo" % (self.url, self.path), headers=options.headers, auth=options.auth)
+            if result.status_code not in (200, 201):
+                raise requests.exceptions.RequestException("Path %s not found" % self.path)
+            js = result.json()
+            baseurl = js.get('d', {}).get('GetContextWebInformation', {}).get('WebFullUrl')
+            if not baseurl.endswith('/'):
+                baseurl = '%s/' % baseurl
+
+            if not baseurl:
+                raise requests.exceptions.RequestException("Full Url not found %s" % self.path)
+            parsed_base = urlparse.urlparse(baseurl).path
+            self.baseurl = '%s%s' % (self.url, parsed_base)
+            if not self.path.startswith('/'):
+                self.path = '/%s' % self.path
+            if not self.path.endswith('/') and len(self.path) > 1:
+                self.path = '%s/' % (self.path, )
+
+            # set auth ctx with the full correct url or move fails
+            self.request.context = ClientContext(self.baseurl, ctx_auth)
         else:
             raise requests.exceptions.RequestException(ctx_auth.get_last_error())

@@ -113,17 +136,22 @@
             raise Exception(self.parse_error(result))
         return True

-    def move(self, remote_path, dest):
+    def move(self, remote_path, dest, retry=True):
         webUri = '%s%s' % (self.path, remote_path)
         destUri = '%s%s' % (self.path, dest)
         # falgs=1 to overwrite existing file
         request_url = "%s_api/web/getfilebyserverrelativeurl('%s')/moveto(newurl='%s',flags=1)" % (self.baseurl, webUri, destUri)
         result = self.format_request(request_url, 'POST')
         if result.status_code not in (200, 201):
+            error = self.parse_error(result)
+            if retry and ('timed out' in error or '2130575252' in error):
+                logging.getLogger('cloud.backup').info('OneDrive move: session time out')
+                self.login()
+                return self.move(remote_path, dest, retry=False)
             raise Exception(self.parse_error(result))
         return True

-    def upload(self, fileobj, remote_path, buffer_size=None, log=False, progress_obj=False):
+    def upload(self, fileobj, remote_path, buffer_size=None, log=False, progress_obj=False, continuation=False):
         if not self.session_uuid:
             self.session_uuid = uuid.uuid1()

@@ -137,6 +165,9 @@
         except:
             size = None

+        if not continuation:
+            self.session_offset = -1
+
         if self.session_offset != -1:
             fileobj.seek(self.session_offset)

@@ -190,6 +221,7 @@
             progress_obj.write({'name': percent})

         logger.info('OneDrive: %d bytes sent on %s bytes %s' % (self.session_offset, size or 'unknown', percent_txt))
+        self.session_offset = -1
         return (True, '')

     def list(self, remote_path):
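Taken together, the continuation flag and the two session_offset resets make the chunked upload resumable: a fresh call starts from byte 0, while a retry after a partial failure seeks back to the last committed offset. A sketch of the caller-side pattern this enables (the loop itself is illustrative; the real caller is the cloud-backup code in msf_instance.py above):

    # Illustrative retry loop around the resumable upload: on failure the
    # dav object keeps session_offset at the last acknowledged byte, so the
    # next attempt with continuation=True resumes instead of restarting.
    upload_ok, error = False, ''
    for attempt in range(3):  # retry count is illustrative
        upload_ok, error = dav.upload(temp_fileobj, temp_drive_file,
                                      buffer_size=buffer_size, log=True,
                                      continuation=True)
        if upload_ok:
            break  # upload() resets session_offset to -1 on success
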
@@ -203,13 +235,14 @@
         self.request.context.authenticate_request(options)
         self.request.context.ensure_form_digest(options)
         result = requests.get(url=request_url, headers=options.headers, auth=options.auth)
+        if result.status_code not in (200, 201):
+            raise requests.exceptions.RequestException(self.parse_error(result))

         result = result.json()
         files=[]
         for i in range(len(result['d']['results'])):
             item = result['d']['results'][i]
             files.append(item)
-
         return files