Merge lp:~unifield-team/unifield-server/us-966-clean into lp:unifield-server
- us-966-clean
- Merge into trunk
Proposed by
Quentin THEURET @Amaris
Status: | Merged |
---|---|
Merged at revision: | 3778 |
Proposed branch: | lp:~unifield-team/unifield-server/us-966-clean |
Merge into: | lp:unifield-server |
Diff against target: |
1775 lines (+1464/-65) 16 files modified
bin/addons/import_data/import_data.py (+79/-53) bin/addons/msf_homere_interface/hr.py (+15/-0) bin/addons/msf_homere_interface/wizard/hr_expat_import.py (+22/-6) bin/addons/msf_tools/__init__.py (+4/-1) bin/addons/msf_tools/__openerp__.py (+8/-1) bin/addons/msf_tools/automated_import.py (+443/-0) bin/addons/msf_tools/automated_import_data.xml (+102/-0) bin/addons/msf_tools/automated_import_function.py (+107/-0) bin/addons/msf_tools/automated_import_job.py (+366/-0) bin/addons/msf_tools/msf_tools.py (+1/-4) bin/addons/msf_tools/views/automated_import_function_view.xml (+58/-0) bin/addons/msf_tools/views/automated_import_job_view.xml (+120/-0) bin/addons/msf_tools/views/automated_import_view.xml (+77/-0) bin/addons/sync_server/__openerp__.py (+1/-0) bin/addons/sync_server/data/automated_import_sync_groups.xml (+18/-0) bin/osv/orm.py (+43/-0) |
To merge this branch: | bzr merge lp:~unifield-team/unifield-server/us-966-clean |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
UniField Reviewer Team | Pending | ||
Review via email: mp+295600@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/addons/import_data/import_data.py' |
2 | --- bin/addons/import_data/import_data.py 2016-01-05 14:50:57 +0000 |
3 | +++ bin/addons/import_data/import_data.py 2016-05-26 09:26:36 +0000 |
4 | @@ -190,12 +190,23 @@ |
5 | 'import_mode': lambda *a: 'create', |
6 | } |
7 | |
8 | - def _import(self, dbname, uid, ids, context=None): |
9 | + def _import(self, cr, uid, ids, context=None, use_new_cursor=True, auto_import=False): |
10 | """if context includes 'import_data_field_max_size' dict, |
11 | this dict specifies the max tolerated field length at import |
12 | (key: field name, value: field size) |
13 | """ |
14 | - cr = pooler.get_db(dbname).cursor() |
15 | + dbname = cr.dbname |
16 | + if use_new_cursor: |
17 | + cr = pooler.get_db(cr.dbname).cursor() |
18 | + |
19 | + if context is None: |
20 | + context = {} |
21 | + |
22 | + if isinstance(ids, (int, long)): |
23 | + ids = [ids] |
24 | + |
25 | + processed = [] |
26 | + rejected = [] |
27 | |
28 | obj = self.read(cr, uid, ids[0]) |
29 | import_mode = obj.get('import_mode') |
30 | @@ -282,7 +293,9 @@ |
31 | self._cache[dbname]['product.international.status']['name'].update({iv['name']: iv['id']}) |
32 | |
33 | errorfile = TemporaryFile('w+') |
34 | - writer = csv.writer(errorfile, quotechar='"', delimiter=';') |
35 | + writer = False |
36 | + if not auto_import: |
37 | + writer = csv.writer(errorfile, quotechar='"', delimiter=';') |
38 | |
39 | fields_def = impobj.fields_get(cr, uid, context=context) |
40 | i = 0 |
41 | @@ -342,6 +355,13 @@ |
42 | |
43 | raise osv.except_osv(_('Warning !'), _('%s does not exist')%(value,)) |
44 | |
45 | + def write_error_row(row, index, error=""): |
46 | + if not auto_import and writer: |
47 | + row.append(error) |
48 | + writer.writerow(row) |
49 | + else: |
50 | + rejected.append((index, row, error)) |
51 | + |
52 | i = 1 |
53 | nb_error = 0 |
54 | nb_succes = 0 |
55 | @@ -350,7 +370,8 @@ |
56 | if self.pre_hook.get(impobj._name): |
57 | # for headers mod. |
58 | col_datas = self.pre_hook[impobj._name](impobj, cr, uid, headers, {}, col_datas) |
59 | - writer.writerow(headers) |
60 | + if not auto_import and writer: |
61 | + writer.writerow(headers) |
62 | |
63 | for row in reader: |
64 | newo2m = False |
65 | @@ -394,9 +415,8 @@ |
66 | logging.getLogger('import data').info( |
67 | 'Error %s'% (msg, )) |
68 | cr.rollback() |
69 | - row.append("Line %s, row: %s, %s" % (i, n, |
70 | - msg, )) |
71 | - writer.writerow(row) |
72 | + error = "Line %s, row: %s, %s" % (i, n, msg, ) |
73 | + write_error_row(row, i, error) |
74 | nb_error += 1 |
75 | line_ok = False |
76 | break |
77 | @@ -464,62 +484,64 @@ |
78 | impobj.create(cr, uid, data, context={'from_import_menu': True}) |
79 | nb_succes += 1 |
80 | cr.commit() |
81 | + processed.append((i, row)) |
82 | except osv.except_osv, e: |
83 | logging.getLogger('import data').info('Error %s'%e.value) |
84 | cr.rollback() |
85 | - row.append("Line %s, row: %s, %s"%(i, n, e.value)) |
86 | - writer.writerow(row) |
87 | + error = "Line %s, row: %s, %s"%(i, n, e.value) |
88 | + write_error_row(row, i, error) |
89 | nb_error += 1 |
90 | except Exception, e: |
91 | cr.rollback() |
92 | logging.getLogger('import data').info('Error %s'%e) |
93 | - row.append("Line %s, row: %s, %s"%(i, n, e)) |
94 | - writer.writerow(row) |
95 | + error = "Line %s, row: %s, %s"%(i, n, e) |
96 | + write_error_row(row, i, error) |
97 | nb_error += 1 |
98 | |
99 | if self.post_load_hook.get(impobj._name): |
100 | self.post_load_hook[impobj._name](impobj, cr, uid) |
101 | fileobj.close() |
102 | - import_type = 'Import' |
103 | - if import_mode == 'update': |
104 | - import_type = 'Update' |
105 | - summary = '''Datas Import Summary: |
106 | -Object: %s |
107 | -Records updated: %s |
108 | -Records created: %s |
109 | -'''%(objname, nb_update_success, nb_succes) |
110 | - else: |
111 | - summary = '''Datas Import Summary: |
112 | -Object: %s |
113 | -Records created: %s |
114 | -'''%(objname, nb_succes) |
115 | - |
116 | - if nb_error: |
117 | - summary += '''Records rejected: %s |
118 | - |
119 | -Find in attachment the rejected lines'''%(nb_error) |
120 | - |
121 | - request_obj = self.pool.get('res.request') |
122 | - req_id = request_obj.create(cr, uid, |
123 | - {'name': "%s %s"%(import_type, objname,), |
124 | - 'act_from': uid, |
125 | - 'act_to': uid, |
126 | - 'body': summary, |
127 | - }) |
128 | - if req_id: |
129 | - request_obj.request_send(cr, uid, [req_id]) |
130 | - |
131 | - if nb_error: |
132 | - errorfile.seek(0) |
133 | - attachment = self.pool.get('ir.attachment') |
134 | - attachment.create(cr, uid, { |
135 | - 'name': 'rejected-lines.csv', |
136 | - 'datas_fname': 'rejected-lines.csv', |
137 | - 'description': 'Rejected Lines', |
138 | - 'res_model': 'res.request', |
139 | - 'res_id': req_id, |
140 | - 'datas': base64.encodestring(errorfile.read()), |
141 | - }) |
142 | + if not auto_import: |
143 | + import_type = 'Import' |
144 | + if import_mode == 'update': |
145 | + import_type = 'Update' |
146 | + summary = '''Datas Import Summary: |
147 | + Object: %s |
148 | + Records updated: %s |
149 | + Records created: %s |
150 | + '''%(objname, nb_update_success, nb_succes) |
151 | + else: |
152 | + summary = '''Datas Import Summary: |
153 | + Object: %s |
154 | + Records created: %s |
155 | + '''%(objname, nb_succes) |
156 | + |
157 | + if nb_error: |
158 | + summary += '''Records rejected: %s |
159 | + |
160 | + Find in attachment the rejected lines'''%(nb_error) |
161 | + |
162 | + request_obj = self.pool.get('res.request') |
163 | + req_id = request_obj.create(cr, uid, |
164 | + {'name': "%s %s"%(import_type, objname,), |
165 | + 'act_from': uid, |
166 | + 'act_to': uid, |
167 | + 'body': summary, |
168 | + }) |
169 | + if req_id: |
170 | + request_obj.request_send(cr, uid, [req_id]) |
171 | + |
172 | + if nb_error: |
173 | + errorfile.seek(0) |
174 | + attachment = self.pool.get('ir.attachment') |
175 | + attachment.create(cr, uid, { |
176 | + 'name': 'rejected-lines.csv', |
177 | + 'datas_fname': 'rejected-lines.csv', |
178 | + 'description': 'Rejected Lines', |
179 | + 'res_model': 'res.request', |
180 | + 'res_id': req_id, |
181 | + 'datas': base64.encodestring(errorfile.read()), |
182 | + }) |
183 | |
184 | if impobj == 'product.product': |
185 | # Clear the cache |
186 | @@ -528,10 +550,14 @@ |
187 | |
188 | errorfile.close() |
189 | cr.commit() |
190 | - cr.close(True) |
191 | + if use_new_cursor: |
192 | + cr.close(True) |
193 | + |
194 | + if auto_import: |
195 | + return processed, rejected, headers |
196 | |
197 | def import_csv(self, cr, uid, ids, context=None): |
198 | - thread = threading.Thread(target=self._import, args=(cr.dbname, uid, ids, context)) |
199 | + thread = threading.Thread(target=self._import, args=(cr, uid, ids, context)) |
200 | thread.start() |
201 | return {'type': 'ir.actions.act_window_close'} |
202 | |
203 | |
204 | === modified file 'bin/addons/msf_homere_interface/hr.py' |
205 | --- bin/addons/msf_homere_interface/hr.py 2016-01-04 13:16:43 +0000 |
206 | +++ bin/addons/msf_homere_interface/hr.py 2016-05-26 09:26:36 +0000 |
207 | @@ -326,5 +326,20 @@ |
208 | |
209 | return super(hr_employee, self).name_search(cr, uid, name, args, operator, context, limit) |
210 | |
211 | + def auto_import(self, cr, uid, file_to_import): |
212 | + import base64 |
213 | + import os |
214 | + processed = [] |
215 | + rejected = [] |
216 | + headers = [] |
217 | + |
218 | + import_obj = self.pool.get('hr.expat.employee.import') |
219 | + import_id = import_obj.create(cr, uid, { |
220 | + 'file': base64.encodestring(open(file_to_import, 'r').read()), |
221 | + 'filename': os.path.split(file_to_import)[1], |
222 | + }) |
223 | + processed, rejected, headers = import_obj.button_validate(cr, uid, [import_id], auto_import=True) |
224 | + return processed, rejected, headers |
225 | + |
226 | hr_employee() |
227 | # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
228 | |
229 | === modified file 'bin/addons/msf_homere_interface/wizard/hr_expat_import.py' |
230 | --- bin/addons/msf_homere_interface/wizard/hr_expat_import.py 2015-03-24 08:40:43 +0000 |
231 | +++ bin/addons/msf_homere_interface/wizard/hr_expat_import.py 2016-05-26 09:26:36 +0000 |
232 | @@ -37,7 +37,7 @@ |
233 | 'filename': fields.char(string="Imported filename", size=256), |
234 | } |
235 | |
236 | - def button_validate(self, cr, uid, ids, context=None): |
237 | + def button_validate(self, cr, uid, ids, context=None, auto_import=False): |
238 | """ |
239 | Import XLS file |
240 | """ |
241 | @@ -46,12 +46,22 @@ |
242 | line.cells[cell_index] and line.cells[cell_index].data \ |
243 | or False |
244 | |
245 | + def manage_error(line_index, msg, name='', code='', status=''): |
246 | + if auto_import: |
247 | + rejected_lines.append((line_index, [name, code, status], msg)) |
248 | + else: |
249 | + raise osv.except_osv(_('Error'), _(msg)) |
250 | + |
251 | + processed_lines = [] |
252 | + rejected_lines = [] |
253 | + headers = [_('Name'), _('Code'), _('Status')] |
254 | hr_emp_obj = self.pool.get('hr.employee') |
255 | # Some verifications |
256 | if not context: |
257 | context = {} |
258 | if isinstance(ids, (int, long)): |
259 | ids = [ids] |
260 | + |
261 | for wiz in self.browse(cr, uid, ids): |
262 | # Prepare some values |
263 | created = 0 |
264 | @@ -72,25 +82,28 @@ |
265 | # get cells |
266 | name = get_xml_spreadheet_cell_value(0) |
267 | if not name: |
268 | + manage_error(line_index, 'No name defined') |
269 | continue |
270 | code = get_xml_spreadheet_cell_value(1) |
271 | if not code: |
272 | msg = "At least one employee in the import file does not" \ |
273 | " have an ID number; make sure all employees in the" \ |
274 | " file have an ID number and run the import again." |
275 | - raise osv.except_osv(_('Error'), _(msg)) |
276 | + manage_error(line_index, msg, name) |
277 | active_str = get_xml_spreadheet_cell_value(2) |
278 | if not active_str: |
279 | - msg = "Active column is missing or empty at line %d" |
280 | - raise osv.except_osv(_('Error'), _(msg) % (line_index, )) |
281 | + msg = "Active column is missing or empty at line %d" % line_index |
282 | + manage_error(line_index, msg, name, code) |
283 | active_str = active_str.lower() |
284 | if active_str not in ('active', 'inactive'): |
285 | msg = "Active column invalid value line %d" \ |
286 | - " (should be Active/Inactive)" |
287 | - raise osv.except_osv(_('Error'), _(msg) % (line_index, )) |
288 | + " (should be Active/Inactive)" % line_index |
289 | + manage_error(line_index, msg, name, code, active_str) |
290 | active = active_str == 'active' or False |
291 | |
292 | processed += 1 |
293 | + if auto_import: |
294 | + processed_lines.append((line_index, [name, code, active_str])) |
295 | |
296 | ids = hr_emp_obj.search(cr, uid, |
297 | [('identification_id', '=', code)]) |
298 | @@ -114,6 +127,9 @@ |
299 | |
300 | context.update({'message': ' ', 'from': 'expat_import'}) |
301 | |
302 | + if auto_import: |
303 | + return processed_lines, rejected_lines, headers |
304 | + |
305 | view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'msf_homere_interface', 'payroll_import_confirmation') |
306 | view_id = view_id and view_id[1] or False |
307 | |
308 | |
309 | === modified file 'bin/addons/msf_tools/__init__.py' |
310 | --- bin/addons/msf_tools/__init__.py 2012-04-04 07:07:34 +0000 |
311 | +++ bin/addons/msf_tools/__init__.py 2016-05-26 09:26:36 +0000 |
312 | @@ -15,8 +15,11 @@ |
313 | # GNU Affero General Public License for more details. |
314 | # |
315 | # You should have received a copy of the GNU Affero General Public License |
316 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
317 | +# along with this program. If not, see <http://www.gnu.org/licenses/>. |
318 | # |
319 | ############################################################################## |
320 | |
321 | import msf_tools |
322 | +import automated_import_function |
323 | +import automated_import |
324 | +import automated_import_job |
325 | |
326 | === modified file 'bin/addons/msf_tools/__openerp__.py' |
327 | --- bin/addons/msf_tools/__openerp__.py 2013-03-18 16:15:46 +0000 |
328 | +++ bin/addons/msf_tools/__openerp__.py 2016-05-26 09:26:36 +0000 |
329 | @@ -24,6 +24,7 @@ |
330 | "version": "1.0", |
331 | "depends": ["base", |
332 | "product", |
333 | + "object_query", |
334 | ], |
335 | "author": "MSF, TeMPO Consulting", |
336 | "website": "", |
337 | @@ -33,7 +34,13 @@ |
338 | """, |
339 | "init_xml": [ |
340 | ], |
341 | - 'update_xml': ['security/ir.model.access.csv',], |
342 | + 'update_xml': [ |
343 | + 'views/automated_import_view.xml', |
344 | + 'views/automated_import_function_view.xml', |
345 | + 'views/automated_import_job_view.xml', |
346 | + 'security/ir.model.access.csv', |
347 | + 'automated_import_data.xml', |
348 | + ], |
349 | 'demo_xml': [ |
350 | ], |
351 | 'test': [# tests should be performed in base classes to avoid cyclic dependencies |
352 | |
353 | === added file 'bin/addons/msf_tools/automated_import.py' |
354 | --- bin/addons/msf_tools/automated_import.py 1970-01-01 00:00:00 +0000 |
355 | +++ bin/addons/msf_tools/automated_import.py 2016-05-26 09:26:36 +0000 |
356 | @@ -0,0 +1,443 @@ |
357 | +# -*- coding: utf-8 -*- |
358 | +############################################################################## |
359 | +# |
360 | +# OpenERP, Open Source Management Solution |
361 | +# Copyright (C) 2016 TeMPO Consulting, MSF |
362 | +# |
363 | +# This program is free software: you can redistribute it and/or modify |
364 | +# it under the terms of the GNU Affero General Public License as |
365 | +# published by the Free Software Foundation, either version 3 of the |
366 | +# License, or (at your option) any later version. |
367 | +# |
368 | +# This program is distributed in the hope that it will be useful, |
369 | +# but WITHOUT ANY WARRANTY; without even the implied warranty of |
370 | +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
371 | +# GNU Affero General Public License for more details. |
372 | +# |
373 | +# You should have received a copy of the GNU Affero General Public License |
374 | +# along with this program. If not, see <http://www.gnu.org/licenses/>. |
375 | +# |
376 | +############################################################################## |
377 | + |
378 | +import os |
379 | +import time |
380 | + |
381 | +from osv import osv |
382 | +from osv import fields |
383 | + |
384 | +from tools.translate import _ |
385 | + |
386 | + |
387 | +class automated_import(osv.osv): |
388 | + _name = 'automated.import' |
389 | + |
390 | + def _check_paths(self, cr, uid, ids, context=None): |
391 | + """ |
392 | + Check if given paths are accessible and make checks that src path is not same path as report or dest path. |
393 | + :param cr: Cursor to the database |
394 | + :param uid: ID of the res.users that calls the method |
395 | + :param ids: List of ID of automated.import on which checks are made |
396 | + :param context: Context of the call |
397 | + :return: Return True or raise an error |
398 | + """ |
399 | + if context is None: |
400 | + context = {} |
401 | + |
402 | + if isinstance(ids, (int, long)): |
403 | + ids = [ids] |
404 | + |
405 | + for imp_brw in self.browse(cr, uid, ids, context=context): |
406 | + for path in [('src_path', 'r'), ('dest_path', 'w'), ('report_path', 'w')]: |
407 | + if imp_brw[path[0]]: |
408 | + self.path_is_accessible(imp_brw[path[0]], path[1]) |
409 | + |
410 | + if imp_brw.src_path: |
411 | + if imp_brw.src_path == imp_brw.dest_path: |
412 | + raise osv.except_osv( |
413 | + _('Error'), |
414 | + _('You cannot have same directory for \'Source Path\' and \'Destination Path\''), |
415 | + ) |
416 | + if imp_brw.src_path == imp_brw.report_path: |
417 | + raise osv.except_osv( |
418 | + _('Error'), |
419 | + _('You cannot have same directory for \'Source Path\' and \'Report Path\''), |
420 | + ) |
421 | + if imp_brw.active and not (imp_brw.src_path and imp_brw.dest_path and imp_brw.report_path): |
422 | + raise osv.except_osv( |
423 | + _('Error'), |
424 | + _('Before activation, the different paths should be set.') |
425 | + ) |
426 | + |
427 | + return True |
428 | + |
429 | + _columns = { |
430 | + 'name': fields.char( |
431 | + size=128, |
432 | + string='Name', |
433 | + required=True, |
434 | + ), |
435 | + 'src_path': fields.char( |
436 | + size=512, |
437 | + string='Source Path', |
438 | + ), |
439 | + 'dest_path': fields.char( |
440 | + size=512, |
441 | + string='Destination Path', |
442 | + ), |
443 | + 'report_path': fields.char( |
444 | + size=512, |
445 | + string='Report Path', |
446 | + ), |
447 | + 'start_time': fields.datetime( |
448 | + string='Date and time of first planned execution', |
449 | + ), |
450 | + 'interval': fields.integer( |
451 | + string='Interval number', |
452 | + ), |
453 | + 'interval_unit': fields.selection( |
454 | + selection=[ |
455 | + ('minutes', 'Minutes'), |
456 | + ('hours', 'Hours'), |
457 | + ('work_days', 'Work Days'), |
458 | + ('days', 'Days'), |
459 | + ('weeks', 'Weeks'), |
460 | + ('months', 'Months'), |
461 | + ], |
462 | + string='Interval Unit', |
463 | + ), |
464 | + 'function_id': fields.many2one( |
465 | + 'automated.import.function', |
466 | + string='Functionality', |
467 | + required=True, |
468 | + ), |
469 | + 'active': fields.boolean( |
470 | + string='Active', |
471 | + readonly=True, |
472 | + ), |
473 | + 'cron_id': fields.many2one( |
474 | + 'ir.cron', |
475 | + string='Associated cron job', |
476 | + readonly=True, |
477 | + ), |
478 | + 'priority': fields.integer( |
479 | + string='Priority', |
480 | + required=True, |
481 | + help="""Defines the priority of the automated import processing because some of them needs other data |
482 | +to import well some data (e.g: Product Categories needs Product nomenclatures).""" |
483 | + ), |
484 | + } |
485 | + |
486 | + _defaults = { |
487 | + 'interval': lambda *a: 1, |
488 | + 'interval_unit': lambda *a: 'hours', |
489 | + 'active': lambda *a: False, |
490 | + 'priority': lambda *a: 10, |
491 | + } |
492 | + |
493 | + _sql_constraints = [ |
494 | + ( |
495 | + 'import_name_uniq', |
496 | + 'unique(name)', |
497 | + _('Another Automated import with same name already exists (maybe inactive). Automated import name must be unique. Please select an other name.'), |
498 | + ), |
499 | + ( |
500 | + 'import_function_id_uniq', |
501 | + 'unique(function_id)', |
502 | + _('Another Automated import with same functionality already exists (maybe inactive). Only one automated import must be created for a '\ |
503 | + 'same functionality. Please select an other functionality.'), |
504 | + ), |
505 | + ( |
506 | + 'import_positive_interval', |
507 | + 'CHECK(interval >= 0)', |
508 | + _('Interval number cannot be negative !'), |
509 | + ), |
510 | + ] |
511 | + |
512 | + _constraints = [ |
513 | + (_check_paths, _('There is a problem with paths'), ['active', 'src_path', 'dest_path', 'report_path']), |
514 | + ] |
515 | + |
516 | + def job_in_progress(self, cr, uid, ids, context=None): |
517 | + """ |
518 | + Check if there is job in progress for this automated import. |
519 | + :param cr: Cursor to the database |
520 | + :param uid: ID of the res.users that calls the method |
521 | + :param ids: List of ID of automated.import on which the test is made |
522 | + :param context: Context of the call |
523 | + :return: Return True if there are jobs in progress |
524 | + """ |
525 | + job_progress_obj = self.pool.get('automated.import.job.progress') |
526 | + |
527 | + if context is None: |
528 | + context = {} |
529 | + |
530 | + if isinstance(ids, (int, long)): |
531 | + ids = [ids] |
532 | + |
533 | + # Use uid=1 to avoid return of only osv.memory that belongs to the current user |
534 | + return job_progress_obj.search(cr, 1, [('import_id', 'in', ids)], limit=1, context=context) |
535 | + |
536 | + def path_is_accessible(self, path, mode='r'): |
537 | + """ |
538 | + Returns if the given path is accessible in the given mode |
539 | + :param path: Local path to test |
540 | + :param mode: Mode to test (can be 'r' for read, 'w' for write) |
541 | + :return: True if the path is accessible or the error if not |
542 | + """ |
543 | + msg = None |
544 | + if not os.access(path, os.F_OK): |
545 | + msg = _('Path \'%s\' doesn\'t exist!') % path |
546 | + elif 'r' in mode and not os.access(path, os.R_OK): |
547 | + msg = _('Read is not allowed on \'%s\'!') % path |
548 | + elif 'w' in mode and not os.access(path, os.W_OK): |
549 | + msg = _('Write is not allowed on \'%s\'!') % path |
550 | + |
551 | + if msg: |
552 | + raise osv.except_osv(_('Error'), msg) |
553 | + |
554 | + return True |
555 | + |
556 | + def run_job_manually(self, cr, uid, ids, context=None, params=None): |
557 | + """ |
558 | + Create a new job with automated import parameters and display a view |
559 | + to add a file to import. Then, run it if user clicks on Run or delete |
560 | + it if user clicks on Cancel |
561 | + :param cr: Cursor to the database |
562 | + :param uid: ID of the res.users that calls this method |
563 | + :param ids: List of ID of automated.import that must be ran |
564 | + :param context: Context of the call |
565 | + :param params: Manual parameters in case of manual customized run |
566 | + :return: An action to go to the view of automated.import.job to add a file to import |
567 | + """ |
568 | + job_obj = self.pool.get('automated.import.job') |
569 | + data_obj = self.pool.get('ir.model.data') |
570 | + |
571 | + if context is None: |
572 | + context = {} |
573 | + |
574 | + if isinstance(ids, (int, long)): |
575 | + ids = [ids] |
576 | + |
577 | + if params is None: |
578 | + params = {} |
579 | + |
580 | + for import_brw in self.browse(cr, uid, ids, context=context): |
581 | + if not import_brw.src_path or not import_brw.dest_path or not import_brw.report_path: |
582 | + raise osv.except_osv( |
583 | + _('Error'), |
584 | + _('You should define all paths before run manually this job !'), |
585 | + ) |
586 | + params = { |
587 | + 'import_id': import_brw.id, |
588 | + 'state': 'draft', |
589 | + } |
590 | + job_id = job_obj.create(cr, uid, params, context=context) |
591 | + |
592 | + return { |
593 | + 'type': 'ir.actions.act_window', |
594 | + 'res_model': job_obj._name, |
595 | + 'res_id': job_id, |
596 | + 'view_type': 'form', |
597 | + 'view_mode': 'form', |
598 | + 'view_id': [data_obj.get_object_reference(cr, uid, 'msf_tools', 'automated_import_job_file_view')[1]], |
599 | + 'target': 'new', |
600 | + 'context': context, |
601 | + } |
602 | + |
603 | + |
604 | + def run_job(self, cr, uid, ids, context=None, params=None): |
605 | + """ |
606 | + Create a new job with automated import parameters and run it |
607 | + :param cr: Cursor to the database |
608 | + :param uid: ID of the res.users that calls this method |
609 | + :param ids: List of ID of automated.import that must be ran |
610 | + :param context: Context of the call |
611 | + :param params: Manual parameters in case of manual customized run |
612 | + :return: An action to go to the view of automated.import.job |
613 | + """ |
614 | + job_obj = self.pool.get('automated.import.job') |
615 | + |
616 | + if context is None: |
617 | + context = {} |
618 | + |
619 | + if isinstance(ids, (int, long)): |
620 | + ids = [ids] |
621 | + |
622 | + if params is None: |
623 | + params = {} |
624 | + |
625 | + for import_id in ids: |
626 | + params = { |
627 | + 'import_id': import_id, |
628 | + 'state': 'in_progress', |
629 | + } |
630 | + job_id = job_obj.create(cr, uid, params, context=context) |
631 | + cr.commit() |
632 | + res = job_obj.process_import(cr, uid, [job_id], context=context) |
633 | + cr.commit() |
634 | + |
635 | + return res |
636 | + |
637 | + def _generate_ir_cron(self, import_brw): |
638 | + """ |
639 | + Returns the values for the ir.cron to create according to automated.import values |
640 | + :param import_brw: automated.import browse_record |
641 | + :return: A dictionary with values for ir.cron |
642 | + """ |
643 | + # If no interval defined, stop the scheduled action |
644 | + numbercall = -1 |
645 | + if not import_brw.interval: |
646 | + numbercall = 0 |
647 | + |
648 | + return { |
649 | + 'name': _('[Automated import] %s') % import_brw.name, |
650 | + 'user_id': 1, |
651 | + 'active': import_brw.active, |
652 | + 'interval_number': import_brw.interval, |
653 | + 'interval_type': import_brw.interval_unit, |
654 | + 'numbercall': numbercall, |
655 | + 'nextcall': import_brw.start_time or time.strftime('%Y-%m-%d %H:%M:%S'), |
656 | + 'model': self._name, |
657 | + 'function': 'run_job', |
658 | + 'args': '(%s,)' % import_brw.id, |
659 | + 'priority': import_brw.priority, |
660 | + } |
661 | + |
662 | + def create(self, cr, uid, vals, context=None): |
663 | + """ |
664 | + Create the automated.import record. |
665 | + Make some checks (uniqueness of name, uniqueness of functionality...) |
666 | + Create an ir_cron record and linked it to the new automated.import |
667 | + :param cr: Cursor to the database |
668 | + :param uid: ID of the res.users that calls the method |
669 | + :param vals: Values for the new automated.import record |
670 | + :param context: Context of the call |
671 | + :return: The ID of the new automated.import created record |
672 | + """ |
673 | + cron_obj = self.pool.get('ir.cron') |
674 | + |
675 | + if context is None: |
676 | + context = {} |
677 | + |
678 | + # Call the super create |
679 | + new_id = super(automated_import, self).create(cr, uid, vals, context=context) |
680 | + |
681 | + # Generate new ir.cron |
682 | + import_brw = self.browse(cr, uid, new_id, context=context) |
683 | + cron_id = cron_obj.create(cr, uid, self._generate_ir_cron(import_brw), context=context) |
684 | + self.write(cr, uid, [new_id], {'cron_id': cron_id}, context=context) |
685 | + |
686 | + return new_id |
687 | + |
688 | + def write(self, cr, uid, ids, vals, context=None): |
689 | + """ |
690 | + Make some checks on new values (uniqueness of name, uniqueness of functionality...) |
691 | + Update the ir_cron |
692 | + Write new values on existing automated.import records |
693 | + :param cr: Cursor to the database |
694 | + :param uid: ID of the res.users that calls the method |
695 | + :param ids: List of ID of automated.import records to write |
696 | + :param vals: Values for the new automated.import record |
697 | + :param context: Context of the call |
698 | + :return: True |
699 | + """ |
700 | + cron_obj = self.pool.get('ir.cron') |
701 | + |
702 | + if context is None: |
703 | + context = {} |
704 | + |
705 | + if isinstance(ids, (int, long)): |
706 | + ids = [ids] |
707 | + |
708 | + res = super(automated_import, self).write(cr, uid, ids, vals, context=context) |
709 | + |
710 | + for import_brw in self.browse(cr, uid, ids, context=context): |
711 | + cron_vals = self._generate_ir_cron(import_brw) |
712 | + if import_brw.cron_id: |
713 | + cron_obj.write(cr, uid, [import_brw.cron_id.id], cron_vals, context=context) |
714 | + elif not vals.get('cron_id', False): |
715 | + cron_id = cron_obj.create(cr, uid, cron_vals, context=context) |
716 | + self.write(cr, uid, [import_brw.id], {'cron_id': cron_id}, context=context) |
717 | + |
718 | + return res |
719 | + |
720 | + def unlink(self, cr, uid, ids, context=None): |
721 | + """ |
722 | + Delete the associated ir_cron |
723 | + :param cr: Cursor to the database |
724 | + :param uid: ID of the res.users that calls this method |
725 | + :param ids: List of automated.import ID to remove |
726 | + :param context: Context of the call |
727 | + :return: True |
728 | + """ |
729 | + cron_obj = self.pool.get('ir.cron') |
730 | + job_obj = self.pool.get('automated.import.job') |
731 | + |
732 | + if context is None: |
733 | + context = {} |
734 | + |
735 | + if isinstance(ids, (int, long)): |
736 | + ids = [ids] |
737 | + |
738 | + if job_obj.search(cr, uid, [('import_id', 'in', ids)], limit=1, order='NO_ORDER', context=context): |
739 | + raise osv.except_osv( |
740 | + _('Error'), |
741 | + _('Please delete the automated import jobs that are linked to the Automatic import you try to delete!'), |
742 | + ) |
743 | + |
744 | + for import_brw in self.browse(cr, uid, ids, context=context): |
745 | + if import_brw.cron_id: |
746 | + cron_obj.unlink(cr, uid, [import_brw.cron_id.id], context=context) |
747 | + |
748 | + return super(automated_import, self).unlink(cr, uid, ids, context=context) |
749 | + |
750 | + def copy(self, cr, uid, import_id, new_vals=None, context=None): |
751 | + """ |
752 | + Display an error on copy as copy is not allowed on automated.import |
753 | + :param cr: Cursor to the database |
754 | + :param uid: ID of the res.users that calls this method |
755 | + :param import_id: ID of the automated.import to copy |
756 | + :param new_vals: Default values for the new automated.import record |
757 | + :param context: Context of the call |
758 | + :return: The ID of the new automated.import record |
759 | + """ |
760 | + raise osv.except_osv( |
761 | + _('Error'), |
762 | + _('Copy is not allowed for Automated imports!'), |
763 | + ) |
764 | + |
765 | + def active_import(self, cr, uid, ids, context=None): |
766 | + """ |
767 | + Make the automated.import as active |
768 | + :param cr: Cursor to the database |
769 | + :param uid: ID of the res.users that calls this method |
770 | + :param ids: List of ID of automated.import to activate |
771 | + :param context: Context of the call |
772 | + :return: True |
773 | + """ |
774 | + if context is None: |
775 | + context = {} |
776 | + |
777 | + if isinstance(ids, (int, long)): |
778 | + ids = [ids] |
779 | + |
780 | + return self.write(cr, uid, ids, {'active': True}, context=context) |
781 | + |
782 | + def deactive_import(self, cr, uid, ids, context=None): |
783 | + """ |
784 | + Make the automated.import as inactive |
785 | + :param cr: Cursor to the database |
786 | + :param uid: ID of the res.users that calls this method |
787 | + :param ids: List of ID of automated.import to activate |
788 | + :param context: Context of the call |
789 | + :return: True |
790 | + """ |
791 | + if context is None: |
792 | + context = {} |
793 | + |
794 | + if isinstance(ids, (int, long)): |
795 | + ids = [ids] |
796 | + |
797 | + return self.write(cr, uid, ids, {'active': False}, context=context) |
798 | + |
799 | +automated_import() |
800 | |
801 | === added file 'bin/addons/msf_tools/automated_import_data.xml' |
802 | --- bin/addons/msf_tools/automated_import_data.xml 1970-01-01 00:00:00 +0000 |
803 | +++ bin/addons/msf_tools/automated_import_data.xml 2016-05-26 09:26:36 +0000 |
804 | @@ -0,0 +1,102 @@ |
805 | +<?xml version="1.0" encoding="utf-8" ?> |
806 | +<openerp> |
807 | + <data> |
808 | + |
809 | + <record id="auto_import_fnct_res_partner" model="automated.import.function"> |
810 | + <field name="name">Import Suppliers</field> |
811 | + <field name="model_id" model="ir.model" search="[('model', '=', 'res.partner')]" /> |
812 | + <field name="method_to_call">import_data_from_csv</field> |
813 | + </record> |
814 | + |
815 | + <record id="auto_import_fnct_account_account" model="automated.import.function"> |
816 | + <field name="name">Import G/L Accounts</field> |
817 | + <field name="model_id" model="ir.model" search="[('model', '=', 'account.account')]" /> |
818 | + <field name="method_to_call">import_data_from_csv</field> |
819 | + </record> |
820 | + |
821 | + <record id="auto_import_fnct_account_journal" model="automated.import.function"> |
822 | + <field name="name">Import G/L Journals</field> |
823 | + <field name="model_id" model="ir.model" search="[('model', '=', 'account.journal')]" /> |
824 | + <field name="method_to_call">import_data_from_csv</field> |
825 | + </record> |
826 | + |
827 | + <record id="auto_import_fnct_analytic_account" model="automated.import.function"> |
828 | + <field name="name">Import Analytic Accounts</field> |
829 | + <field name="model_id" model="ir.model" search="[('model', '=', 'account.analytic.account')]" /> |
830 | + <field name="method_to_call">import_data_from_csv</field> |
831 | + </record> |
832 | + |
833 | + <record id="auto_import_fnct_analytic_journal" model="automated.import.function"> |
834 | + <field name="name">Import Analytic Journals</field> |
835 | + <field name="model_id" model="ir.model" search="[('model', '=', 'account.analytic.journal')]" /> |
836 | + <field name="method_to_call">import_data_from_csv</field> |
837 | + </record> |
838 | + |
839 | + <record id="auto_import_fnct_destination_account" model="automated.import.function"> |
840 | + <field name="name">Import Links between destinations and GL accounts</field> |
841 | + <field name="model_id" model="ir.model" search="[('model', '=', 'account.analytic.account')]" /> |
842 | + <field name="method_to_call">import_data_from_csv</field> |
843 | + </record> |
844 | + |
845 | + <record id="auto_import_fnct_sync_product_nomenclature" model="automated.import.function"> |
846 | + <field name="name">Import Product Nomenclatures</field> |
847 | + <field name="model_id" model="ir.model" search="[('model', '=', 'product.nomenclature')]" /> |
848 | + <field name="method_to_call">import_data_with_wizard</field> |
849 | + </record> |
850 | + |
851 | + <record id="auto_import_fnct_product_categories" model="automated.import.function"> |
852 | + <field name="name">Import Product Categories</field> |
853 | + <field name="model_id" model="ir.model" search="[('model', '=', 'product.category')]" /> |
854 | + <field name="method_to_call">import_data_with_wizard</field> |
855 | + </record> |
856 | + |
857 | + <record id="auto_import_fnct_employee" model="automated.import.function"> |
858 | + <field name="name">Import Employees</field> |
859 | + <field name="model_id" model="ir.model" search="[('model', '=', 'hr.employee')]" /> |
860 | + <field name="method_to_call">auto_import</field> |
861 | + </record> |
862 | + |
863 | + <record id="auto_import_fnct_product" model="automated.import.function"> |
864 | + <field name="name">Import Products</field> |
865 | + <field name="model_id" model="ir.model" search="[('model', '=', 'product.product')]" /> |
866 | + <field name="method_to_call">import_data_with_wizard</field> |
867 | + </record> |
868 | + |
869 | + <record id="auto_import_fnct_access_control_list" model="automated.import.function"> |
870 | + <field name="name">Import Access Control Lists</field> |
871 | + <field name="model_id" model="ir.model" search="[('model', '=', 'ir.model.access')]" /> |
872 | + <field name="method_to_call">import_data_from_csv</field> |
873 | + </record> |
874 | + |
875 | + <record id="auto_import_fnct_record_rules" model="automated.import.function"> |
876 | + <field name="name">Import Record Rules</field> |
877 | + <field name="model_id" model="ir.model" search="[('model', '=', 'ir.rule')]" /> |
878 | + <field name="method_to_call">import_data_from_csv</field> |
879 | + </record> |
880 | + |
881 | + <record id="auto_import_fnct_window_actions" model="automated.import.function"> |
882 | + <field name="name">Import Window Actions</field> |
883 | + <field name="model_id" model="ir.model" search="[('model', '=', 'ir.actions.act_window')]" /> |
884 | + <field name="method_to_call">import_data_from_csv</field> |
885 | + </record> |
886 | + |
887 | + <record id="auto_import_fnct_field_access_rule" model="automated.import.function"> |
888 | + <field name="name">Import Field Access Rules</field> |
889 | + <field name="model_id" model="ir.model" search="[('model', '=', 'msf_field_access_rights.field_access_rule')]" /> |
890 | + <field name="method_to_call">import_data_from_csv</field> |
891 | + </record> |
892 | + |
893 | + <record id="auto_import_fnct_field_access_rule_lines" model="automated.import.function"> |
894 | + <field name="name">Import Field Access Rule Lines</field> |
895 | + <field name="model_id" model="ir.model" search="[('model', '=', 'msf_field_access_rights.field_access_rule_line')]" /> |
896 | + <field name="method_to_call">import_data_from_csv</field> |
897 | + </record> |
898 | + |
899 | + <record id="auto_import_fnct_button_access_rules" model="automated.import.function"> |
900 | + <field name="name">Import Button Access Rules</field> |
901 | + <field name="model_id" model="ir.model" search="[('model', '=', 'msf_button_access_rights.button_access_rule')]" /> |
902 | + <field name="method_to_call">import_data_from_csv</field> |
903 | + </record> |
904 | + |
905 | + </data> |
906 | +</openerp> |
907 | |
908 | === added file 'bin/addons/msf_tools/automated_import_function.py' |
909 | --- bin/addons/msf_tools/automated_import_function.py 1970-01-01 00:00:00 +0000 |
910 | +++ bin/addons/msf_tools/automated_import_function.py 2016-05-26 09:26:36 +0000 |
911 | @@ -0,0 +1,107 @@ |
912 | +# -*- coding: utf-8 -*- |
913 | +############################################################################## |
914 | +# |
915 | +# OpenERP, Open Source Management Solution |
916 | +# Copyright (C) 2016 TeMPO Consulting, MSF |
917 | +# |
918 | +# This program is free software: you can redistribute it and/or modify |
919 | +# it under the terms of the GNU Affero General Public License as |
920 | +# published by the Free Software Foundation, either version 3 of the |
921 | +# License, or (at your option) any later version. |
922 | +# |
923 | +# This program is distributed in the hope that it will be useful, |
924 | +# but WITHOUT ANY WARRANTY; without even the implied warranty of |
925 | +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
926 | +# GNU Affero General Public License for more details. |
927 | +# |
928 | +# You should have received a copy of the GNU Affero General Public License |
929 | +# along with this program. If not, see <http://www.gnu.org/licenses/>. |
930 | +# |
931 | +############################################################################## |
932 | + |
933 | +from osv import osv |
934 | +from osv import fields |
935 | + |
936 | +from tools.translate import _ |
937 | + |
938 | + |
class automated_import_function(osv.osv):
    """
    Couple an ORM model with the name of the import method an automated
    import must call on it.
    """
    _name = 'automated.import.function'

    _columns = {
        'name': fields.char(
            size=64,
            string='Name',
            required=True,
        ),
        'model_id': fields.many2one(
            'ir.model',
            string='Model',
            required=True,
        ),
        'method_to_call': fields.char(
            size=128,
            string='Method to call',
            required=True,
        ),
    }

    _defaults = {
        'method_to_call': lambda *a: 'import_data_from_csv',
    }

    def check_method_to_call(self, cr, uid, model_id, method):
        """
        Check if the model implements the method.
        :param cr: Cursor to the database
        :param uid: ID of the res.users that calls the method
        :param model_id: ID of the ir.model record to check
        :param method: Name of the method to look for on the model
        :return: True if the method is implemented on the model
        :raise osv.except_osv: if the model doesn't implement the method
        """
        model = self.pool.get('ir.model').browse(cr, uid, model_id)
        if not hasattr(self.pool.get(model.model), method):
            # BUGFIX: the method and model names were swapped in the
            # message arguments (the model name was displayed where the
            # method name belongs and vice versa)
            raise osv.except_osv(
                _('Error'),
                _('The method \'%s\' of the model \'%s\' is not callable') % (method, model.model),
            )
        return True

    def create(self, cr, uid, vals, context=None):
        """
        Run the check on method to call before creating the new record.
        :param cr: Cursor to the database
        :param uid: ID of the res.users that calls this method
        :param vals: Values to put on the new record
        :param context: Context of the call
        :return: ID of the new automated.import.function
        """
        if vals.get('model_id') and vals.get('method_to_call'):
            self.check_method_to_call(cr, uid, vals.get('model_id'), vals.get('method_to_call'))

        return super(automated_import_function, self).create(cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """
        Run the check on method to call before updating the record(s).
        :param cr: Cursor to the database
        :param uid: ID of the res.users that calls this method
        :param ids: List of ID of automated.import.function record to update
        :param vals: Values to put on the record(s)
        :param context: Context of the call
        :return: True
        """
        if isinstance(ids, (int, long)):
            ids = [ids]

        if vals.get('model_id') and vals.get('method_to_call'):
            self.check_method_to_call(cr, uid, vals.get('model_id'), vals.get('method_to_call'))
        elif vals.get('model_id') or vals.get('method_to_call'):
            for rec in self.browse(cr, uid, ids, context=context):
                # BUGFIX: check_method_to_call() expects an ir.model ID and
                # calls browse() on it; rec.model_id is a browse_record, so
                # pass its .id when 'model_id' is not in vals
                model = vals.get('model_id', rec.model_id.id)
                func = vals.get('method_to_call', rec.method_to_call)
                self.check_method_to_call(cr, uid, model, func)

        return super(automated_import_function, self).write(cr, uid, ids, vals, context=context)

automated_import_function()
1019 | \ No newline at end of file |
1020 | |
1021 | === added file 'bin/addons/msf_tools/automated_import_job.py' |
1022 | --- bin/addons/msf_tools/automated_import_job.py 1970-01-01 00:00:00 +0000 |
1023 | +++ bin/addons/msf_tools/automated_import_job.py 2016-05-26 09:26:36 +0000 |
1024 | @@ -0,0 +1,366 @@ |
1025 | +# -*- coding: utf-8 -*- |
1026 | +############################################################################## |
1027 | +# |
1028 | +# OpenERP, Open Source Management Solution |
1029 | +# Copyright (C) 2016 TeMPO Consulting, MSF |
1030 | +# |
1031 | +# This program is free software: you can redistribute it and/or modify |
1032 | +# it under the terms of the GNU Affero General Public License as |
1033 | +# published by the Free Software Foundation, either version 3 of the |
1034 | +# License, or (at your option) any later version. |
1035 | +# |
1036 | +# This program is distributed in the hope that it will be useful, |
1037 | +# but WITHOUT ANY WARRANTY; without even the implied warranty of |
1038 | +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
1039 | +# GNU Affero General Public License for more details. |
1040 | +# |
1041 | +# You should have received a copy of the GNU Affero General Public License |
1042 | +# along with this program. If not, see <http://www.gnu.org/licenses/>. |
1043 | +# |
1044 | +############################################################################## |
1045 | + |
1046 | +import os |
1047 | +import csv |
1048 | +import time |
1049 | +import shutil |
1050 | +import base64 |
1051 | +import hashlib |
1052 | + |
1053 | +from osv import osv |
1054 | +from osv import fields |
1055 | + |
1056 | +from tools.translate import _ |
1057 | + |
1058 | + |
def all_files_under(path):
    """
    Yield the full path of every file located anywhere under `path`
    (in the directory itself and in all of its sub-directories).
    :param path: Root directory to explore
    """
    for current_dir, _dirs, file_names in os.walk(path):
        for file_name in file_names:
            yield os.path.join(current_dir, file_name)
1067 | + |
1068 | + |
def move_to_process_path(file, src_path, dest_path):
    """
    Move the file `file` from `src_path` to `dest_path`, prefixing its name
    with the current date and time so that successive moves of files with
    the same name do not collide in the destination folder.
    :param file: Name of the file to move
    :param src_path: Source folder
    :param dest_path: Destination folder
    :return: True
    """
    stamped_name = '%s_%s' % (time.strftime('%Y%m%d_%H%M%S'), file)
    shutil.move(os.path.join(src_path, file), os.path.join(dest_path, stamped_name))
    return True
1081 | + |
1082 | + |
class automated_import_job(osv.osv):
    """
    One run of an automated import: picks a file (oldest in the source path,
    or a manually supplied one), calls the configured import method on it,
    stores the result counters and moves the file to the processed folder.
    """
    _name = 'automated.import.job'

    def _get_name(self, cr, uid, ids, field_name, args, context=None):
        """
        Build the name of the job by using the function_id and the date and time
        :param cr: Cursor to the database
        :param uid: ID of the res.users that calls this method
        :param ids: List of ID of automated.import.job to compute name
        :param field_name: The name of the field to compute (here: name)
        :param args: Additional parameters
        :param context: Context of the call
        :return: A dictionnary with automated.import.job ID as key and computed name as value
        """
        if context is None:
            context = {}

        if isinstance(ids, (int, long)):
            ids = [ids]

        res = {}
        for job in self.browse(cr, uid, ids, context=context):
            res[job.id] = '%s - %s' % (job.import_id.function_id.name, job.start_time or _('Not started'))

        return res

    _columns = {
        'name': fields.function(
            _get_name,
            method=True,
            type='char',
            size=128,
            string='Name',
            store=True,
        ),
        'import_id': fields.many2one(
            'automated.import',
            string='Automated import',
            required=True,
            readonly=True,
        ),
        'file_to_import': fields.binary(
            string='File to import',
        ),
        'filename': fields.char(
            size=128,
            string='Name of the file to import',
        ),
        'file_sum': fields.char(
            string='Check sum',
            size=256,
            readonly=True,
        ),
        'start_time': fields.datetime(
            string='Start time',
            readonly=True,
        ),
        'end_time': fields.datetime(
            string='End time',
            readonly=True,
        ),
        'nb_processed_records': fields.integer(
            string='# of processed records',
            readonly=True,
        ),
        'nb_rejected_records': fields.integer(
            string='# of rejected records',
            readonly=True,
        ),
        'comment': fields.text(
            string='Comment',
            readonly=True,
        ),
        'state': fields.selection(
            selection=[
                ('draft', 'Draft'),
                ('in_progress', 'In progress'),
                ('done', 'Done'),
                ('error', 'Exception'),
            ],
            string='State',
            readonly=True,
            required=True,
        ),
    }

    _defaults = {
        'state': lambda *a: 'draft',
    }

    def process_import(self, cr, uid, ids, context=None):
        """
        First, browse the source path, then select the oldest file and run import on this file.
        After the processing of import, generate a report and move the processed file to the
        processed folder.
        :param cr: Cursor to the database
        :param uid: ID of the res.users that calls this method
        :param ids: List of ID of automated.import.job to process
        :param context: Context of the call
        :return: An ir.actions.act_window to display the processed job
        """
        import_obj = self.pool.get('automated.import')
        data_obj = self.pool.get('ir.model.data')

        if context is None:
            context = {}  # BUGFIX: context was initialized to a list

        if isinstance(ids, (int, long)):
            ids = [ids]

        for job in self.browse(cr, uid, ids, context=context):
            nb_rejected = 0
            nb_processed = 0
            start_time = time.strftime('%Y-%m-%d %H:%M:%S')
            no_file = False
            md5 = False
            error = None
            data64 = None
            filename = False

            # Check that all configured paths are reachable with the needed mode
            try:
                for path in [('src_path', 'r'), ('dest_path', 'w'), ('report_path', 'w')]:
                    import_obj.path_is_accessible(job.import_id[path[0]], path[1])
            except osv.except_osv as e:
                error = str(e)
                # In case of manual processing, raise the error
                if job.file_to_import:
                    raise e

            if not job.file_to_import:
                # Automatic processing: take the oldest file of the source path
                try:
                    oldest_file = min(all_files_under(job.import_id.src_path), key=os.path.getmtime)
                    filename = os.path.split(oldest_file)[1]
                    # BUGFIX: the file was opened and fully read twice,
                    # without being closed; read it once and close it
                    with open(oldest_file) as f_in:
                        file_content = f_in.read()
                    md5 = hashlib.md5(file_content).hexdigest()
                    data64 = base64.encodestring(file_content)
                except ValueError:
                    # min() on an empty iterator: no file in the source path
                    no_file = True

                if not error:
                    if no_file:
                        error = _('No file to import in %s !') % job.import_id.src_path
                    elif md5 and self.search(cr, uid, [('import_id', '=', job.import_id.id), ('file_sum', '=', md5)], limit=1, order='NO_ORDER', context=context):
                        error = _('A file with same checksum has been already imported !')
                        move_to_process_path(filename, job.import_id.src_path, job.import_id.dest_path)

                if error:
                    self.write(cr, uid, [job.id], {
                        'filename': filename,
                        'file_to_import': data64,
                        'start_time': start_time,
                        # BUGFIX: end_time is a datetime field; store the
                        # full date and time (the time part was missing)
                        'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
                        'nb_processed_records': 0,
                        'nb_rejected_records': 0,
                        'comment': error,
                        'file_sum': md5,
                        'state': 'error',
                    }, context=context)
                    continue
            else:
                # Manual processing: put the given file in the source path
                with open(os.path.join(job.import_id.src_path, job.filename), 'wb+') as f_out:
                    f_out.write(base64.decodestring(job.file_to_import))
                # BUGFIX: hash the decoded file content so the checksum is
                # comparable with the one computed for automatic imports
                # (it was computed on the base64-encoded payload)
                md5 = hashlib.md5(base64.decodestring(job.file_to_import)).hexdigest()

                if job.file_sum != md5:
                    if self.search(cr, uid, [('file_sum', '=', md5), ('id', '!=', job.id)], limit=1, order='NO_ORDER', context=context):
                        # Already imported once: ask the user to confirm
                        self.write(cr, uid, [job.id], {'file_sum': md5}, context=context)
                        return {
                            'type': 'ir.actions.act_window',
                            'res_model': self._name,
                            'res_id': ids[0],
                            'view_type': 'form',
                            'view_mode': 'form,tree',
                            'target': 'new',
                            'view_id': [data_obj.get_object_reference(cr, uid, 'msf_tools', 'automated_import_job_file_view')[1]],
                            'context': context,
                        }

                oldest_file = os.path.join(job.import_id.src_path, job.filename)
                filename = job.filename
                # NOTE(review): file_to_import is a binary field, so its
                # browse value should already be base64 — confirm whether
                # this re-encoding is intended
                data64 = base64.encodestring(job.file_to_import)

            # Process import
            try:
                processed, rejected, headers = getattr(
                    self.pool.get(job.import_id.function_id.model_id.model),
                    job.import_id.function_id.method_to_call
                )(cr, uid, oldest_file)
                if processed:
                    nb_processed = self.generate_file_report(cr, uid, job, processed, headers)

                if rejected:
                    nb_rejected = self.generate_file_report(cr, uid, job, rejected, headers, rejected=True)

                self.write(cr, uid, [job.id], {
                    'filename': filename,
                    'start_time': start_time,
                    'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
                    'nb_processed_records': nb_processed,
                    'nb_rejected_records': nb_rejected,
                    'file_sum': md5,
                    'file_to_import': data64,
                    'state': 'done',
                }, context=context)
            except Exception as e:
                self.write(cr, uid, [job.id], {
                    'filename': False,
                    'start_time': start_time,
                    # BUGFIX: store the full date and time (see above)
                    'end_time': time.strftime('%Y-%m-%d %H:%M:%S'),
                    'nb_processed_records': 0,
                    'nb_rejected_records': 0,
                    'comment': str(e),
                    'file_sum': md5,
                    'file_to_import': data64,
                    'state': 'error',
                }, context=context)
            finally:
                # Whatever the outcome, archive the file in the processed folder
                move_to_process_path(filename, job.import_id.src_path, job.import_id.dest_path)

        return {
            'type': 'ir.actions.act_window',
            'res_model': self._name,
            'res_id': ids[0],
            'view_type': 'form',
            'view_mode': 'form,tree',
            'target': 'current',
            'context': context,
        }

    def generate_file_report(self, cr, uid, job_brw, data_lines, headers, rejected=False):
        """
        Create a csv file that contains the processed lines, put this csv file
        in the report_path directory and attach it to the automated.import.job.
        :param cr: Cursor to the database
        :param uid: ID of the res.users that calls this method
        :param job_brw: browse_record of the automated.import.job that needs a report
        :param data_lines: List of tuples containing the line index and the line data
        :param headers: List of field names in the file
        :param rejected: If True, the data_lines tuples have 3 members (the last
            one being the error), else 2 members
        :return: Number of lines in the file
        """
        att_obj = self.pool.get('ir.attachment')

        filename = '%s_%s_%s.csv' % (
            time.strftime('%Y%m%d_%H%M%S'),
            job_brw.import_id.function_id.model_id.model,
            rejected and 'rejected' or 'processed'
        )
        pth_filename = os.path.join(job_brw.import_id.report_path, filename)
        delimiter = ','
        quotechar = '"'

        with open(pth_filename, 'wb') as csvfile:
            spamwriter = csv.writer(csvfile, delimiter=delimiter, quotechar=quotechar, quoting=csv.QUOTE_MINIMAL)
            headers_row = [_('Line number')] + headers
            if rejected:
                headers_row += [_('Error')]
            spamwriter.writerow(headers_row)
            for pl in data_lines:
                pl_row = [pl[0]] + pl[1]
                if rejected:
                    pl_row += [pl[2]]
                spamwriter.writerow(pl_row)

        # BUGFIX: the report file was re-opened without being closed;
        # use a context manager so the descriptor is always released
        with open(pth_filename, 'r') as report_file:
            report_data = report_file.read()
        att_obj.create(cr, uid, {
            'name': filename,
            'datas_fname': filename,
            'description': '%s Lines' % (rejected and _('Rejected') or _('Processed')),
            'res_model': 'automated.import.job',
            'res_id': job_brw.id,
            'datas': base64.encodestring(report_data)
        })

        return len(data_lines)

    def cancel_file_import(self, cr, uid, ids, context=None):
        """
        Delete the automated.import.job and close the wizard.
        :param cr: Cursor to the database
        :param uid: ID of the res.users that calls this method
        :param ids: List of automated.import.job to delete
        :param context: Context of the call
        :return: The action to close the wizard
        """
        self.unlink(cr, uid, ids, context=context)
        return {'type': 'ir.actions.act_window_close'}

automated_import_job()
1373 | + |
1374 | + |
class automated_import_job_progress(osv.osv_memory):
    """Transient model used to follow the progress of an import job."""
    _name = 'automated.import.job.progress'

    _columns = {
        'job_id': fields.many2one(
            'automated.import.job',
            string='Import job',
            required=True,
        ),
        # BUGFIX: fields.related() takes the path of field names to follow
        # plus the type/relation of the target field; the relation name was
        # passed as if it were a field name, making the field unusable
        'import_id': fields.related(
            'job_id',
            'import_id',
            type='many2one',
            relation='automated.import',
            string='Import',
        ),
    }

automated_import_job_progress()
1391 | |
1392 | === modified file 'bin/addons/msf_tools/msf_tools.py' |
1393 | --- bin/addons/msf_tools/msf_tools.py 2016-04-11 08:10:35 +0000 |
1394 | +++ bin/addons/msf_tools/msf_tools.py 2016-05-26 09:26:36 +0000 |
1395 | @@ -624,10 +624,7 @@ |
1396 | tr_split = name.split(',') |
1397 | res_id = self.pool.get('ir.model.data').find_sd_ref(cr, 1, sdref, field='res_id', context=context) |
1398 | if res_id and tr_split[0] == 'product.template': |
1399 | - prod = self.pool.get('product.product').read(cr, 1, [res_id], ['product_tmpl_id'], context=context) |
1400 | - if not prod: |
1401 | - return False |
1402 | - prod = prod[0] |
1403 | + prod = self.pool.get('product.product').read(cr, 1, [res_id], ['product_tmpl_id'], context=context)[0] |
1404 | if prod['product_tmpl_id']: |
1405 | return prod['product_tmpl_id'][0] |
1406 | return res_id |
1407 | |
1408 | === added directory 'bin/addons/msf_tools/views' |
1409 | === added file 'bin/addons/msf_tools/views/automated_import_function_view.xml' |
1410 | --- bin/addons/msf_tools/views/automated_import_function_view.xml 1970-01-01 00:00:00 +0000 |
1411 | +++ bin/addons/msf_tools/views/automated_import_function_view.xml 2016-05-26 09:26:36 +0000 |
1412 | @@ -0,0 +1,58 @@ |
1413 | +<?xml version="1.0" encoding="utf-8" ?> |
1414 | +<openerp> |
1415 | + <data> |
1416 | + |
1417 | + <record id="automated_import_function_search_view" model="ir.ui.view"> |
1418 | + <field name="name">automated.import.function.search.view</field> |
1419 | + <field name="model">automated.import.function</field> |
1420 | + <field name="type">search</field> |
1421 | + <field name="arch" type="xml"> |
1422 | + <search string="Automated import functions"> |
1423 | + <field name="name" /> |
1424 | + <field name="model_id" /> |
1425 | + <field name="method_to_call" /> |
1426 | + </search> |
1427 | + </field> |
1428 | + </record> |
1429 | + |
1430 | + <record id="automated_import_function_tree_view" model="ir.ui.view"> |
1431 | + <field name="name">automated.import.function.tree.view</field> |
1432 | + <field name="model">automated.import.function</field> |
1433 | + <field name="type">tree</field> |
1434 | + <field name="arch" type="xml"> |
1435 | + <tree string="Automated import functions"> |
1436 | + <field name="name" /> |
1437 | + <field name="model_id" /> |
1438 | + <field name="method_to_call" /> |
1439 | + </tree> |
1440 | + </field> |
1441 | + </record> |
1442 | + |
1443 | + <record id="automated_import_function_form_view" model="ir.ui.view"> |
1444 | + <field name="name">automated.import.function.form.view</field> |
1445 | + <field name="model">automated.import.function</field> |
1446 | + <field name="type">form</field> |
1447 | + <field name="arch" type="xml"> |
1448 | + <form string="Automated import function"> |
1449 | + <field name="name" /> |
1450 | + <newline /> |
1451 | + <field name="model_id" /> |
1452 | + <field name="method_to_call" /> |
1453 | + </form> |
1454 | + </field> |
1455 | + </record> |
1456 | + |
1457 | + <record id="automated_import_function_action" model="ir.actions.act_window"> |
1458 | + <field name="name">Automated import functions</field> |
1459 | + <field name="res_model">automated.import.function</field> |
1460 | + <field name="view_type">form</field> |
1461 | + <field name="view_mode">tree,form</field> |
1462 | + </record> |
1463 | + |
1464 | + <menuitem |
1465 | + id="automated_import_function_menu" |
1466 | + action="automated_import_function_action" |
1467 | + parent="automated_import_menu" /> |
1468 | + |
1469 | + </data> |
1470 | +</openerp> |
1471 | \ No newline at end of file |
1472 | |
1473 | === added file 'bin/addons/msf_tools/views/automated_import_job_view.xml' |
1474 | --- bin/addons/msf_tools/views/automated_import_job_view.xml 1970-01-01 00:00:00 +0000 |
1475 | +++ bin/addons/msf_tools/views/automated_import_job_view.xml 2016-05-26 09:26:36 +0000 |
1476 | @@ -0,0 +1,120 @@ |
1477 | +<?xml version="1.0" encoding="utf-8" ?> |
1478 | +<openerp> |
1479 | + <data> |
1480 | + |
1481 | + <record id="automated_import_job_search_view" model="ir.ui.view"> |
1482 | + <field name="name">automated.import.job.search.view</field> |
1483 | + <field name="model">automated.import.job</field> |
1484 | + <field name="type">search</field> |
1485 | + <field name="arch" type="xml"> |
1486 | + <search string="Import job requests"> |
1487 | + <filter icon="terp-document-new" string="Draft" name="draft" domain="[('state', '=', 'draft')]" help="Draft jobs" /> |
1488 | + <filter icon="terp-gnome-cpu-frequency-applet+" string="In progress" name="in_progress" domain="[('state', '=', 'in_progress')]" help="In progress jobs" /> |
1489 | + <filter icon="terp-dialog-close" string="Done" name="done" domain="[('state', '=', 'done')]" help="Done jobs" /> |
1490 | + <filter icon="terp-emblem-important" string="Exception" name="exceptions" domain="[('state', '=', 'error')]" help="Jobs with error" /> |
1491 | + <separator orientation="vertical" /> |
1492 | + <field name="import_id" /> |
1493 | + <field name="start_time" /> |
1494 | + <field name="end_time" /> |
1495 | + </search> |
1496 | + </field> |
1497 | + </record> |
1498 | + |
1499 | + <record id="automated_import_job_tree_view" model="ir.ui.view"> |
1500 | + <field name="name">automated.import.job.tree.view</field> |
1501 | + <field name="model">automated.import.job</field> |
1502 | + <field name="type">tree</field> |
1503 | + <field name="arch" type="xml"> |
1504 | + <tree string="Import job reports" noteditable="True" hide_new_button="True"> |
1505 | + <field name="import_id" /> |
1506 | + <field name="start_time" /> |
1507 | + <field name="end_time" /> |
1508 | + <field name="state" /> |
1509 | + </tree> |
1510 | + </field> |
1511 | + </record> |
1512 | + |
1513 | + <record id="automated_import_job_form_view" model="ir.ui.view"> |
1514 | + <field name="name">automated.import.job.form.view</field> |
1515 | + <field name="model">automated.import.job</field> |
1516 | + <field name="type">form</field> |
1517 | + <field name="arch" type="xml"> |
1518 | + <form string="Import job report"> |
1519 | + <field name="import_id" /> |
1520 | + <field name="start_time" /> |
1521 | + <field name="end_time" /> |
1522 | + <separator colspan="4" string="Import file" /> |
1523 | + <field name="file_to_import" readonly="True" filename="filename" /> |
1524 | + <field name="filename" invisible="1" /> |
1525 | + <separator colspan="4" string="Import results" /> |
1526 | + <field name="nb_processed_records" /> |
1527 | + <field name="nb_rejected_records" /> |
1528 | + <field name="comment" colspan="4" /> |
1529 | + <separator colspan="4" string="States" /> |
1530 | + <field name="state" /> |
1531 | + <button name="process_import" type="object" states="draft" string="Process import" icon="gtk-execute" /> |
1532 | + </form> |
1533 | + </field> |
1534 | + </record> |
1535 | + |
1536 | + <record id="automated_import_job_file_view" model="ir.ui.view"> |
1537 | + <field name="name">automated.import.job.file.view</field> |
1538 | + <field name="model">automated.import.job</field> |
1539 | + <field name="type">form</field> |
1540 | + <field name="priority" eval="99" /> |
1541 | + <field name="arch" type="xml"> |
1542 | + <form string="Automated import job"> |
1543 | + <field name="file_sum" invisible="1" /> |
1544 | + <separator colspan="4" string="Import file" /> |
1545 | + <group colspan="4" attrs="{'invisible': [('file_sum', '!=', False)]}"> |
1546 | + <html> |
1547 | + <style> |
1548 | + #explanation_message_unifield div |
1549 | + { |
1550 | + font-weight: bold; |
1551 | + font-size: 1.2em; |
1552 | + } |
1553 | + </style> |
1554 | + <div id="explanation_message_unifield"> |
1555 | + <label colspan="4" string="If no file is selected, the system will try to get the oldest file in the source path." align="0.0" /> |
1556 | + </div> |
1557 | + </html> |
1558 | + </group> |
1559 | + <group colspan="4" attrs="{'invisible': [('file_sum', '=', False)]}"> |
1560 | + <html> |
1561 | + <style> |
1562 | + #warning_message_unifield div |
1563 | + { |
1564 | + font-weight: bold; |
1565 | + font-size: 1.2em; |
1566 | + color: red; |
1567 | + } |
1568 | + </style> |
1569 | + <div id="warning_message_unifield"> |
1570 | + <label colspan="4" string="The file you selected has already been imported in the past. Are you sure you want to import it ?" align="0.0" /> |
1571 | + </div> |
1572 | + </html> |
1573 | + </group> |
1574 | + <field name="file_to_import" filename="filename" /> |
1575 | + <field name="filename" invisible="1" /> |
1576 | + <separator colspan="4" string="Actions" /> |
1577 | + <button name="cancel_file_import" type="object" string="Cancel" icon="gtk-cancel" colspan="2" /> |
1578 | + <button name="process_import" type="object" string="Run job" icon="gtk-execute" colspan="2" /> |
1579 | + </form> |
1580 | + </field> |
1581 | + </record> |
1582 | + |
1583 | + <record id="automated_import_job_action" model="ir.actions.act_window"> |
1584 | + <field name="name">Import job reports</field> |
1585 | + <field name="res_model">automated.import.job</field> |
1586 | + <field name="view_type">form</field> |
1587 | + <field name="view_mode">tree,form</field> |
1588 | + </record> |
1589 | + |
1590 | + <menuitem |
1591 | + id="automated_import_job_menu" |
1592 | + action="automated_import_job_action" |
1593 | + parent="automated_import_menu" /> |
1594 | + |
1595 | + </data> |
1596 | +</openerp> |
1597 | |
1598 | === added file 'bin/addons/msf_tools/views/automated_import_view.xml' |
1599 | --- bin/addons/msf_tools/views/automated_import_view.xml 1970-01-01 00:00:00 +0000 |
1600 | +++ bin/addons/msf_tools/views/automated_import_view.xml 2016-05-26 09:26:36 +0000 |
1601 | @@ -0,0 +1,77 @@ |
1602 | +<?xml version="1.0" encoding="utf-8" ?> |
1603 | +<openerp> |
1604 | + <data> |
1605 | + |
1606 | + <record id="automated_import_search_view" model="ir.ui.view"> |
1607 | + <field name="name">automated.import.search.view</field> |
1608 | + <field name="model">automated.import</field> |
1609 | + <field name="type">search</field> |
1610 | + <field name="arch" type="xml"> |
1611 | + <search string="Automated imports"> |
1612 | + <filter name="active" domain="[('active', '=', False)]" string="Inactive" icon="gtk-undo" /> |
1613 | + <field name="name" /> |
1614 | + <field name="function_id" /> |
1615 | + </search> |
1616 | + </field> |
1617 | + </record> |
1618 | + |
1619 | + <record id="automated_import_tree_view" model="ir.ui.view"> |
1620 | + <field name="name">automated.import.tree.view</field> |
1621 | + <field name="model">automated.import</field> |
1622 | + <field name="type">tree</field> |
1623 | + <field name="arch" type="xml"> |
1624 | + <tree string="Automated imports"> |
1625 | + <field name="name" /> |
1626 | + <field name="function_id" /> |
1627 | + <field name="start_time" /> |
1628 | + <field name="interval" /> |
1629 | + <field name="interval_unit" /> |
1630 | + </tree> |
1631 | + </field> |
1632 | + </record> |
1633 | + |
1634 | + <record id="automated_import_form_view" model="ir.ui.view"> |
1635 | + <field name="name">automated.import.form.view</field> |
1636 | + <field name="model">automated.import</field> |
1637 | + <field name="type">form</field> |
1638 | + <field name="arch" type="xml"> |
1639 | + <form string="Automated import"> |
1640 | + <separator colspan="4" string="General configuration" /> |
1641 | + <field name="name" /> |
1642 | + <field name="function_id" widget="selection" /> |
1643 | + <field name="active" /> |
1644 | + <button name="run_job_manually" string="Run job manually" colspan="2" type="object" icon="gtk-execute" /> |
1645 | + <separator colspan="4" string="Paths configuration" /> |
1646 | + <field name="src_path" colspan="4" attrs="{'required': [('active', '=', True)]}" /> |
1647 | + <field name="dest_path" colspan="4" attrs="{'required': [('active', '=', True)]}" /> |
1648 | + <field name="report_path" colspan="4" attrs="{'required': [('active', '=', True)]}" /> |
1649 | + <separator colspan="4" string="Scheduler configuration" /> |
1650 | + <group colspan="4" col="6"> |
1651 | + <field name="priority" /> |
1652 | + <field name="interval" attrs="{'required': [('interval_unit', '>', 0)]}" /> |
1653 | + <field name="interval_unit" /> |
1654 | + <field name="start_time" /> |
1655 | + </group> |
1656 | + <separator colspan="4" string="Activation" /> |
1657 | + <group colspan="4" col="1"> |
1658 | + <button name="active_import" string="Activate" colspan="1" type="object" icon="gtk-execute" attrs="{'invisible': [('active', '=', True)]}" /> |
1659 | + <button name="deactive_import" string="De-activate" colspan="1" type="object" icon="gtk-execute" attrs="{'invisible': [('active', '=', False)]}" /> |
1660 | + </group> |
1661 | + </form> |
1662 | + </field> |
1663 | + </record> |
1664 | + |
1665 | + <record id="automated_import_action" model="ir.actions.act_window"> |
1666 | + <field name="name">Automated imports</field> |
1667 | + <field name="res_model">automated.import</field> |
1668 | + <field name="view_type">form</field> |
1669 | + <field name="view_mode">tree,form</field> |
1670 | + </record> |
1671 | + |
1672 | + <menuitem |
1673 | + id="automated_import_menu" |
1674 | + action="automated_import_action" |
1675 | + parent="object_query.menu_preferences" /> |
1676 | + |
1677 | + </data> |
1678 | +</openerp> |
1679 | |
1680 | === modified file 'bin/addons/sync_server/__openerp__.py' |
1681 | --- bin/addons/sync_server/__openerp__.py 2015-01-12 10:46:48 +0000 |
1682 | +++ bin/addons/sync_server/__openerp__.py 2016-05-26 09:26:36 +0000 |
1683 | @@ -39,6 +39,7 @@ |
1684 | 'security/ir.model.access.csv', |
1685 | 'data/cron.xml', |
1686 | 'data/alert_email.xml', |
1687 | + 'data/automated_import_sync_groups.xml', |
1688 | ], |
1689 | 'demo_xml': [ |
1690 | ], |
1691 | |
1692 | === added file 'bin/addons/sync_server/data/automated_import_sync_groups.xml' |
1693 | --- bin/addons/sync_server/data/automated_import_sync_groups.xml 1970-01-01 00:00:00 +0000 |
1694 | +++ bin/addons/sync_server/data/automated_import_sync_groups.xml 2016-05-26 09:26:36 +0000 |
1695 | @@ -0,0 +1,18 @@ |
1696 | +<?xml version="1.0" encoding="utf-8" ?> |
1697 | +<openerp> |
1698 | + <data> |
1699 | + |
1700 | + <record id="auto_import_fnct_sync_groups" model="automated.import.function"> |
1701 | + <field name="name">Import Sync. Groups</field> |
1702 | + <field name="model_id" model="ir.model" search="[('model', '=', 'sync.server.entity_group')]" /> |
1703 | + <field name="method_to_call">import_data_from_csv</field> |
1704 | + </record> |
1705 | + |
1706 | + <record id="auto_import_fnct_sync_group_type" model="automated.import.function"> |
1707 | + <field name="name">Import Sync. Group Types</field> |
1708 | + <field name="model_id" model="ir.model" search="[('model', '=', 'sync.server.group_type')]" /> |
1709 | + <field name="method_to_call">import_data_from_csv</field> |
1710 | + </record> |
1711 | + |
1712 | + </data> |
1713 | +</openerp> |
1714 | \ No newline at end of file |
1715 | |
1716 | === modified file 'bin/osv/orm.py' (properties changed: +x to -x) |
1717 | --- bin/osv/orm.py 2016-04-12 16:43:36 +0000 |
1718 | +++ bin/osv/orm.py 2016-05-26 09:26:36 +0000 |
1719 | @@ -48,6 +48,7 @@ |
1720 | import time |
1721 | import traceback |
1722 | import types |
1723 | +import csv |
1724 | |
1725 | import netsvc |
1726 | from lxml import etree |
1727 | @@ -745,6 +746,48 @@ |
1728 | datas += self.__export_row(cr, uid, row, fields_to_export, context) |
1729 | return {'datas': datas} |
1730 | |
1731 | + def import_data_with_wizard(self, cr, uid, csv_file, quotechar="'", delimiter=","): |
1732 | + import base64 |
1733 | + |
1734 | + import_obj = self.pool.get('import_data') |
1735 | + import_id = import_obj.create(cr, uid, { |
1736 | + 'ignore': 1, |
1737 | + 'file': base64.encodestring(open(csv_file, 'r').read()), |
1738 | + 'object': self._name, |
1739 | + 'import_mode': 'create', |
1740 | + }) |
1741 | + processed, rejected, headers = import_obj._import(cr, uid, import_id, use_new_cursor=False, auto_import=True) |
1742 | + return processed, rejected, headers |
1743 | + |
1744 | + def import_data_from_csv(self, cr, uid, csv_file, quotechar='"', delimiter=','): |
1745 | + headers = [] |
1746 | + list_data = [] |
1747 | + with open(csv_file, 'r') as fcsv: |
1748 | + reader = csv.reader(fcsv, quotechar=quotechar, delimiter=delimiter) |
1749 | + for row in reader: |
1750 | + if not headers: |
1751 | + headers = row |
1752 | + else: |
1753 | + list_data.append(row) |
1754 | + |
1755 | + rejected = [] |
1756 | + processed = [] |
1757 | + i = 1 |
1758 | + for d in list_data: |
1759 | + i += 1 |
1760 | + try: |
1761 | + res = self.import_data(cr, uid, headers, [d]) |
1762 | + if res[0] == -1: |
1763 | + rejected.append((i, d, res[2])) |
1764 | + else: |
1765 | + processed.append((i, d)) |
1766 | + cr.commit() |
1767 | + except Exception as e: |
1768 | + rejected.append((i, d, str(e))) |
1769 | + cr.commit() |
1770 | + |
1771 | + return processed, rejected, headers |
1772 | + |
1773 | def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None): |
1774 | """ |
1775 | Import given data in given module |