Merge lp:~jfb-tempo-consulting/unifield-server/US-4490 into lp:unifield-server

Proposed by jftempo
Status: Merged
Merged at revision: 4925
Proposed branch: lp:~jfb-tempo-consulting/unifield-server/US-4490
Merge into: lp:unifield-server
Diff against target: 125 lines (+49/-13)
3 files modified
bin/addons/msf_homere_interface/hr.py (+21/-0)
bin/addons/msf_outgoing/msf_outgoing.py (+10/-3)
bin/addons/sync_client/update.py (+18/-10)
To merge this branch: bzr merge lp:~jfb-tempo-consulting/unifield-server/US-4490
Reviewer Review Type Date Requested Status
UniField Reviewer Team Pending
Review via email: mp+343621@code.launchpad.net
To post a comment you must log in.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'bin/addons/msf_homere_interface/hr.py'
2--- bin/addons/msf_homere_interface/hr.py 2017-10-27 12:04:12 +0000
3+++ bin/addons/msf_homere_interface/hr.py 2018-04-19 14:23:11 +0000
4@@ -116,6 +116,27 @@
5 'ex_allow_edition': lambda *a: True,
6 }
7
8+ def _set_sync_update_as_run(self, cr, uid, data, sdref, context=None):
9+ if not data.get('identification_id') or not data.get('name'):
10+ return False
11+
12+ employee_name = data['name'].strip()
13+ existing_id = self.find_sd_ref(cr, uid, sdref)
14+ if not existing_id:
15+ # never run, but exists with the same id and name => ignore
16+ if self.search_exist(cr, uid, [('identification_id', '=', data['identification_id']), ('name', '=', employee_name)]):
17+ return True
18+
19+ else:
20+ same_ids = self.search(cr, uid, [('identification_id', '=', data['identification_id']), ('name', '=', employee_name)])
21+ if same_ids and existing_id not in same_ids:
22+ # Already run on the instance, but with a different Employee ID (identification_id) than the one in this update => ignore
23+ return True
24+
25+ return False
26+
27+
28+
29 def _check_unicity(self, cr, uid, ids, context=None):
30 """
31 Check that identification_id is not used yet.
32
33=== modified file 'bin/addons/msf_outgoing/msf_outgoing.py'
34--- bin/addons/msf_outgoing/msf_outgoing.py 2018-04-03 12:31:40 +0000
35+++ bin/addons/msf_outgoing/msf_outgoing.py 2018-04-19 14:23:11 +0000
36@@ -3365,6 +3365,7 @@
37 picking = wizard.picking_id
38 new_picking_id = False
39 processed_moves = []
40+ new_split_move = {}
41 move_data = {}
42 for line in wizard.move_ids:
43 move = line.move_id
44@@ -3376,6 +3377,7 @@
45 move_data.setdefault(move.id, {
46 'original_qty': move.product_qty,
47 'processed_qty': 0.00,
48+ 'total_qty': move.product_qty,
49 })
50
51 if line.quantity <= 0.00:
52@@ -3421,10 +3423,15 @@
53 # Create a new move
54 new_move_id = move_obj.copy(cr, uid, move.id, values, context=context)
55 processed_moves.append(new_move_id)
56+ new_split_move[new_move_id] = True
57+
58 # Update the original move
59+
60+ # here we can't use move.product_qty because that value is cached on the browse_record and is not flushed by the following write
61+ move_data[move.id]['total_qty'] -= quantity
62 wr_vals = {
63- 'product_qty': move.product_qty - quantity,
64- 'product_uos_qty': move.product_qty - quantity,
65+ 'product_qty': move_data[move.id]['total_qty'],
66+ 'product_uos_qty': move_data[move.id]['total_qty'],
67 }
68 move_obj.write(cr, uid, [move.id], wr_vals, context=context)
69 else:
70@@ -3447,7 +3454,7 @@
71 # If not, create a backorder
72 need_new_picking = False
73 for move in picking.move_lines:
74- if move.state not in ('done', 'cancel') and (not move_data.get(move.id, False) or \
75+ if not new_split_move.get(move.id) and move.state not in ('done', 'cancel') and (not move_data.get(move.id, False) or \
76 move_data[move.id]['original_qty'] != move_data[move.id]['processed_qty']):
77 need_new_picking = True
78 break
79
80=== modified file 'bin/addons/sync_client/update.py'
81--- bin/addons/sync_client/update.py 2017-09-19 12:24:09 +0000
82+++ bin/addons/sync_client/update.py 2018-04-19 14:23:11 +0000
83@@ -572,16 +572,24 @@
84 row = [row[i] for i in range(len(import_fields)) if i not in bad_fields]
85
86 if result['res']: #US-852: if everything is Ok, then do import as normal
87- values.append(row)
88- update_ids.append(update.id)
89- versions.append( (update.sdref, update.version) )
90+ if obj._name == 'hr.employee' and obj._set_sync_update_as_run(cr, uid, dict(zip(import_fields, row)), update.sdref, context=context):
91+ self.write(cr, uid, update.id, {
92+ 'run': True,
93+ 'editable': False,
94+ 'execution_date': datetime.now(),
95+ 'log': 'Set as Run because this employee already exists in the instance',
96+ })
97+ else:
98+ values.append(row)
99+ update_ids.append(update.id)
100+ versions.append( (update.sdref, update.version) )
101
102- #1 conflict detection
103- if self._conflict(cr, uid, update.sdref, update.version, context=context):
104- #2 if conflict => manage conflict according rules : report conflict and how it's solve
105- index_id = eval(update.fields).index('id')
106- sd_ref = eval(update.values)[index_id]
107- logs[update.id] = "Warning: Conflict detected! in content: (%s, %r)" % (update.id, sd_ref)
108+ #1 conflict detection
109+ if self._conflict(cr, uid, update.sdref, update.version, context=context):
110+ #2 if conflict => manage conflict according rules : report conflict and how it's solve
111+ index_id = eval(update.fields).index('id')
112+ sd_ref = eval(update.values)[index_id]
113+ logs[update.id] = "Warning: Conflict detected! in content: (%s, %r)" % (update.id, sd_ref)
114 else: #US-852: if account_move_line is missing then ignore the import, and set it as not run
115 self._set_not_run(cr, uid, [update.id],
116 log=result['error_message'],
117@@ -911,7 +919,7 @@
118 except ValueError:
119 try:
120 #US-852: if account_move_line is given, then cannot use the fallback value, but exit the import!
121- # THIS FIX COULD ALSO OPEN FOR OTHER BUG, BUT CHECK IF THE RULES THAT CONTAIN THE OBJECT (HERE account_move_line)
122+ # THIS FIX COULD ALSO OPEN FOR OTHER BUG, BUT CHECK IF THE RULES THAT CONTAIN THE OBJECT (HERE account_move_line)
123 if 'account_move_line' in xmlid:
124 m, sep, sdref = xmlid.partition('.')
125 if self.search(cr, uid, [('sdref', '=', sdref), ('run', '=', False)], order='NO_ORDER', context=context):

Subscribers

People subscribed via source and target branches