Merge lp:~julie-w/unifield-server/US-6100 into lp:unifield-server
- US-6100
- Merge into trunk
Proposed by
jftempo
| Status | Merged |
|---|---|
| Merged at revision | 5518 |
| Proposed branch | lp:~julie-w/unifield-server/US-6100 |
| Merge into | lp:unifield-server |
| Diff against target | 465 lines (+124/-114), 3 files modified: bin/addons/account_corrections/wizard/analytic_distribution_wizard.py (+111/-113), bin/addons/analytic_override/analytic_line.py (+13/-0), bin/addons/msf_profile/i18n/fr_MF.po (+0/-1) |
| To merge this branch | bzr merge lp:~julie-w/unifield-server/US-6100 |
| Related bugs | |
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| UniField Reviewer Team | Pending | | |

Review via email: mp+372875@code.launchpad.net
Commit message
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/addons/account_corrections/wizard/analytic_distribution_wizard.py' |
2 | --- bin/addons/account_corrections/wizard/analytic_distribution_wizard.py 2019-04-16 14:12:31 +0000 |
3 | +++ bin/addons/account_corrections/wizard/analytic_distribution_wizard.py 2019-09-17 12:03:20 +0000 |
4 | @@ -136,23 +136,39 @@ |
5 | def get_entry_seq(entry_seq_data): |
6 | res = entry_seq_data.get('sequence', False) |
7 | if not res: |
8 | - seqnum = self.pool.get('ir.sequence').get_id( |
9 | - cr, uid, journal.sequence_id.id, |
10 | - context={'fiscalyear_id': period.fiscalyear_id.id}) |
11 | + seqnum = self.pool.get('ir.sequence').get_id(cr, uid, journal.sequence_id.id, |
12 | + context={'fiscalyear_id': period.fiscalyear_id.id}) |
13 | res = "%s-%s-%s" % (move_prefix, code, seqnum) |
14 | entry_seq_data['sequence'] = res |
15 | return res |
16 | |
17 | - |
18 | if context is None: |
19 | context = {} |
20 | # Prepare some values |
21 | wizard = self.browse(cr, uid, wizard_id) |
22 | ad_obj = self.pool.get('analytic.distribution') |
23 | + ana_line_obj = self.pool.get('account.analytic.line') |
24 | company_currency_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.currency_id.id |
25 | ml = wizard.move_line_id |
26 | orig_date = ml.source_date or ml.date |
27 | orig_document_date = ml.document_date |
28 | + posting_date = wizard.date |
29 | + working_period_id = [] |
30 | + new_line_ids = [] |
31 | + entry_seq_data = {} |
32 | + |
33 | + # get the last reversal AJI created BEFORE this correction |
34 | + biggest_reversal_aji_ids = ana_line_obj.search(cr, uid, [('move_id', '=', ml.id), |
35 | + ('is_reversal', '=', True)], |
36 | + order='id DESC', limit=1, context=context) |
37 | + if biggest_reversal_aji_ids: |
38 | + biggest_reversal_aji = ana_line_obj.browse(cr, uid, biggest_reversal_aji_ids[0], |
39 | + fields_to_fetch=['period_id', 'entry_sequence', 'date'], context=context) |
40 | + # use the period / date and Entry Sequence of the reversal AJI in case its period is still open |
41 | + if biggest_reversal_aji.period_id and biggest_reversal_aji.period_id.state == 'draft': # Open |
42 | + working_period_id = [biggest_reversal_aji.period_id.id] |
43 | + posting_date = biggest_reversal_aji.date |
44 | + entry_seq_data['sequence'] = biggest_reversal_aji.entry_sequence |
45 | |
46 | jtype = 'correction' |
47 | if wizard.move_line_id.account_id and wizard.move_line_id.account_id.type_for_register == 'donation': |
48 | @@ -169,7 +185,6 @@ |
49 | to_reverse = [] |
50 | old_line_ok = [] |
51 | any_reverse = False |
52 | - ana_obj = self.pool.get('account.analytic.line') |
53 | # Prepare journal and period information for entry sequences |
54 | journal_sql = """ |
55 | SELECT id, code |
56 | @@ -184,12 +199,11 @@ |
57 | journal_id = journal_sql_res[0] |
58 | code = journal_sql_res[1] |
59 | journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context) |
60 | - period_ids = self.pool.get('account.period').get_period_from_date(cr, uid, wizard.date) |
61 | + period_ids = self.pool.get('account.period').get_period_from_date(cr, uid, date=posting_date, context=context) |
62 | if not period_ids: |
63 | - raise osv.except_osv(_('Warning'), _('No period found for creating sequence on the given date: %s') % (wizard.date or '')) |
64 | + raise osv.except_osv(_('Warning'), _('No period found for creating sequence on the given date: %s') % (posting_date or '')) |
65 | period = self.pool.get('account.period').browse(cr, uid, period_ids)[0] |
66 | move_prefix = self.pool.get('res.users').browse(cr, uid, uid, context).company_id.instance_id.move_prefix |
67 | - entry_seq_data = {} |
68 | |
69 | # US-676: check wizard lines total matches JI amount |
70 | # the wizard already check distri is 100% allocated |
71 | @@ -201,12 +215,6 @@ |
72 | # => apply these deduce only if: lines are created as some line are |
73 | # created/resplit. do nothing if only cc/dest of lines changes. |
74 | total_rounded_amount = 0. |
75 | - greater_amount = { # US-676 |
76 | - 'wl': False, # wizard line with greater amount |
77 | - 'aji_id': False, # related aji: not touched wizard line one or created, overrided, reversed |
78 | - 'amount': 0., # greater amount |
79 | - 'gap_amount': 0, # gap amount to fix from greater amount line |
80 | - } |
81 | ##### |
82 | ## FUNDING POOL |
83 | ### |
84 | @@ -221,13 +229,13 @@ |
85 | # consistency |
86 | is_HQ_origin = False |
87 | for old_line_id in old_line_ids: |
88 | - original_al_id = ana_obj.search(cr, uid, [ |
89 | + original_al_id = ana_line_obj.search(cr, uid, [ |
90 | ('distrib_line_id', '=', 'funding.pool.distribution.line,%d' % (old_line_id, )), |
91 | ('is_reversal', '=', False), |
92 | ('is_reallocated', '=', False), |
93 | ]) |
94 | if original_al_id and len(original_al_id) == 1: |
95 | - original_al = ana_obj.browse(cr, uid, original_al_id[0], context) |
96 | + original_al = ana_line_obj.browse(cr, uid, original_al_id[0], context) |
97 | # AJI correction journal and HQ JI |
98 | if original_al \ |
99 | and original_al.move_id and \ |
100 | @@ -256,11 +264,11 @@ |
101 | |
102 | if old_line: |
103 | #US-714: For HQ Entries, always create the COR and REV even the period is closed |
104 | - original_al_id = ana_obj.search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%old_line.id), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
105 | + original_al_id = ana_line_obj.search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%old_line.id), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
106 | |
107 | is_HQ_entries = False |
108 | if original_al_id and len(original_al_id) == 1: |
109 | - original_al = ana_obj.browse(cr, uid, original_al_id[0], context) |
110 | + original_al = ana_line_obj.browse(cr, uid, original_al_id[0], context) |
111 | if original_al.journal_id.type == 'hq': |
112 | is_HQ_entries = True |
113 | |
114 | @@ -289,15 +297,7 @@ |
115 | to_override.append(wiz_line) |
116 | |
117 | old_line_ok.append(old_line.id) |
118 | - total_rounded_amount += round(wiz_line.amount, 2) |
119 | - if wiz_line.amount > greater_amount['amount']: |
120 | - greater_amount.update({ |
121 | - 'amount': wiz_line.amount, |
122 | - 'wl':wiz_line, |
123 | - }) |
124 | - match_amount_diff = total_rounded_amount - abs(wizard.amount) |
125 | - if abs(match_amount_diff) > 0.001: |
126 | - greater_amount['gap_amount'] = match_amount_diff |
127 | + |
128 | to_reverse_ids = [] |
129 | for wiz_line in self.pool.get('funding.pool.distribution.line').browse(cr, uid, [x for x in old_line_ids if x not in old_line_ok]): |
130 | # distribution line deleted by user |
131 | @@ -309,11 +309,11 @@ |
132 | #to_reverse_ids = ana_obj.search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%wiz_line.id)]) |
133 | if period.state != 'draft': |
134 | raise osv.except_osv(_('Error'), _('Period (%s) is not open.') % (period.name,)) |
135 | - reversed_ids = ana_obj.reverse(cr, uid, to_reverse_ids, posting_date=wizard.date) |
136 | + reversed_ids = ana_line_obj.reverse(cr, uid, to_reverse_ids, posting_date=posting_date) |
137 | # Set initial lines as non correctible |
138 | - ana_obj.write(cr, uid, to_reverse_ids, {'is_reallocated': True}) |
139 | + ana_line_obj.write(cr, uid, to_reverse_ids, {'is_reallocated': True}) |
140 | # Set right journal and right entry sequence |
141 | - ana_obj.write(cr, uid, reversed_ids, {'journal_id': correction_journal_id}) |
142 | + ana_line_obj.write(cr, uid, reversed_ids, {'journal_id': correction_journal_id}) |
143 | for reversed_id in reversed_ids: |
144 | cr.execute('update account_analytic_line set entry_sequence = %s where id = %s', (get_entry_seq(entry_seq_data), reversed_id) ) |
145 | # delete the distribution line |
146 | @@ -325,9 +325,9 @@ |
147 | keep_seq_and_corrected = False |
148 | period_closed = ml.period_id and ml.period_id.state and ml.period_id.state in ['done', 'mission-closed'] or ml.have_an_historic or False |
149 | if (period_closed or is_HQ_origin) and to_create and (to_override or to_delete or any_reverse): |
150 | - already_corr_ids = ana_obj.search(cr, uid, [('distribution_id', '=', distrib_id), ('last_corrected_id', '!=', False)]) |
151 | + already_corr_ids = ana_line_obj.search(cr, uid, [('distribution_id', '=', distrib_id), ('last_corrected_id', '!=', False)]) |
152 | if already_corr_ids: |
153 | - for ana in ana_obj.read(cr, uid, already_corr_ids, ['entry_sequence', 'last_corrected_id', 'date', 'ref', 'reversal_origin']): |
154 | + for ana in ana_line_obj.read(cr, uid, already_corr_ids, ['entry_sequence', 'last_corrected_id', 'date', 'ref', 'reversal_origin']): |
155 | if ana['entry_sequence'] and ana['last_corrected_id']: |
156 | rev_name = ana['reversal_origin'] and ana['reversal_origin'][1] or ana['last_corrected_id'] and ana['last_corrected_id'][1] or False |
157 | keep_seq_and_corrected = (ana['entry_sequence'], ana['last_corrected_id'][0], ana['date'], ana['ref'], rev_name) |
158 | @@ -356,14 +356,17 @@ |
159 | name = False |
160 | if period_closed or is_HQ_origin: |
161 | if period_closed or is_HQ_origin: |
162 | - create_date = wizard.date |
163 | - name = self.pool.get('account.analytic.line').join_without_redundancy(ml.name, 'COR') |
164 | + create_date = posting_date |
165 | + name = ana_line_obj.join_without_redundancy(ml.name, 'COR') |
166 | if keep_seq_and_corrected: |
167 | create_date = keep_seq_and_corrected[2] # is_HQ_origin keep date too |
168 | if keep_seq_and_corrected[4]: |
169 | - name = self.pool.get('account.analytic.line').join_without_redundancy(keep_seq_and_corrected[4], 'COR') |
170 | + name = ana_line_obj.join_without_redundancy(keep_seq_and_corrected[4], 'COR') |
171 | |
172 | created_analytic_line_ids = self.pool.get('funding.pool.distribution.line').create_analytic_lines(cr, uid, [new_distrib_line], ml.id, date=create_date, document_date=orig_document_date, source_date=orig_date, name=name, context=context) |
173 | + new_line_ids.extend(created_analytic_line_ids.values()) |
174 | + working_period_id = working_period_id or \ |
175 | + self.pool.get('account.period').get_period_from_date(cr, uid, date=create_date, context=context) |
176 | # Set right analytic correction journal to these lines |
177 | if period_closed or is_HQ_origin: |
178 | sql_to_cor = ['journal_id=%s'] |
179 | @@ -375,8 +378,6 @@ |
180 | cr.execute('update account_analytic_line set '+','.join(sql_to_cor)+' where id = %s', # not_a_user_entry |
181 | sql_data) |
182 | have_been_created.append(created_analytic_line_ids[new_distrib_line]) |
183 | - if created_analytic_line_ids and greater_amount['gap_amount'] and greater_amount['wl'] and greater_amount['wl'].id == line.id: |
184 | - greater_amount['aji_id'] = created_analytic_line_ids[created_analytic_line_ids.keys()[0]] |
185 | |
186 | ##### |
187 | ## FP: TO DELETE |
188 | @@ -385,33 +386,32 @@ |
189 | # delete distrib line |
190 | self.pool.get('funding.pool.distribution.line').unlink(cr, uid, [line.id]) |
191 | # delete associated analytic line |
192 | - to_delete_ids = self.pool.get('account.analytic.line').search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%line.id), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
193 | - self.pool.get('account.analytic.line').unlink(cr, uid, to_delete_ids) |
194 | + to_delete_ids = ana_line_obj.search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%line.id), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
195 | + ana_line_obj.unlink(cr, uid, to_delete_ids) |
196 | |
197 | ##### |
198 | ## FP: TO REVERSE |
199 | ### |
200 | for line in to_reverse: |
201 | # reverse the line |
202 | - to_reverse_ids = self.pool.get('account.analytic.line').search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%line.distribution_line_id.id), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
203 | + to_reverse_ids = ana_line_obj.search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%line.distribution_line_id.id), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
204 | |
205 | # get the original sequence |
206 | - orig_line = self.pool.get('account.analytic.line').browse(cr, uid, to_reverse_ids)[0] |
207 | + orig_line = ana_line_obj.browse(cr, uid, to_reverse_ids)[0] |
208 | |
209 | # UTP-943: Set wizard date as date for REVERSAL AND CORRECTION lines |
210 | - reversed_id = self.pool.get('account.analytic.line').reverse(cr, uid, to_reverse_ids[0], posting_date=wizard.date, context=context)[0] |
211 | + reversed_id = ana_line_obj.reverse(cr, uid, to_reverse_ids[0], posting_date=posting_date, context=context)[0] |
212 | # Add reversal origin link (to not loose it). last_corrected_id is to prevent case where you do a reverse a line that have been already corrected |
213 | |
214 | - self.pool.get('account.analytic.line').write(cr, uid, [reversed_id], {'reversal_origin': to_reverse_ids[0], 'last_corrected_id': False, 'journal_id': correction_journal_id, 'ref': orig_line.entry_sequence}) |
215 | + ana_line_obj.write(cr, uid, [reversed_id], {'reversal_origin': to_reverse_ids[0], 'last_corrected_id': False, 'journal_id': correction_journal_id, 'ref': orig_line.entry_sequence}) |
216 | # Mark old lines as non reallocatable (ana_ids): why reverse() don't set this flag ? |
217 | - self.pool.get('account.analytic.line').write(cr, uid, [to_reverse_ids[0]], {'is_reallocated': True}) |
218 | + ana_line_obj.write(cr, uid, [to_reverse_ids[0]], {'is_reallocated': True}) |
219 | cr.execute('update account_analytic_line set entry_sequence = %s where id = %s', (get_entry_seq(entry_seq_data), reversed_id) ) |
220 | |
221 | # update the distrib line |
222 | name = False |
223 | fp_distrib_obj = self.pool.get('funding.pool.distribution.line') |
224 | if to_reverse_ids: |
225 | - ana_line_obj = self.pool.get('account.analytic.line') |
226 | name = ana_line_obj.join_without_redundancy(ana_line_obj.read(cr, uid, to_reverse_ids[0], ['name'])['name'], 'COR') |
227 | fp_distrib_obj.write(cr, uid, [line.distribution_line_id.id], { |
228 | 'analytic_id': line.analytic_id.id, |
229 | @@ -420,20 +420,19 @@ |
230 | 'destination_id': line.destination_id.id, |
231 | }) |
232 | # UTP-943: Check that new ana line is on an open period |
233 | - correction_period_ids = self.pool.get('account.period').get_period_from_date(cr, uid, wizard.date) |
234 | - if not correction_period_ids: |
235 | - raise osv.except_osv(_('Error'), _('No period found for the given date: %s') % (wizard.date,)) |
236 | + correction_period_ids = period_ids |
237 | for cp in self.pool.get('account.period').browse(cr, uid, correction_period_ids): |
238 | if cp.state != 'draft': |
239 | raise osv.except_osv(_('Error'), _('Period (%s) is not open.') % (cp.name,)) |
240 | # Create the new ana line |
241 | - ret = fp_distrib_obj.create_analytic_lines(cr, uid, line.distribution_line_id.id, ml.id, date=wizard.date, document_date=orig_document_date, source_date=orig_date, name=name,context=context) |
242 | + ret = fp_distrib_obj.create_analytic_lines(cr, uid, line.distribution_line_id.id, ml.id, date=posting_date, |
243 | + document_date=orig_document_date, source_date=orig_date, name=name, context=context) |
244 | + new_line_ids.extend(ret.values()) |
245 | + working_period_id = working_period_id or period_ids |
246 | # Add link to first analytic lines |
247 | for ret_id in ret: |
248 | - self.pool.get('account.analytic.line').write(cr, uid, [ret[ret_id]], {'last_corrected_id': to_reverse_ids[0], 'journal_id': correction_journal_id, 'ref': orig_line.entry_sequence }) |
249 | + ana_line_obj.write(cr, uid, [ret[ret_id]], {'last_corrected_id': to_reverse_ids[0], 'journal_id': correction_journal_id, 'ref': orig_line.entry_sequence }) |
250 | cr.execute('update account_analytic_line set entry_sequence = %s where id = %s', (get_entry_seq(entry_seq_data), ret[ret_id]) ) |
251 | - if ret and greater_amount['gap_amount'] and greater_amount['wl'] and greater_amount['wl'].id == line.id: |
252 | - greater_amount['aji_id'] = ret[ret.keys()[0]] |
253 | # UFTP-194: Set missing entry sequence for created analytic lines |
254 | if have_been_created and to_reverse_ids: |
255 | cr.execute('update account_analytic_line set entry_sequence = %s, last_corrected_id = %s where id in %s', (get_entry_seq(entry_seq_data), to_reverse_ids[0], tuple(have_been_created))) |
256 | @@ -443,7 +442,7 @@ |
257 | ### |
258 | for line in to_override: |
259 | # update the ana line |
260 | - to_override_ids = self.pool.get('account.analytic.line').search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%line.distribution_line_id.id), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
261 | + to_override_ids = ana_line_obj.search(cr, uid, [('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%line.distribution_line_id.id), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
262 | ctx = {'date': orig_date} |
263 | amount_cur = (ml.credit_currency - ml.debit_currency) * line.percentage / 100 |
264 | amount = self.pool.get('res.currency').compute(cr, uid, ml.currency_id.id, company_currency_id, amount_cur, round=False, context=ctx) |
265 | @@ -457,13 +456,14 @@ |
266 | } |
267 | |
268 | # get the actual AJI date (can differ from the ML orig_date if an AD correction has already been made) |
269 | - aal_date = ana_obj.browse(cr, uid, to_override_ids[0], fields_to_fetch=['date'], context=context).date |
270 | + aal_date = ana_line_obj.browse(cr, uid, to_override_ids[0], fields_to_fetch=['date'], context=context).date |
271 | # original dates are kept but we add them in vals to trigger the check on dates with the new AD |
272 | vals.update({ |
273 | 'date': aal_date, |
274 | 'document_date': orig_document_date, |
275 | }) |
276 | - self.pool.get('account.analytic.line').write(cr, uid, to_override_ids, vals) |
277 | + working_period_id = working_period_id or self.pool.get('account.period').get_period_from_date(cr, uid, date=aal_date, context=context) |
278 | + ana_line_obj.write(cr, uid, to_override_ids, vals) |
279 | # update the distib line |
280 | self.pool.get('funding.pool.distribution.line').write(cr, uid, [line.distribution_line_id.id], { |
281 | 'analytic_id': line.analytic_id.id, |
282 | @@ -471,60 +471,58 @@ |
283 | 'percentage': line.percentage, |
284 | 'destination_id': line.destination_id.id |
285 | }) |
286 | - if greater_amount['gap_amount'] and greater_amount['wl'] and greater_amount['wl'].id == line.id: |
287 | - greater_amount['aji_id'] = to_override_ids[0] |
288 | - |
289 | - ##### |
290 | - # US-676 |
291 | - if greater_amount['gap_amount']: |
292 | - aal_obj = self.pool.get('account.analytic.line') |
293 | - |
294 | - if not greater_amount['aji_id'] and greater_amount['wl']: |
295 | - # untouched greater amount, get analytic line id: |
296 | - # (not in to_create, to_delete, to_override, to_reverse) |
297 | - aji_ids = aal_obj.search(cr, uid, [ |
298 | - ('distrib_line_id', '=', 'funding.pool.distribution.line,%d'%greater_amount['wl'].distribution_line_id.id), |
299 | - ('is_reversal', '=', False), |
300 | - ('is_reallocated', '=', False), |
301 | - ]) |
302 | - if aji_ids: |
303 | - greater_amount['aji_id'] = aji_ids[0] |
304 | - |
305 | - if greater_amount['aji_id']: |
306 | - # US-676 greater amount update to fix (deduce) rounding gap |
307 | - # we read the aji created for distri then fix it |
308 | - aji_rec = aal_obj.read(cr, uid, [greater_amount['aji_id']], |
309 | - ['amount_currency', 'currency_id', 'source_date', 'date'], context=context)[0] |
310 | - if aji_rec: |
311 | - fix_aji_old_amount = aji_rec['amount_currency'] |
312 | - fix_aji_currency_id = aji_rec['currency_id'] \ |
313 | - and aji_rec['currency_id'][0] or False |
314 | - |
315 | - # fix booking amount |
316 | - fix_aji_amount_currency = round(greater_amount['wl'].amount, 2) \ |
317 | - - greater_amount['gap_amount'] |
318 | - if fix_aji_old_amount < 0: |
319 | - fix_aji_amount_currency *= -1 |
320 | - aji_fix_vals = { |
321 | - 'amount_currency': fix_aji_amount_currency, |
322 | - } |
323 | - |
324 | - # then recompute functional amount |
325 | - if fix_aji_currency_id: |
326 | - new_context = context.copy() |
327 | - if aji_rec['source_date']: |
328 | - new_context['date'] = aji_rec['source_date'] |
329 | - else: |
330 | - new_context['date'] = aji_rec['date'] |
331 | - aji_fix_vals['amount'] = \ |
332 | - self.pool.get('res.currency').compute(cr, uid, |
333 | - fix_aji_currency_id, company_currency_id, |
334 | - fix_aji_amount_currency, round=False, |
335 | - context=new_context) |
336 | - |
337 | - # fix aji |
338 | - aal_obj.write(cr, uid, [greater_amount['aji_id']], |
339 | - aji_fix_vals, context=context) |
340 | + |
341 | + # US-6100 in case of a corr. the adjustment should be made on the biggest COR amount |
342 | + # instead of the biggest amount of all AJIs (cf. don't modify the entry being corrected) |
343 | + has_generated_cor = False |
344 | + if new_line_ids and (to_reverse or any_reverse): # check if COR lines have been generated |
345 | + has_generated_cor = True |
346 | + # compute the adjustment amount |
347 | + all_aji_ids = ana_line_obj.search(cr, uid, [ |
348 | + ('move_id', '=', ml.id), |
349 | + ('is_reversal', '=', False), |
350 | + ('is_reallocated', '=', False), |
351 | + ], order='NO_ORDER', context=context) |
352 | + max_line = {'amount': 0, 'aji_bro': False} |
353 | + aji_fields = ['amount_currency', 'period_id', 'currency_id', 'source_date', 'date'] |
354 | + for aji in ana_line_obj.browse(cr, uid, all_aji_ids, fields_to_fetch=aji_fields, context=context): |
355 | + total_rounded_amount += round(abs(aji.amount_currency or 0.0), 2) |
356 | + if has_generated_cor and aji.id in new_line_ids and abs(aji.amount_currency or 0.0) > max_line['amount']: |
357 | + max_line = {'aji_bro': aji, 'amount': abs(aji.amount_currency or 0.0)} |
358 | + elif not has_generated_cor and working_period_id and aji.period_id.id == working_period_id[0] and \ |
359 | + abs(aji.amount_currency or 0.0) > max_line['amount']: |
360 | + max_line = {'aji_bro': aji, 'amount': abs(aji.amount_currency or 0.0)} |
361 | + |
362 | + amount_diff = total_rounded_amount - abs(wizard.amount) |
363 | + if abs(amount_diff) > 10 ** -3 and max_line['aji_bro']: |
364 | + |
365 | + # get data from the biggest AJI, on which the adjustment will be applied |
366 | + fix_aji_old_amount = max_line['aji_bro'].amount_currency or 0.0 |
367 | + fix_aji_currency_id = max_line['aji_bro'].currency_id and max_line['aji_bro'].currency_id.id or False |
368 | + |
369 | + # fix booking amount |
370 | + fix_aji_amount_currency = round(abs(fix_aji_old_amount), 2) - amount_diff |
371 | + if fix_aji_old_amount < 0: |
372 | + fix_aji_amount_currency *= -1 |
373 | + aji_fix_vals = { |
374 | + 'amount_currency': fix_aji_amount_currency, |
375 | + } |
376 | + |
377 | + # then recompute functional amount |
378 | + if fix_aji_currency_id: |
379 | + new_context = context.copy() |
380 | + if max_line['aji_bro'].source_date: |
381 | + new_context['date'] = max_line['aji_bro'].source_date |
382 | + else: |
383 | + new_context['date'] = max_line['aji_bro'].date |
384 | + aji_fix_vals['amount'] = \ |
385 | + self.pool.get('res.currency').compute(cr, uid, |
386 | + fix_aji_currency_id, company_currency_id, |
387 | + fix_aji_amount_currency, round=False, |
388 | + context=new_context) |
389 | + |
390 | + # fix aji |
391 | + ana_line_obj.write(cr, uid, [max_line['aji_bro'].id], aji_fix_vals, context=context) |
392 | |
393 | ##### |
394 | ## Set move line as corrected upstream if needed |
395 | @@ -566,16 +564,16 @@ |
396 | # delete distrib line |
397 | self.pool.get(obj_name).unlink(cr, uid, [line.id]) |
398 | # delete associated analytic line |
399 | - to_delete_ids = self.pool.get('account.analytic.line').search(cr, uid, [('distrib_line_id', '=', '%s,%d' % (obj_name,line.id))]) |
400 | - self.pool.get('account.analytic.line').unlink(cr, uid, to_delete_ids) |
401 | + to_delete_ids = ana_line_obj.search(cr, uid, [('distrib_line_id', '=', '%s,%d' % (obj_name,line.id))]) |
402 | + ana_line_obj.unlink(cr, uid, to_delete_ids) |
403 | # Override those that should be |
404 | for line in to_override: |
405 | # update the ana line |
406 | - to_override_ids = self.pool.get('account.analytic.line').search(cr, uid, [('distrib_line_id', '=', '%s,%d' % (obj_name, line.distribution_line_id.id)), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
407 | + to_override_ids = ana_line_obj.search(cr, uid, [('distrib_line_id', '=', '%s,%d' % (obj_name, line.distribution_line_id.id)), ('is_reversal', '=', False), ('is_reallocated', '=', False)]) |
408 | ctx = {'date': orig_date} |
409 | amount_cur = (ml.credit_currency - ml.debit_currency) * line.percentage / 100 |
410 | amount = self.pool.get('res.currency').compute(cr, uid, ml.currency_id.id, company_currency_id, amount_cur, round=False, context=ctx) |
411 | - self.pool.get('account.analytic.line').write(cr, uid, to_override_ids, { |
412 | + ana_line_obj.write(cr, uid, to_override_ids, { |
413 | 'account_id': line.analytic_id.id, |
414 | 'amount_currency': amount_cur, |
415 | 'amount': amount, |
416 | |
417 | === modified file 'bin/addons/analytic_override/analytic_line.py' |
418 | --- bin/addons/analytic_override/analytic_line.py 2019-08-22 12:42:07 +0000 |
419 | +++ bin/addons/analytic_override/analytic_line.py 2019-09-17 12:03:20 +0000 |
420 | @@ -236,6 +236,17 @@ |
421 | view['arch'] = etree.tostring(tree) |
422 | return view |
423 | |
424 | + def _round_amounts(self, vals): |
425 | + """ |
426 | + Updates vals with the booking and fctal amounts rounded to 2 digits |
427 | + This avoids the rounding to be done in the SQL query as it would be different from the one done in Python |
428 | + Cf: round(1.125, 2) ==> 1.13 / '%.2lf' % 1.125 ==> 1.12 |
429 | + """ |
430 | + if vals.get('amount_currency'): |
431 | + vals['amount_currency'] = round(vals['amount_currency'], 2) |
432 | + if vals.get('amount'): |
433 | + vals['amount'] = round(vals['amount'], 2) |
434 | + |
435 | def create(self, cr, uid, vals, context=None): |
436 | entry_sequence_sync = None |
437 | if vals.get('entry_sequence',False): |
438 | @@ -251,6 +262,7 @@ |
439 | invoice_line_obj = self.pool.get('account.invoice.line') |
440 | aal_obj = self.pool.get('account.analytic.line') |
441 | aal_account_obj = self.pool.get('account.analytic.account') |
442 | + self._round_amounts(vals) |
443 | # SP-50: If data is synchronized from another instance, just create it with the given document_date |
444 | if context.get('update_mode') in ['init', 'update']: |
445 | if not context.get('sync_update_execution', False) or not vals.get('document_date', False): |
446 | @@ -299,6 +311,7 @@ |
447 | context = {} |
448 | if isinstance(ids, (int, long)): |
449 | ids = [ids] |
450 | + self._round_amounts(vals) |
451 | for l in self.browse(cr, uid, ids): |
452 | vals2 = vals.copy() |
453 | for el in ['account_id', 'cost_center_id', 'destination_id']: |
454 | |
455 | === modified file 'bin/addons/msf_profile/i18n/fr_MF.po' |
456 | --- bin/addons/msf_profile/i18n/fr_MF.po 2019-08-22 10:11:24 +0000 |
457 | +++ bin/addons/msf_profile/i18n/fr_MF.po 2019-09-17 12:03:20 +0000 |
458 | @@ -50674,7 +50674,6 @@ |
459 | msgstr "Traitement quantité %d pour %s est plus grande que la quantité disponible %d !" |
460 | |
461 | #. modules: account_corrections, sync_client |
462 | -#: code:addons/account_corrections/wizard/analytic_distribution_wizard.py:415 |
463 | #: code:addons/account_corrections/wizard/journal_items_corrections.py:397 |
464 | #: code:addons/sync_client/special_handling.py:56 |
465 | #, python-format |