Merge lp:~julie-w/unifield-server/US-4703 into lp:unifield-server
- US-4703
- Merge into trunk
Proposed by: jftempo
Status: Merged
Merged at revision: 5399
Proposed branch: lp:~julie-w/unifield-server/US-4703
Merge into: lp:unifield-server
Diff against target: 840 lines (+324/-346), 2 files modified:
  - bin/addons/vertical_integration/report/hq_report_oca.py (+314/-345)
  - bin/addons/vertical_integration/wizard/wizard_hq_report_oca.py (+10/-1)
To merge this branch: bzr merge lp:~julie-w/unifield-server/US-4703
Related bugs: (none listed)
Reviewer: UniField Reviewer Team — Review type: (unspecified) — Date requested: (unspecified) — Status: Pending
Review via email: mp+357934@code.launchpad.net
Commit message
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/addons/vertical_integration/report/hq_report_oca.py' |
2 | --- bin/addons/vertical_integration/report/hq_report_oca.py 2017-02-13 16:32:30 +0000 |
3 | +++ bin/addons/vertical_integration/report/hq_report_oca.py 2019-06-07 09:34:36 +0000 |
4 | @@ -28,13 +28,11 @@ |
5 | import os |
6 | from osv import osv |
7 | from tools.translate import _ |
8 | +from time import strptime |
9 | |
10 | from report import report_sxw |
11 | |
12 | |
13 | -ZERO_CELL_CONTENT = "0.0" |
14 | - |
15 | - |
16 | class hq_report_oca(report_sxw.report_sxw): |
17 | |
18 | def __init__(self, name, table, rml=False, parser=report_sxw.rml_parse, header='external', store=False): |
19 | @@ -45,7 +43,28 @@ |
20 | return st.encode('utf8') |
21 | return st |
22 | |
23 | - def translate_country(self, cr, uid, pool, browse_instance, context={}): |
24 | + def translate_account(self, cr, uid, pool, browse_account, context=None): |
25 | + """ |
26 | + Returns the "HQ System Account Code" of the account in parameter if it exists, else returns the standard account code |
27 | + """ |
28 | + if context is None: |
29 | + context = {} |
30 | + mapping_obj = pool.get('account.export.mapping') |
31 | + if browse_account: |
32 | + mapping_ids = mapping_obj.search(cr, uid, [('account_id', '=', browse_account.id)], context=context) |
33 | + if len(mapping_ids) > 0: |
34 | + mapping = mapping_obj.browse(cr, uid, mapping_ids[0], fields_to_fetch=['mapping_value'], context=context) |
35 | + return mapping.mapping_value |
36 | + else: |
37 | + return browse_account.code |
38 | + return "" |
39 | + |
40 | + def translate_country(self, cr, uid, pool, browse_instance, context=None): |
41 | + """ |
42 | + Returns the "HQ System Country Code" of the instance in parameter if it exists, else returns 0 |
43 | + """ |
44 | + if context is None: |
45 | + context = {} |
46 | mapping_obj = pool.get('country.export.mapping') |
47 | if browse_instance: |
48 | mapping_ids = mapping_obj.search(cr, uid, [('instance_id', '=', browse_instance.id)], context=context) |
49 | @@ -54,69 +73,69 @@ |
50 | return mapping.mapping_value |
51 | return "0" |
52 | |
53 | - def create_counterpart(self, cr, uid, line): |
54 | - """ third report: up balances """ |
55 | - # method to create counterpart line |
56 | - return line[:2] + \ |
57 | - ["20750", |
58 | - "0", # before US-274/7 was expat EMP identification line[3] |
59 | - "0", |
60 | - line[5], # expat employee name or "0" |
61 | - line[6], |
62 | - line[7], |
63 | - line[9], |
64 | - line[8]] + line[10:] |
65 | - |
66 | - def create_subtotal(self, cr, uid, line_key, |
67 | - line_debit, line_functional_debit, line_functional_debit_no_ccy_adj, |
68 | - counterpart_date, country_code, sequence_number): |
69 | - """ third report: up balances """ |
70 | + def create_subtotal(self, cr, uid, line_key, line_debit, counterpart_date, period, department_info, field_activity, context=None): |
71 | + if context is None: |
72 | + context = {} |
73 | + pool = pooler.get_pool(cr.dbname) |
74 | + curr_obj = pool.get('res.currency') |
75 | + rate_obj = pool.get('res.currency.rate') |
76 | # method to create subtotal + counterpart line |
77 | - if len(line_key) > 2 and line_debit != 0.0 and line_functional_debit != 0.0: |
78 | - # US-118: func debit with no FXA currency adjustement entries |
79 | - # compute subtotal line inverted rate with no FXA entry: |
80 | - # no booking amount but funct one then cause a wrong balance |
81 | - # for ccy inverted rate computation |
82 | - rate = round(1 / (line_debit / line_functional_debit_no_ccy_adj), 8) |
83 | - return [["01", |
84 | - country_code, |
85 | + period_code = period.code or "" |
86 | + if len(line_key) > 1 and line_debit != 0.0: |
87 | + currency = curr_obj.browse(cr, uid, line_key[1], context=context) |
88 | + # rate at the first day of the selected period |
89 | + rate = 0 |
90 | + rate_ids = rate_obj.search(cr, uid, [('currency_id', '=', currency.id), ('name', '<=', period.date_start)], |
91 | + order='name DESC', limit=1, context=context) |
92 | + if rate_ids: |
93 | + rate = rate_obj.browse(cr, uid, rate_ids[0], fields_to_fetch=['rate'], context=context).rate |
94 | + # Description for the line |
95 | + if line_key[0] == "1000 0000": |
96 | + description = "Mvts_BANK_" + period_code + "_" + currency.name |
97 | + elif line_key[0] == "1000 0001": |
98 | + description = "Mvts_CASH_" + period_code + "_" + currency.name |
99 | + else: |
100 | + mapping_obj = pool.get('account.export.mapping') |
101 | + account_values = "" |
102 | + mapping_ids = mapping_obj.search(cr, uid, [('mapping_value', '=', line_key[0])], context=context) |
103 | + for mapping in mapping_obj.browse(cr, uid, mapping_ids, fields_to_fetch=['account_id'], context=context): |
104 | + if account_values != "": |
105 | + account_values += "-" |
106 | + account_values += mapping.account_id.code |
107 | + description = "Mvts_" + account_values + period_code + "_" + currency.name |
108 | + |
109 | + return [["", |
110 | + "", |
111 | + "", |
112 | + description, |
113 | + "", |
114 | + counterpart_date, |
115 | + counterpart_date, |
116 | + period_code, |
117 | line_key[0], |
118 | - "0", |
119 | - "0", |
120 | - counterpart_date, |
121 | - line_key[1], |
122 | + "", |
123 | + department_info, |
124 | + "", |
125 | + "", |
126 | + "", |
127 | rate, |
128 | - line_debit > 0 and round(line_debit, 2) or "", |
129 | - line_debit < 0 and round(-line_debit, 2) or "", |
130 | - sequence_number, |
131 | - "Subtotal - " + line_key[0] + " - " + line_key[1] + " - " + line_key[2], |
132 | - "", |
133 | - "0", |
134 | - counterpart_date, |
135 | - "0"] |
136 | - ,["01", |
137 | - country_code, |
138 | - "20750", |
139 | - "0", |
140 | - "0", |
141 | - counterpart_date, |
142 | - line_key[1], |
143 | - rate, |
144 | - line_debit < 0 and round(-line_debit, 2) or "", |
145 | - line_debit > 0 and round(line_debit, 2) or "", |
146 | - sequence_number, |
147 | - "Automatic counterpart for " + line_key[0] + " - " + line_key[1] + " - " + line_key[2], |
148 | - "", |
149 | - "0", |
150 | - counterpart_date, |
151 | - "0"]] |
152 | + line_debit > 0 and round(line_debit, 2) or "0.00", |
153 | + line_debit > 0 and "0.00" or round(-line_debit, 2), |
154 | + currency.name, |
155 | + field_activity]] |
156 | |
157 | def create(self, cr, uid, ids, data, context=None): |
158 | if context is None: |
159 | context = {} |
160 | - # US-2303 Data should always be in English whatever the language settings |
161 | + # data should always be in English whatever the language settings |
162 | context.update({'lang': 'en_MF'}) |
163 | pool = pooler.get_pool(cr.dbname) |
164 | + rate_obj = pool.get('res.currency.rate') |
165 | + period_obj = pool.get('account.period') |
166 | + inst_obj = pool.get('msf.instance') |
167 | + aml_obj = pool.get('account.move.line') |
168 | + aal_obj = pool.get('account.analytic.line') |
169 | + rates = {} # store the rates already computed |
170 | |
171 | first_header = ['Proprietary Instance', |
172 | 'Journal Code', |
173 | @@ -127,6 +146,7 @@ |
174 | 'Posting Date', |
175 | 'Period', |
176 | 'G/L Account', |
177 | + 'Unifield Account', |
178 | 'Destination', |
179 | 'Cost Centre', |
180 | 'Funding Pool', |
181 | @@ -136,38 +156,49 @@ |
182 | 'Booking Currency', |
183 | 'Functional Debit', |
184 | 'Functional Credit', |
185 | - 'Functional Currency', |
186 | - 'Exchange Rate'] |
187 | + 'Functional Currency'] |
188 | + |
189 | + second_header = ['Proprietary Instance', |
190 | + 'Journal Code', |
191 | + 'Entry Sequence', |
192 | + 'Description', |
193 | + 'Reference', |
194 | + 'Document Date', |
195 | + 'Posting Date', |
196 | + 'Period', |
197 | + 'G/L Account', |
198 | + 'Destination', |
199 | + 'Department', |
200 | + 'Cost Centre', |
201 | + 'Third Parties', |
202 | + 'Employee Id', |
203 | + 'Exchange rate', |
204 | + 'Booking Debit', |
205 | + 'Booking Credit', |
206 | + 'Booking Currency', |
207 | + 'Field Activity'] |
208 | + |
209 | + period = period_obj.browse(cr, uid, data['form']['period_id'], context=context) |
210 | + |
211 | + # list the journal types for which the rate used will always be 1 |
212 | + # i.e. REVAL, Curr. Adjustment, and Accrual |
213 | + no_rate_journal_types = ['revaluation', 'cur_adj', 'accrual'] |
214 | + no_rate_analytic_journal_types = ['revaluation', 'cur_adj', 'general'] # Analytic Accrual Journals have the type General |
215 | |
216 | # Initialize lists: one for the first report... |
217 | first_result_lines = [] |
218 | - # ...one for the second report... |
219 | + # ...and subdivisions for the second report. |
220 | second_result_lines = [] |
221 | - # ...and subdivisions for the third report. |
222 | - third_report = [] |
223 | - account_lines = [] |
224 | + main_lines = {} |
225 | account_lines_debit = {} |
226 | - account_lines_functional_debit = {} |
227 | - rate_req = "SELECT rate FROM res_currency_rate WHERE currency_id = %s AND name <= %s ORDER BY name desc LIMIT 1" |
228 | - # US-118: func debit with no FXA currency adjustement entries |
229 | - account_lines_functional_debit_no_ccy_adj = {} |
230 | - journal_exclude_subtotal_ids = pool.get('account.journal').search(cr, |
231 | - uid, [('type', 'in', ('cur_adj', 'revaluation'))], context=context) |
232 | - # General variables |
233 | - period = pool.get('account.period').browse(cr, uid, data['form']['period_id']) |
234 | - period_name = period and period.code or "0" |
235 | - counterpart_date = period and period.date_stop and \ |
236 | - datetime.datetime.strptime(period.date_stop, '%Y-%m-%d').date().strftime('%d/%m/%Y') or "" |
237 | - integration_ref = "0" |
238 | - country_code = "0" |
239 | - move_prefix = "0" |
240 | + # Get department code filled in through the country code mapping |
241 | + department_info = "" |
242 | + field_activity = "" # always empty |
243 | + parent_instance = False |
244 | if len(data['form']['instance_ids']) > 0: |
245 | - parent_instance = pool.get('msf.instance').browse(cr, uid, data['form']['instance_ids'][0], context=context) |
246 | + parent_instance = inst_obj.browse(cr, uid, data['form']['instance_ids'][0], context=context) |
247 | if parent_instance: |
248 | - country_code = self.translate_country(cr, uid, pool, parent_instance) |
249 | - if period and period.date_start: |
250 | - integration_ref = parent_instance.code[:2] + period.date_start[5:7] |
251 | - move_prefix = parent_instance.move_prefix[:2] |
252 | + department_info = self.translate_country(cr, uid, pool, parent_instance, context=context) |
253 | |
254 | # UFTP-375: Add export all/previous functionality |
255 | selection = data['form'].get('selection', False) |
256 | @@ -181,66 +212,30 @@ |
257 | else: |
258 | raise osv.except_osv(_('Error'), _('Wrong value for selection: %s.') % (selection,)) |
259 | |
260 | - last_processed_journal = False |
261 | - move_line_ids = pool.get('account.move.line').search(cr, uid, [('period_id', '=', data['form']['period_id']), |
262 | - ('instance_id', 'in', data['form']['instance_ids']), |
263 | - ('account_id.is_analytic_addicted', '=', False), |
264 | - ('journal_id.type', 'not in', ['hq', 'migration']), |
265 | - ('exported', 'in', to_export)], context=context) |
266 | - # US-274/2: remove 'Inkind', 'OD-Extra Accounting' entries from both |
267 | - # in Upbalances and Upexpenses files |
268 | - exclude_jn_type_for_balance_and_expense_report = ( |
269 | - 'inkind', |
270 | - 'extra', |
271 | - ) |
272 | + move_line_ids = aml_obj.search(cr, uid, [('period_id', '=', data['form']['period_id']), |
273 | + ('instance_id', 'in', data['form']['instance_ids']), |
274 | + ('account_id.is_analytic_addicted', '=', False), |
275 | + ('journal_id.type', 'not in', ['migration', 'hq']), |
276 | + ('exported', 'in', to_export)], |
277 | + context=context) |
278 | |
279 | nb_move_line = len(move_line_ids) |
280 | move_line_count = 0 |
281 | - |
282 | if 'background_id' in context: |
283 | bg_id = context['background_id'] |
284 | else: |
285 | bg_id = None |
286 | |
287 | - # assume that this for loop is about 40% of the total treatment |
288 | - move_share = 0.4 |
289 | + move_share = 0.4 # 40% of the total process |
290 | |
291 | - for move_line in pool.get('account.move.line').browse(cr, uid, move_line_ids, context=context): |
292 | + for move_line in aml_obj.browse(cr, uid, move_line_ids, context=context): |
293 | + if move_line.move_id.state != 'posted': # only posted move lines are kept |
294 | + move_line_count += 1 |
295 | + continue |
296 | journal = move_line.journal_id |
297 | - if journal: |
298 | - last_processed_journal = journal |
299 | account = move_line.account_id |
300 | currency = move_line.currency_id |
301 | - func_currency = move_line.functional_currency_id |
302 | - rate = ZERO_CELL_CONTENT |
303 | - |
304 | - is_cur_adj_entry = move_line.journal_id \ |
305 | - and move_line.journal_id.type == 'cur_adj' or False |
306 | - is_rev_entry = move_line.journal_id \ |
307 | - and move_line.journal_id.type == 'revaluation' or False |
308 | - |
309 | - if currency and func_currency: |
310 | - # US-274/9: accrual account (always refer to previous period) |
311 | - # base on doc date instead posting in this case |
312 | - # - 1st period accruals: doc date and posting same period |
313 | - # - next accruals: doc date previous period (accrual of) |
314 | - move_line_date = move_line.journal_id \ |
315 | - and move_line.journal_id.type == 'accrual' \ |
316 | - and move_line.document_date or move_line.date |
317 | - |
318 | - if move_line.journal_id.type == 'correction' and move_line.source_date: |
319 | - # US-1525 For the Correction entries display the rate of the entry period corrected |
320 | - move_line_date = move_line.source_date |
321 | - cr.execute(rate_req, (move_line.functional_currency_id.id, move_line_date)) |
322 | - if cr.rowcount: |
323 | - func_rate = cr.fetchall()[0][0] |
324 | - cr.execute(rate_req, (currency.id, move_line_date)) |
325 | - if cr.rowcount: |
326 | - curr_rate = cr.fetchall()[0][0] |
327 | - if func_rate != 0.00: |
328 | - rate = round(1 / (curr_rate / func_rate), 8) |
329 | - |
330 | - # For first report: as it |
331 | + # For the first report: |
332 | formatted_data = [move_line.instance_id and move_line.instance_id.code or "", |
333 | journal and journal.code or "", |
334 | move_line.move_id and move_line.move_id.name or "", |
335 | @@ -249,6 +244,7 @@ |
336 | datetime.datetime.strptime(move_line.document_date, '%Y-%m-%d').date().strftime('%d/%m/%Y'), |
337 | datetime.datetime.strptime(move_line.date, '%Y-%m-%d').date().strftime('%d/%m/%Y'), |
338 | move_line.period_id and move_line.period_id.code or "", |
339 | + self.translate_account(cr, uid, pool, account, context=context), |
340 | account and account.code + " " + account.name, |
341 | "", |
342 | "", |
343 | @@ -259,79 +255,74 @@ |
344 | currency and currency.name or "", |
345 | round(move_line.debit, 2), |
346 | round(move_line.credit, 2), |
347 | - func_currency and func_currency.name or "", |
348 | - rate] |
349 | + move_line.functional_currency_id and move_line.functional_currency_id.name or ""] |
350 | first_result_lines.append(formatted_data) |
351 | - if is_cur_adj_entry or is_rev_entry: |
352 | - # US-788/1 and US-478/3: FXA/REV raw data override |
353 | - # without impacting formatted_data for other files |
354 | - # US-788/1: always display booking as func |
355 | - # US-478/3 always rate of 1 |
356 | - first_result_lines[-1][13] = first_result_lines[-1][16] # US-788/1 |
357 | - first_result_lines[-1][14] = first_result_lines[-1][17] # US-788/1 |
358 | - first_result_lines[-1][15] = first_result_lines[-1][18] # US-788/1 |
359 | - first_result_lines[-1][-1] = 1. # US-478/3 |
360 | - |
361 | - # For third report: add to corresponding sub |
362 | - if journal and journal.type not in ( |
363 | - exclude_jn_type_for_balance_and_expense_report): # US-274/2 |
364 | - if not account.shrink_entries_for_hq or is_rev_entry or is_cur_adj_entry: |
365 | - # US-478/1: or is_rev_entry, no shrink for rev journal entries |
366 | - expat_identification = "0" |
367 | - expat_employee = "0" |
368 | - # Expat employees are the only third party in this report |
369 | - if move_line.partner_txt and move_line.employee_id and move_line.employee_id.employee_type == 'ex': |
370 | - if account.code == '15640': # US-274/7 |
371 | - expat_identification = move_line.employee_id.identification_id |
372 | - expat_employee = move_line.partner_txt |
373 | - |
374 | - # US-274/1: for FXA/REV entries output fonctional amount in balance |
375 | - # report |
376 | - if is_cur_adj_entry or is_rev_entry: |
377 | - output_debit = move_line.debit |
378 | - output_credit = move_line.credit |
379 | - output_rate = 1. |
380 | - output_curr = func_currency and func_currency.name or "0" |
381 | - else: |
382 | - output_debit = move_line.debit_currency |
383 | - output_credit = move_line.credit_currency |
384 | - output_rate = rate |
385 | - output_curr = currency and currency.name or "0" |
386 | - |
387 | - other_formatted_data = ["01", |
388 | - country_code, |
389 | - account and account.code or "0", |
390 | - expat_identification, |
391 | - "0", |
392 | - move_line.date and datetime.datetime.strptime(move_line.date, '%Y-%m-%d').date().strftime('%d/%m/%Y') or "0", |
393 | - output_curr, |
394 | - output_rate, |
395 | - output_debit != 0.0 and round(output_debit, 2) or "", |
396 | - output_credit != 0.0 and round(output_credit, 2) or "", |
397 | - move_line.move_id and move_line.move_id.name or "0", |
398 | - move_line.name or "0", |
399 | - move_line.ref or "", |
400 | - expat_employee, |
401 | - move_line.document_date and datetime.datetime.strptime(move_line.document_date, '%Y-%m-%d').date().strftime('%d/%m/%Y') or "0", |
402 | - move_line.ref or "0"] |
403 | - account_lines.append(other_formatted_data) |
404 | + |
405 | + # For the second report: |
406 | + # exclude In-kind Donations and OD-Extra Accounting entries from the "formatted data" file |
407 | + if move_line.journal_id.type not in ['inkind', 'extra']: |
408 | + if not account.shrink_entries_for_hq: |
409 | + # data for the "Employee Id" column |
410 | + employee_id = '' |
411 | + if move_line.employee_id and move_line.employee_id.employee_type == 'ex': # expat staff |
412 | + employee_id = move_line.employee_id.identification_id or '' |
413 | + # data for the columns: Exchange rate, Booking Debit, Booking Credit, Booking Currency |
414 | + exchange_rate = 0 |
415 | + booking_amounts = [round(move_line.debit_currency, 2), round(move_line.credit_currency, 2)] |
416 | + booking_curr = formatted_data[16:17] |
417 | + if move_line.journal_id.type in no_rate_journal_types: |
418 | + # use 1 as exchange rate and display the functional values in the "booking" columns |
419 | + exchange_rate = 1 |
420 | + booking_amounts = [round(move_line.debit, 2), round(move_line.credit, 2)] |
421 | + booking_curr = formatted_data[19:20] |
422 | + # automatic corrections |
423 | + elif move_line.journal_id.type == 'correction' and (move_line.corrected_line_id or move_line.reversal_line_id): |
424 | + # If there are several levels of correction use the last one |
425 | + corr_aml = move_line.corrected_line_id or move_line.reversal_line_id # JI corrected or reversed |
426 | + initial_id = -1 |
427 | + final_id = -2 |
428 | + while initial_id != final_id: |
429 | + initial_id = corr_aml.id |
430 | + # check if the corrected line corrects another line |
431 | + corr_aml = corr_aml.corrected_line_id or corr_aml |
432 | + final_id = corr_aml.id |
433 | + # rate of the original corrected entry |
434 | + if currency.id not in rates: |
435 | + rates[currency.id] = {} |
436 | + if corr_aml.date not in rates[currency.id]: |
437 | + rate = 0 |
438 | + rate_ids = rate_obj.search(cr, uid, [('currency_id', '=', currency.id), ('name', '<=', corr_aml.date)], |
439 | + order='name DESC', limit=1, context=context) |
440 | + if rate_ids: |
441 | + rate = rate_obj.browse(cr, uid, rate_ids[0], fields_to_fetch=['rate'], context=context).rate |
442 | + rates[currency.id][corr_aml.date] = rate |
443 | + exchange_rate = rates[currency.id][corr_aml.date] |
444 | + # other lines |
445 | + elif currency: |
446 | + # rate of the period selected |
447 | + if currency.id not in rates: |
448 | + rates[currency.id] = {} |
449 | + if period.date_start not in rates[currency.id]: |
450 | + rate = 0 |
451 | + rate_ids = rate_obj.search(cr, uid, [('currency_id', '=', currency.id), ('name', '<=', period.date_start)], |
452 | + order='name DESC', limit=1, context=context) |
453 | + if rate_ids: |
454 | + rate = rate_obj.browse(cr, uid, rate_ids[0], fields_to_fetch=['rate'], context=context).rate |
455 | + rates[currency.id][period.date_start] = rate |
456 | + exchange_rate = rates[currency.id][period.date_start] |
457 | + |
458 | + if (journal.code, journal.id, currency.id) not in main_lines: |
459 | + main_lines[(journal.code, journal.id, currency.id)] = [] |
460 | + main_lines[(journal.code, journal.id, currency.id)].append(formatted_data[:9] + [formatted_data[10]] + |
461 | + [department_info] + [formatted_data[11]] + |
462 | + [formatted_data[13]] + [employee_id] + |
463 | + [exchange_rate] + booking_amounts + |
464 | + booking_curr + [field_activity]) |
465 | else: |
466 | - if (account.code, currency.name, period_name) not in account_lines_debit: |
467 | - account_lines_debit[(account.code, currency.name, period_name)] = 0.0 |
468 | - account_lines_functional_debit[(account.code, currency.name, period_name)] = 0.0 |
469 | - account_lines_functional_debit_no_ccy_adj[(account.code, currency.name, period_name)] = 0.0 |
470 | - |
471 | - account_lines_debit[(account.code, currency.name, period_name)] += (move_line.debit_currency - move_line.credit_currency) |
472 | - funct_balance = (move_line.debit - move_line.credit) |
473 | - account_lines_functional_debit[(account.code, currency.name, period_name)] += funct_balance |
474 | - |
475 | - # US-118: func debit with no FXA currency adjustement entries |
476 | - # compute subtotal line inverted rate with no FXA entry: |
477 | - # no booking amount but funct one then cause a wrong balance |
478 | - # for ccy inverted rate computation |
479 | - if not journal_exclude_subtotal_ids or \ |
480 | - move_line.journal_id.id not in journal_exclude_subtotal_ids: |
481 | - account_lines_functional_debit_no_ccy_adj[(account.code, currency.name, period_name)] += funct_balance |
482 | + translated_account_code = self.translate_account(cr, uid, pool, account, context=context) |
483 | + if (translated_account_code, currency.id) not in account_lines_debit: |
484 | + account_lines_debit[(translated_account_code, currency.id)] = 0.0 |
485 | + account_lines_debit[(translated_account_code, currency.id)] += (move_line.debit_currency - move_line.credit_currency) |
486 | |
487 | move_line_count += 1 |
488 | if move_line_count % 30 == 0: |
489 | @@ -343,124 +334,113 @@ |
490 | self.shared_update_percent(cr, uid, pool, [bg_id], |
491 | share=move_share, finished=True) |
492 | |
493 | - # UFTP-375: Do not include FREE1 and FREE2 analytic lines |
494 | - # US-817: search period from JI (VI from HQ so AJI always with its JI) |
495 | - # (AJI period_id is a field function always deduced from date since UTP-943) |
496 | - analytic_line_ids = pool.get('account.analytic.line').search(cr, uid, [('period_id', '=', data['form']['period_id']), |
497 | - ('instance_id', 'in', data['form']['instance_ids']), |
498 | - ('journal_id.type', 'not in', ['hq', 'engagement', 'migration']), |
499 | - ('account_id.category', 'not in', ['FREE1', 'FREE2']), |
500 | - ('exported', 'in', to_export)], context=context) |
501 | - |
502 | + analytic_line_ids = aal_obj.search(cr, uid, [('period_id', '=', data['form']['period_id']), |
503 | + ('instance_id', 'in', data['form']['instance_ids']), |
504 | + ('journal_id.type', 'not in', ['migration', 'hq', 'engagement']), |
505 | + ('account_id.category', 'not in', ['FREE1', 'FREE2']), |
506 | + ('exported', 'in', to_export)], context=context) |
507 | nb_analytic_line = len(analytic_line_ids) |
508 | analytic_line_count = 0 |
509 | |
510 | - # assume that this for loop is about 50% of the total treatment |
511 | - analytic_share = 0.5 |
512 | + analytic_share = 0.5 # 50% of the total process |
513 | |
514 | - for analytic_line in pool.get('account.analytic.line').browse(cr, uid, analytic_line_ids, context=context): |
515 | - journal = analytic_line.move_id and analytic_line.move_id.journal_id or False |
516 | - if journal: |
517 | - last_processed_journal = journal |
518 | + for analytic_line in aal_obj.browse(cr, uid, analytic_line_ids, context=context): |
519 | + # restrict to analytic lines coming from posted move lines |
520 | + if analytic_line.move_state != 'posted': |
521 | + analytic_line_count += 1 |
522 | + continue |
523 | + journal = analytic_line.move_id and analytic_line.move_id.journal_id |
524 | account = analytic_line.general_account_id |
525 | currency = analytic_line.currency_id |
526 | - func_currency = analytic_line.move_id.functional_currency_id |
527 | - rate = "" |
528 | - |
529 | - # US-478 - US-274/9 accrual account always refer to previous period |
530 | - # base on doc date instead posting in this case |
531 | - # - 1st period accruals: doc date and posting same period |
532 | - # - next accruals: doc date previous period (accrual of) |
533 | - ldate = analytic_line.date |
534 | - if analytic_line.move_id: |
535 | - if analytic_line.move_id.journal_id.type == 'accrual': |
536 | - ldate = analytic_line.document_date or analytic_line.date |
537 | - elif analytic_line.journal_id \ |
538 | - and analytic_line.journal_id.code == 'ACC': |
539 | - # sync border case no JI for the AJI |
540 | - ldate = analytic_line.document_date or analytic_line.date |
541 | - |
542 | - if func_currency: |
543 | - if analytic_line.journal_id.type == 'correction' and analytic_line.source_date: |
544 | - # US-1525 For the Correction entries display the rate of the entry period corrected |
545 | - ldate = analytic_line.source_date |
546 | - cr.execute(rate_req, (currency.id, ldate)) |
547 | - if cr.rowcount: |
548 | - rate = round(1 / cr.fetchall()[0][0], 8) |
549 | - |
550 | - is_analytic_cur_adj_entry = analytic_line.journal_id \ |
551 | - and analytic_line.journal_id.type == 'cur_adj' or False |
552 | - is_analytic_rev_entry = analytic_line.journal_id \ |
553 | - and analytic_line.journal_id.type == 'revaluation' or False |
554 | - |
555 | - # US-1375: cancel US-817 |
556 | + cost_center_code = analytic_line.cost_center_id and analytic_line.cost_center_id.code or "" |
557 | aji_period_id = analytic_line and analytic_line.period_id or False |
558 | |
559 | - # For first report: as is |
560 | + # For the first report: |
561 | formatted_data = [analytic_line.instance_id and analytic_line.instance_id.code or "", |
562 | analytic_line.journal_id and analytic_line.journal_id.code or "", |
563 | - analytic_line.entry_sequence or "", |
564 | + analytic_line.entry_sequence or analytic_line.move_id and analytic_line.move_id.move_id and analytic_line.move_id.move_id.name or "", |
565 | analytic_line.name or "", |
566 | analytic_line.ref or "", |
567 | datetime.datetime.strptime(analytic_line.document_date, '%Y-%m-%d').date().strftime('%d/%m/%Y'), |
568 | datetime.datetime.strptime(analytic_line.date, '%Y-%m-%d').date().strftime('%d/%m/%Y'), |
569 | aji_period_id and aji_period_id.code or "", |
570 | + self.translate_account(cr, uid, pool, account, context=context), |
571 | account and account.code + " " + account.name or "", |
572 | analytic_line.destination_id and analytic_line.destination_id.code or "", |
573 | - analytic_line.cost_center_id and analytic_line.cost_center_id.code or "", |
574 | + cost_center_code, |
575 | analytic_line.account_id and analytic_line.account_id.code or "", |
576 | analytic_line.partner_txt or "", |
577 | - analytic_line.amount_currency > 0 and ZERO_CELL_CONTENT or round(-analytic_line.amount_currency, 2), |
578 | - analytic_line.amount_currency > 0 and round(analytic_line.amount_currency, 2) or ZERO_CELL_CONTENT, |
579 | + analytic_line.amount_currency > 0 and "0.00" or round(-analytic_line.amount_currency, 2), |
580 | + analytic_line.amount_currency > 0 and round(analytic_line.amount_currency, 2) or "0.00", |
581 | currency and currency.name or "", |
582 | - analytic_line.amount > 0 and ZERO_CELL_CONTENT or round(-analytic_line.amount, 2), |
583 | - analytic_line.amount > 0 and round(analytic_line.amount, 2) or ZERO_CELL_CONTENT, |
584 | - func_currency and func_currency.name or "", |
585 | - rate] |
586 | + analytic_line.amount > 0 and "0.00" or round(-analytic_line.amount, 2), |
587 | + analytic_line.amount > 0 and round(analytic_line.amount, 2) or "0.00", |
588 | + analytic_line.functional_currency_id and analytic_line.functional_currency_id.name or ""] |
589 | first_result_lines.append(formatted_data) |
590 | - if is_analytic_cur_adj_entry or is_analytic_rev_entry: |
591 | - # US-788/1 and US-478/3: FXA/REV raw data override |
592 | - # without impacting formatted_data for other files |
593 | - # US-788/1: always display booking as func |
594 | - # US-478/3 always rate of 1 |
595 | - first_result_lines[-1][13] = first_result_lines[-1][16] # US-788/1 |
596 | - first_result_lines[-1][14] = first_result_lines[-1][17] # US-788/1 |
597 | - first_result_lines[-1][15] = first_result_lines[-1][18] # US-788/1 |
598 | - first_result_lines[-1][-1] = 1. # US-478/3 |
599 | - if analytic_line.journal_id \ |
600 | - and analytic_line.journal_id.type not in ( |
601 | - exclude_jn_type_for_balance_and_expense_report): # US-274/2 |
602 | - # Add to second report (expenses only) |
603 | - up_exp_ccy = currency |
604 | - up_exp_rate = rate |
605 | - up_exp_amount = analytic_line.amount_currency |
606 | - |
607 | - if is_analytic_rev_entry: |
608 | - # US-1008 for up_expenses (file 2), match REV AJIs to JIs: |
609 | - # display AJIs booked on REV journal in func ccy and rate 1 |
610 | - # as already done here for JIs of up_balances (file 3) |
611 | - up_exp_ccy = func_currency |
612 | - up_exp_rate = 1. |
613 | - up_exp_amount = analytic_line.amount |
614 | - |
615 | - other_formatted_data = [integration_ref , |
616 | - analytic_line.document_date and datetime.datetime.strptime(analytic_line.document_date, '%Y-%m-%d').date().strftime('%d/%m/%Y') or "0", |
617 | - "0", |
618 | - "0", |
619 | - analytic_line.cost_center_id and analytic_line.cost_center_id.code or "0", |
620 | - "1", |
621 | - account and account.code + " " + account.name or "0", |
622 | - up_exp_ccy and up_exp_ccy.name or "0", |
623 | - up_exp_amount and round(-up_exp_amount, 2) or ZERO_CELL_CONTENT, |
624 | - "0", |
625 | - up_exp_rate, |
626 | - analytic_line.date and datetime.datetime.strptime(analytic_line.date, '%Y-%m-%d').date().strftime('%d/%m/%Y') or "0", |
627 | - analytic_line.entry_sequence or "0", |
628 | - "0", |
629 | - analytic_line.name or "0", |
630 | - analytic_line.ref or "0", |
631 | - analytic_line.destination_id and analytic_line.destination_id.code or "0"] |
632 | - second_result_lines.append(other_formatted_data) |
633 | + |
634 | + # exclude In-kind Donations and OD-Extra Accounting entries from the "formatted data" file |
635 | + if analytic_line.journal_id.type not in ['inkind', 'extra']: |
636 | + # format CC as: P + the 4 digits from the right |
637 | + cost_center = formatted_data[11] and "P%s" % formatted_data[11][-4:] or "" |
638 | + # data for the "Employee Id" column |
639 | + employee_id = '' |
640 | + if analytic_line.move_id and analytic_line.move_id.employee_id and analytic_line.move_id.employee_id.employee_type == 'ex': # expat staff |
641 | + employee_id = analytic_line.move_id.employee_id.identification_id or '' |
642 | + |
643 | + # data for the columns: Exchange rate, Booking Debit, Booking Credit, Booking Currency |
644 | + exchange_rate = 0 |
645 | + booking_amounts = [analytic_line.amount_currency > 0 and "0.00" or round(-analytic_line.amount_currency, 2), |
646 | + analytic_line.amount_currency > 0 and round(analytic_line.amount_currency, 2) or "0.00"] |
647 | + booking_curr = formatted_data[16:17] |
648 | + if analytic_line.journal_id.type in no_rate_analytic_journal_types: |
649 | + # use 1 as exchange rate and display the functional values in the "booking" columns |
650 | + exchange_rate = 1 |
651 | + booking_amounts = [analytic_line.amount > 0 and "0.00" or round(-analytic_line.amount, 2), |
652 | + analytic_line.amount > 0 and round(analytic_line.amount, 2) or "0.00"] |
653 | + booking_curr = formatted_data[19:20] |
654 | + # automatic corrections |
655 | + elif analytic_line.journal_id.type == 'correction' and (analytic_line.last_corrected_id or analytic_line.reversal_origin): |
656 | +                # If there are several levels of correction, use the last one
657 | + corr_aal = analytic_line.last_corrected_id or analytic_line.reversal_origin # AJI corrected or reversed |
658 | + initial_id = -1 |
659 | + final_id = -2 |
660 | + while initial_id != final_id: |
661 | + initial_id = corr_aal.id |
662 | + # check if the corrected line corrects another line |
663 | + corr_aal = corr_aal.last_corrected_id or corr_aal |
664 | + final_id = corr_aal.id |
665 | + # rate of the original corrected entry |
666 | + if currency.id not in rates: |
667 | + rates[currency.id] = {} |
668 | + if corr_aal.date not in rates[currency.id]: |
669 | + rate = 0 |
670 | + rate_ids = rate_obj.search(cr, uid, [('currency_id', '=', currency.id), ('name', '<=', corr_aal.date)], |
671 | + order='name DESC', limit=1, context=context) |
672 | + if rate_ids: |
673 | + rate = rate_obj.browse(cr, uid, rate_ids[0], fields_to_fetch=['rate'], context=context).rate |
674 | + rates[currency.id][corr_aal.date] = rate |
675 | + exchange_rate = rates[currency.id][corr_aal.date] |
676 | + # other lines |
677 | + elif currency: |
678 | + # rate of the period selected |
679 | + if currency.id not in rates: |
680 | + rates[currency.id] = {} |
681 | + if period.date_start not in rates[currency.id]: |
682 | + rate = 0 |
683 | + rate_ids = rate_obj.search(cr, uid, [('currency_id', '=', currency.id), ('name', '<=', period.date_start)], |
684 | + order='name DESC', limit=1, context=context) |
685 | + if rate_ids: |
686 | + rate = rate_obj.browse(cr, uid, rate_ids[0], fields_to_fetch=['rate'], context=context).rate |
687 | + rates[currency.id][period.date_start] = rate |
688 | + exchange_rate = rates[currency.id][period.date_start] |
689 | + |
690 | + if (journal.code, journal.id, currency.id) not in main_lines: |
691 | + main_lines[(journal.code, journal.id, currency.id)] = [] |
692 | + main_lines[(journal.code, journal.id, currency.id)].append(formatted_data[:9] + [formatted_data[10]] + |
693 | + [department_info] + [cost_center] + |
694 | + [formatted_data[13]] + [employee_id] + |
695 | + [exchange_rate] + booking_amounts + |
696 | + booking_curr + [field_activity]) |
697 | |
698 | analytic_line_count += 1 |
699 | if analytic_line_count % 30 == 0: |
700 | @@ -474,69 +454,58 @@ |
701 | share=analytic_share, finished=True, already_done=move_share) |
702 | |
703 | first_result_lines = sorted(first_result_lines, key=lambda line: line[2]) |
704 | - if not move_line_ids and not analytic_line_ids: |
705 | - first_report = [] |
706 | - else: |
707 | - first_report = [first_header] + first_result_lines |
708 | - |
709 | - second_report = sorted(second_result_lines, key=lambda line: line[12]) |
710 | - |
711 | - for line in sorted(account_lines, key=lambda line: line[10]): |
712 | - third_report.append(line) |
713 | - third_report.append(self.create_counterpart(cr, uid, line)) |
714 | - |
715 | - if last_processed_journal and last_processed_journal.type not in ( |
716 | - exclude_jn_type_for_balance_and_expense_report): # US-274/2 |
717 | - for key in sorted(account_lines_debit.iterkeys(), key=lambda tuple: tuple[0]): |
718 | - # create the sequence number for those lines |
719 | - sequence_number = move_prefix + "-" + \ |
720 | - period.date_start[5:7] + "-" + \ |
721 | - period.date_start[:4] + "-" + \ |
722 | - key[0] + "-" + \ |
723 | - key[1] |
724 | - |
725 | - subtotal_lines = self.create_subtotal(cr, uid, key, |
726 | - account_lines_debit[key], |
727 | - account_lines_functional_debit[key], |
728 | - account_lines_functional_debit_no_ccy_adj[key], |
729 | - counterpart_date, |
730 | - country_code, |
731 | - sequence_number) |
732 | - if subtotal_lines: |
733 | - third_report += subtotal_lines |
734 | + first_report = [first_header] + first_result_lines |
735 | + |
736 | + counterpart_date = period and period.date_stop and \ |
737 | + datetime.datetime.strptime(period.date_stop, '%Y-%m-%d').date().strftime('%d/%m/%Y') or "" |
738 | + |
739 | + # regroup second report lines |
740 | + for key in sorted(main_lines.iterkeys(), key=lambda tuple: tuple[0]): |
741 | + second_result_lines += sorted(main_lines[key], key=lambda line: line[2]) |
742 | + |
743 | + for key in sorted(account_lines_debit.iterkeys(), key=lambda tuple: tuple[0]): |
744 | + # for entries "shrunk for HQ export" |
745 | + subtotal_lines = self.create_subtotal(cr, uid, key, account_lines_debit[key], counterpart_date, period, |
746 | + department_info, field_activity, context=context) |
747 | + if subtotal_lines: |
748 | + second_result_lines += subtotal_lines |
749 | |
750 | self.shared_update_percent(cr, uid, pool, [bg_id], |
751 | share=0.05, finished=True, |
752 | already_done=move_share+analytic_share) |
753 | |
754 | - # Write result to the final content |
755 | + second_report = [second_header] + second_result_lines |
756 | + |
757 | + # set prefix for file names |
758 | + mission_code = '' |
759 | + if parent_instance: |
760 | + mission_code = "%s" % parent_instance.code[:3] |
761 | + tm = strptime(period.date_start, '%Y-%m-%d') |
762 | + year = str(tm.tm_year) |
763 | + period_number = period and period.number and '%02d' % period.number or '' |
764 | + prefix = '%sY%sP%s_' % (mission_code, year, period_number) |
765 | + |
766 | zip_buffer = StringIO.StringIO() |
767 | first_fileobj = NamedTemporaryFile('w+b', delete=False) |
768 | second_fileobj = NamedTemporaryFile('w+b', delete=False) |
769 | - third_fileobj = NamedTemporaryFile('w+b', delete=False) |
770 | - writer = csv.writer(first_fileobj, quoting=csv.QUOTE_ALL) |
771 | + # for Raw data file: use double quotes for all entries |
772 | + writer = csv.writer(first_fileobj, quoting=csv.QUOTE_ALL, delimiter=",") |
773 | for line in first_report: |
774 | - writer.writerow(map(self._enc,line)) |
775 | + writer.writerow(map(self._enc, line)) |
776 | first_fileobj.close() |
777 | - writer = csv.writer(second_fileobj, quoting=csv.QUOTE_ALL) |
802 | +    # for formatted data file: use double quotes only for entries containing a double quote or a comma
779 | + writer = csv.writer(second_fileobj, quoting=csv.QUOTE_MINIMAL, delimiter=",") |
780 | for line in second_report: |
781 | - writer.writerow(map(self._enc,line)) |
782 | + writer.writerow(map(self._enc, line)) |
783 | second_fileobj.close() |
784 | - writer = csv.writer(third_fileobj, quoting=csv.QUOTE_ALL) |
785 | - for line in third_report: |
786 | - line.pop() |
787 | - line.pop() |
788 | - writer.writerow(map(self._enc,line)) |
789 | - third_fileobj.close() |
790 | + |
791 | out_zipfile = zipfile.ZipFile(zip_buffer, "w") |
792 | - out_zipfile.write(first_fileobj.name, "%sRaw_Data.csv" % (integration_ref and integration_ref+'_' or ''), zipfile.ZIP_DEFLATED) |
793 | - out_zipfile.write(second_fileobj.name, "%sUp_Expenses.csv" % (integration_ref and integration_ref+'_' or ''), zipfile.ZIP_DEFLATED) |
794 | - out_zipfile.write(third_fileobj.name, "%sUp_Balances.csv" % (integration_ref and integration_ref+'_' or ''), zipfile.ZIP_DEFLATED) |
795 | + out_zipfile.write(first_fileobj.name, prefix + "Raw data UF export.csv", zipfile.ZIP_DEFLATED) |
796 | + out_zipfile.write(second_fileobj.name, prefix + "formatted data D365 import.csv", zipfile.ZIP_DEFLATED) |
797 | out_zipfile.close() |
798 | out = zip_buffer.getvalue() |
799 | os.unlink(first_fileobj.name) |
800 | os.unlink(second_fileobj.name) |
801 | - os.unlink(third_fileobj.name) |
802 | |
803 | # Mark lines as exported |
804 | if move_line_ids: |
805 | |
806 | === modified file 'bin/addons/vertical_integration/wizard/wizard_hq_report_oca.py' |
807 | --- bin/addons/vertical_integration/wizard/wizard_hq_report_oca.py 2017-03-16 17:04:51 +0000 |
808 | +++ bin/addons/vertical_integration/wizard/wizard_hq_report_oca.py 2019-06-07 09:34:36 +0000 |
809 | @@ -23,6 +23,8 @@ |
810 | from tools.translate import _ |
811 | assert _ # pyflakes |
812 | import time |
813 | +from time import strptime |
814 | + |
815 | |
816 | class wizard_hq_report_oca(osv.osv_memory): |
817 | _name = "wizard.hq.report.oca" |
818 | @@ -60,14 +62,21 @@ |
819 | data = {} |
820 | # add parameters |
821 | data['form'] = {} |
822 | + mission_code = '' |
823 | + year = '' |
824 | + period_number = '' |
825 | if wizard.instance_id: |
826 | + mission_code = "%s" % wizard.instance_id.code[:3] |
827 | # Get projects below instance |
828 | data['form'].update({'instance_ids': [wizard.instance_id.id] + [x.id for x in wizard.instance_id.child_ids]}) |
829 | if wizard.period_id: |
830 | + tm = strptime(wizard.period_id.date_start, '%Y-%m-%d') |
831 | + year = str(tm.tm_year) |
832 | + period_number = wizard.period_id.number and '%02d' % wizard.period_id.number or '' |
833 | data['form'].update({'period_id': wizard.period_id.id}) |
834 | # UFTP-375: Permit user to select all lines or only previous ones |
835 | data['form'].update({'selection': wizard.selection}) |
836 | - data['target_filename'] = '%s_%s_%s' % (_('Export to HQ System'), wizard.instance_id and wizard.instance_id.code or '', time.strftime('%Y%m%d')) |
837 | + data['target_filename'] = '%sY%sP%s_formatted data D365 import' % (mission_code, year, period_number) |
838 | |
839 | background_id = self.pool.get('memory.background.report').create(cr, uid, { |
840 | 'file_name': data['target_filename'], |