Merge lp:~julie-w/unifield-server/US-5011 into lp:unifield-server

Proposed by jftempo
Status: Merged
Merged at revision: 5096
Proposed branch: lp:~julie-w/unifield-server/US-5011
Merge into: lp:unifield-server
Diff against target: 521 lines (+258/-166)
6 files modified
bin/addons/account/account.py (+14/-0)
bin/addons/account_override/account_view.xml (+5/-1)
bin/addons/account_reconciliation/account_move_line.py (+1/-0)
bin/addons/msf_profile/i18n/fr_MF.po (+24/-0)
bin/addons/msf_sync_data_server/data/sync_server.sync_rule.csv (+1/-0)
bin/addons/res_currency_functional/account_move_line_compute_currency.py (+213/-165)
To merge this branch: bzr merge lp:~julie-w/unifield-server/US-5011
Reviewer: UniField Reviewer Team (review requested, status: Pending)
Review via email: mp+355433@code.launchpad.net
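
The core change, visible in the diff below, is that the FX adjustment (addendum) entry no longer always uses the journal's default debit/credit accounts: if the account being reconciled defines its own "Default Debit/Credit Account for Reconciliation", that account is used instead, and it must carry a Default Destination. The following is a minimal, framework-free sketch of that selection rule only; the dataclass and the pick_fx_adjustment_accounts helper are illustrative and do not exist in the branch, where the logic lives in account_move_line_compute_currency.py and operates on browse records.

    # Sketch of the account-selection rule added by US-5011 (illustrative only).
    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Account:
        code: str
        name: str
        default_destination_id: Optional[int] = None
        reconciliation_debit_account_id: Optional["Account"] = None
        reconciliation_credit_account_id: Optional["Account"] = None

    def pick_fx_adjustment_accounts(journal_debit: Account,
                                    journal_credit: Account,
                                    reconciled: Optional[Account]):
        """Return the (debit, credit) accounts to use for the FX adjustment entry.

        The journal's default debit/credit accounts remain the fallback; when the
        reconciled account defines its own reconciliation defaults, those win, and
        each must have a Default Destination.
        """
        debit, credit = journal_debit, journal_credit
        if reconciled and reconciled.reconciliation_debit_account_id:
            debit = reconciled.reconciliation_debit_account_id
            if not debit.default_destination_id:
                raise ValueError("The account %s - %s has no Default Destination. "
                                 "FX adjustment entry can't be created." % (debit.code, debit.name))
        if reconciled and reconciled.reconciliation_credit_account_id:
            credit = reconciled.reconciliation_credit_account_id
            if not credit.default_destination_id:
                raise ValueError("The account %s - %s has no Default Destination. "
                                 "FX adjustment entry can't be created." % (credit.code, credit.name))
        return debit, credit

Under this sketch, an account with no reconciliation defaults keeps the pre-US-5011 behaviour and falls back to the journal's default accounts.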

Preview Diff

1=== modified file 'bin/addons/account/account.py'
2--- bin/addons/account/account.py 2018-04-03 10:18:51 +0000
3+++ bin/addons/account/account.py 2018-09-20 14:23:22 +0000
4@@ -317,6 +317,20 @@
5 'child_consol_ids': fields.many2many('account.account', 'account_account_consol_rel', 'child_id', 'parent_id', 'Consolidated Children'),
6 'child_id': fields.function(_get_child_ids, method=True, type='many2many', relation="account.account", string="Child Accounts"),
7 'reconcile': fields.boolean('Reconcile', help="Check this if the user is allowed to reconcile entries in this account."),
8+ 'reconciliation_debit_account_id': fields.many2one('account.account', 'Default Debit Account for Reconciliation',
9+ domain=[('type', '!=', 'view'),
10+ ('type_for_register', '!=', 'donation'),
11+ '|',
12+ ('user_type_code', '=', 'income'),
13+ '&', ('user_type_code', '=', 'expense'), ('user_type.report_type', '!=', 'none'), # exclude Extra-accounting expenses
14+ ]),
15+ 'reconciliation_credit_account_id': fields.many2one('account.account', 'Default Credit Account for Reconciliation',
16+ domain=[('type', '!=', 'view'),
17+ ('type_for_register', '!=', 'donation'),
18+ '|',
19+ ('user_type_code', '=', 'income'),
20+ '&', ('user_type_code', '=', 'expense'), ('user_type.report_type', '!=', 'none'), # exclude Extra-accounting expenses
21+ ]),
22 'shortcut': fields.char('Shortcut', size=12),
23 'tax_ids': fields.many2many('account.tax', 'account_account_tax_default_rel',
24 'account_id', 'tax_id', 'Default Taxes'),
25
26=== modified file 'bin/addons/account_override/account_view.xml'
27--- bin/addons/account_override/account_view.xml 2017-05-10 12:47:14 +0000
28+++ bin/addons/account_override/account_view.xml 2018-09-20 14:23:22 +0000
29@@ -15,7 +15,11 @@
30 <group col="6" colspan="4">
31 <field name="activation_date"/>
32 <field name="inactivation_date"/>
33- <field name="reconcile"/>
34+ <group col="2" colspan="2">
35+ <field name="reconcile"/>
36+ <field name="reconciliation_debit_account_id"/>
37+ <field name="reconciliation_credit_account_id"/>
38+ </group>
39 </group>
40 </page>
41 </page>
42
43=== modified file 'bin/addons/account_reconciliation/account_move_line.py'
44--- bin/addons/account_reconciliation/account_move_line.py 2018-03-28 07:36:10 +0000
45+++ bin/addons/account_reconciliation/account_move_line.py 2018-09-20 14:23:22 +0000
46@@ -464,6 +464,7 @@
47 self._check_instance(cr, uid, reconcile_id, context)
48 fxa_set.update(fxa_line_ids)
49 # first we delete the reconciliation for all lines including FXA
50+ context.update({'from_remove_move_reconcile': True})
51 res = super(account_move_line, self)._remove_move_reconcile(cr, uid, move_ids, context=context)
52 if fxa_set:
53 # then for each FXA we create a reversal entry and reconcile them together
54
55=== modified file 'bin/addons/msf_profile/i18n/fr_MF.po'
56--- bin/addons/msf_profile/i18n/fr_MF.po 2018-09-03 09:00:00 +0000
57+++ bin/addons/msf_profile/i18n/fr_MF.po 2018-09-20 14:23:22 +0000
58@@ -101860,3 +101860,27 @@
59 #, python-format
60 msgid " goods products, please refer to the appropriate procedures"
61 msgstr " veuillez vous référer aux procédures adéquates."
62+
63+#. module: account
64+#: field:account.account,reconciliation_debit_account_id:0
65+msgid "Default Debit Account for Reconciliation"
66+msgstr "Compte de Débit par Défaut pour le Lettrage"
67+
68+#. module: account
69+#: field:account.account,reconciliation_credit_account_id:0
70+msgid "Default Credit Account for Reconciliation"
71+msgstr "Compte de Crédit par Défaut pour le Lettrage"
72+
73+#. module: res_currency_functional
74+#: code:addons/res_currency_functional/account_move_line_compute_currency.py:139
75+#: code:addons/res_currency_functional/account_move_line_compute_currency.py:149
76+#, python-format
77+msgid "The account %s - %s has no Default Destination. FX adjustment entry can't be created."
78+msgstr "Le compte %s - %s n'a pas de Destination par défaut. L'écriture de différence de change ne peut pas être créée."
79+
80+#. module: res_currency_functional
81+#: code:addons/res_currency_functional/account_move_line_compute_currency.py:355
82+#: code:addons/res_currency_functional/account_move_line_compute_currency.py:365
83+#, python-format
84+msgid "The account %s - %s used for the FX adjustment entry has no Default Destination."
85+msgstr "Le compte %s - %s utilisé pour l'écriture de différence de change n'a pas de Destination par défaut."
86
87=== modified file 'bin/addons/msf_sync_data_server/data/sync_server.sync_rule.csv'
88--- bin/addons/msf_sync_data_server/data/sync_server.sync_rule.csv 2018-08-21 19:12:31 +0000
89+++ bin/addons/msf_sync_data_server/data/sync_server.sync_rule.csv 2018-09-20 14:23:22 +0000
90@@ -30,6 +30,7 @@
91 msf_sync_data_server.periods_state,TRUE,TRUE,TRUE,TRUE,bidirectional,Up,[],"['state', 'period_id/id', 'instance_id/id']",HQ + MISSION,account.period.state,,Periods states,Valid,,129
92 msf_sync_data_server.fys_state,TRUE,TRUE,TRUE,TRUE,bidirectional,Up,[],"['state', 'fy_id/id', 'instance_id/id']",HQ + MISSION,account.fiscalyear.state,,Fiscal years states,Valid,,130
93 msf_sync_data_server.cost_center_cc_intermission,TRUE,TRUE,FALSE,TRUE,bidirectional,Down,"[('category' , '=' , 'OC'), ('code', '=', 'cc-intermission')]","['category', 'code', 'date', 'date_start', 'description', 'name', 'type']",OC,account.analytic.account,,CC-Intermission,Valid,,140
94+msf_sync_data_server.gl_accounts_reconciliation,TRUE,TRUE,FALSE,TRUE,bidirectional,Down,[],"['reconciliation_debit_account_id/id', 'reconciliation_credit_account_id/id']",OC,account.account,,GL Accounts Reconciliation Accounts,Valid,,150
95 msf_sync_data_server.analytic_distribution,TRUE,TRUE,FALSE,FALSE,bidirectional,Bidirectional,[],['name'],HQ + MISSION,analytic.distribution,,Analytic Distribution,Valid,,200
96 msf_sync_data_server.cost_center_distribution_line,TRUE,TRUE,TRUE,FALSE,bidirectional,Bidirectional,"[('partner_type','=','internal')]","['amount', 'analytic_id/id', 'currency_id/id', 'date', 'destination_id/id', 'distribution_id/id', 'name', 'percentage', 'partner_type', 'source_date']",HQ + MISSION,cost.center.distribution.line,analytic_id,Cost Center Distribution Line - Internal partner,Valid,,201
97 msf_sync_data_server.cost_center_distribution_line_non_internal,TRUE,TRUE,FALSE,FALSE,bidirectional,Bidirectional-Private,"[('partner_type','!=','internal')]","['amount', 'analytic_id/id', 'currency_id/id', 'date', 'destination_id/id', 'distribution_id/id', 'name', 'percentage', 'partner_type', 'source_date']",HQ + MISSION,cost.center.distribution.line,analytic_id,Cost Center Distribution Line - Non internal partner,Valid,,202
98
99=== modified file 'bin/addons/res_currency_functional/account_move_line_compute_currency.py'
100--- bin/addons/res_currency_functional/account_move_line_compute_currency.py 2018-03-01 16:39:51 +0000
101+++ bin/addons/res_currency_functional/account_move_line_compute_currency.py 2018-09-20 14:23:22 +0000
102@@ -125,168 +125,190 @@
103 addendum_line_credit_account_id = journal.default_credit_account_id.id
104 addendum_line_debit_account_default_destination_id = journal.default_debit_account_id.default_destination_id.id
105 addendum_line_credit_account_default_destination_id = journal.default_credit_account_id.default_destination_id.id
106+ # Since US-5011 use the Default Accounts for Reconciliation of the account to reconcile if it has any
107+ # (note that FXA default accounts are still mandatory, though)
108+ acc_to_reconcile = lines and self.browse(cr, uid, lines[0], fields_to_fetch=['account_id'], context=context).account_id
109+ # Debit account
110+ if acc_to_reconcile and acc_to_reconcile.reconciliation_debit_account_id:
111+ rec_debit_acc = acc_to_reconcile.reconciliation_debit_account_id
112+ addendum_line_debit_account_id = rec_debit_acc.id
113+ if rec_debit_acc.default_destination_id:
114+ addendum_line_debit_account_default_destination_id = rec_debit_acc.default_destination_id.id
115+ else:
116+ raise osv.except_osv(_('Error'),
117+ _("The account %s - %s has no Default Destination. FX adjustment entry "
118+ "can't be created.") % (rec_debit_acc.code, rec_debit_acc.name,))
119+ # Credit account
120+ if acc_to_reconcile and acc_to_reconcile.reconciliation_credit_account_id:
121+ rec_credit_acc = acc_to_reconcile.reconciliation_credit_account_id
122+ addendum_line_credit_account_id = rec_credit_acc.id
123+ if rec_credit_acc.default_destination_id:
124+ addendum_line_credit_account_default_destination_id = rec_credit_acc.default_destination_id.id
125+ else:
126+ raise osv.except_osv(_('Error'),
127+ _("The account %s - %s has no Default Destination. FX adjustment entry "
128+ "can't be created.") % (rec_credit_acc.code, rec_credit_acc.name,))
129 # Create analytic distribution if this account is an analytic-a-holic account
130 distrib_id = False
131 different_currency = False
132 prev_curr = False
133- if journal.default_debit_account_id.is_analytic_addicted:
134- ## Browse all lines to fetch some values
135- partner_id = employee_id = transfer_journal_id = False
136- oldiest_date = False
137- highest_debit_amount = highest_credit_amount = 0.0
138- highest_debit_line = highest_credit_line = None
139- for rline in self.browse(cr, uid, lines):
140- # note: fctal debit and fctal credit are always positive
141- if rline.debit > highest_debit_amount:
142- highest_debit_amount = rline.debit
143- highest_debit_line = rline
144- elif rline.credit > highest_credit_amount:
145- highest_credit_amount = rline.credit
146- highest_credit_line = rline
147- account_id = (rline.account_id and rline.account_id.id) or False
148- partner_id = (rline.partner_id and rline.partner_id.id) or False
149- employee_id = (rline.employee_id and rline.employee_id.id) or False
150- transfer_journal_id = (rline.transfer_journal_id and rline.transfer_journal_id.id) or False
151- currency_id = (rline.currency_id and rline.currency_id.id) or False
152- # Check if lines are in different currencies
153- if not prev_curr:
154- prev_curr = rline.currency_id.id
155- if rline.currency_id.id != prev_curr:
156- different_currency = True
157+ # Browse all lines to fetch some values
158+ partner_id = employee_id = transfer_journal_id = False
159+ oldiest_date = False
160+ highest_debit_amount = highest_credit_amount = 0.0
161+ highest_debit_line = highest_credit_line = None
162+ for rline in self.browse(cr, uid, lines):
163+ # note: fctal debit and fctal credit are always positive
164+ if rline.debit > highest_debit_amount:
165+ highest_debit_amount = rline.debit
166+ highest_debit_line = rline
167+ elif rline.credit > highest_credit_amount:
168+ highest_credit_amount = rline.credit
169+ highest_credit_line = rline
170+ account_id = (rline.account_id and rline.account_id.id) or False
171+ partner_id = (rline.partner_id and rline.partner_id.id) or False
172+ employee_id = (rline.employee_id and rline.employee_id.id) or False
173+ transfer_journal_id = (rline.transfer_journal_id and rline.transfer_journal_id.id) or False
174+ currency_id = (rline.currency_id and rline.currency_id.id) or False
175+ # Check if lines are in different currencies
176+ if not prev_curr:
177 prev_curr = rline.currency_id.id
178- if not oldiest_date:
179- oldiest_date = rline.date or False
180- if rline.date > oldiest_date:
181- oldiest_date = rline.date
182-
183- # US-236: default document and posting dates should belong to the
184- # first open period found after the highest posting date involved in
185- # the reconciliation
186- current_date = time.strftime('%Y-%m-%d')
187- current_date_dt = self.pool.get('date.tools').orm2date(current_date)
188- oldiest_date_dt = self.pool.get('date.tools').orm2date(oldiest_date)
189- base_date = oldiest_date or current_date
190- base_date_dt = self.pool.get('date.tools').orm2date(base_date)
191-
192- # search first opened period since latest posting date
193- period_from = "%04d-%02d-%02d" % (base_date_dt.year,
194- base_date_dt.month, 1, )
195- period_ids = period_obj.search(cr, uid, [
196- ('date_start', '>=', period_from),
197- ('state', '=', 'draft'), # first opened period since
198- ('number', 'not in', [0, 16]),
199- ], limit=1, order='date_start, number', context=context)
200- if not period_ids:
201- raise osv.except_osv(_('Warning'),
202- _('No open period found since this date: %s') % base_date)
203- period_id = period_ids[0]
204- period_br = period_obj.browse(cr, uid, period_id, context=context)
205-
206- if current_date_dt.year == oldiest_date_dt.year \
207- and current_date_dt.month == oldiest_date_dt.month \
208- and current_date_dt.day > oldiest_date_dt.day \
209- and period_br.date_start <= current_date <= period_br.date_stop:
210- # current date in 'opened period found': use it as 'base date'
211- base_date = current_date
212- elif period_br.date_start > base_date:
213- # opened period finally found after the latest posting date:
214- # use the period start date as 'base date'
215- base_date = period_br.date_start
216-
217- # verify that a fx gain/loss account exists
218- search_ids = self.pool.get('account.analytic.account').search(cr, uid, [('for_fx_gain_loss', '=', True)], context=context)
219- if not search_ids:
220- raise osv.except_osv(_('Warning'), _('Please activate an analytic account with "For FX gain/loss" to allow reconciliation!'))
221- # Prepare some values
222- partner_db = partner_cr = addendum_db = addendum_cr = None
223- if total < 0.0:
224- # data for partner line
225- partner_db = addendum_cr = abs(total)
226- addendum_line_account_id = addendum_line_credit_account_id
227- addendum_line_account_default_destination_id = addendum_line_credit_account_default_destination_id
228- # Conversely some amount is missing @credit for partner
229- else:
230- partner_cr = addendum_db = abs(total)
231- addendum_line_account_id = addendum_line_debit_account_id
232- addendum_line_account_default_destination_id = addendum_line_debit_account_default_destination_id
233- # create an analytic distribution if addendum_line_account_id is an analytic-a-holic account
234- account = acc_obj.browse(cr, uid, addendum_line_account_id, context=context)
235- if account and account.is_analytic_addicted:
236- distrib_id = self.pool.get('analytic.distribution').create(cr, uid, {}, context={})
237- # add a cost center for analytic distribution
238- distrib_line_vals = {
239- 'distribution_id': distrib_id,
240- 'currency_id': company_currency_id,
241- 'analytic_id': search_ids[0],
242- 'percentage': 100.0,
243- 'date': base_date,
244- 'source_date': base_date,
245- 'destination_id': addendum_line_account_default_destination_id,
246- }
247- self.pool.get('cost.center.distribution.line').create(cr, uid, distrib_line_vals, context=context)
248- # add a funding pool line for analytic distribution
249- try:
250- fp_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'analytic_distribution', 'analytic_account_msf_private_funds')[1]
251- except ValueError:
252- fp_id = 0
253- if not fp_id:
254- raise osv.except_osv(_('Error'), _('No "MSF Private Fund" found!'))
255- distrib_line_vals.update({'analytic_id': fp_id, 'cost_center_id': search_ids[0],})
256- self.pool.get('funding.pool.distribution.line').create(cr, uid, distrib_line_vals, context=context)
257-
258- move_id = self.pool.get('account.move').create(cr, uid,{'journal_id': journal_id, 'period_id': period_id, 'date': base_date}, context=context)
259- # Create default vals for the new two move lines
260- vals = {
261- 'move_id': move_id,
262+ if rline.currency_id.id != prev_curr:
263+ different_currency = True
264+ prev_curr = rline.currency_id.id
265+ if not oldiest_date:
266+ oldiest_date = rline.date or False
267+ if rline.date > oldiest_date:
268+ oldiest_date = rline.date
269+
270+ # US-236: default document and posting dates should belong to the
271+ # first open period found after the highest posting date involved in
272+ # the reconciliation
273+ current_date = time.strftime('%Y-%m-%d')
274+ current_date_dt = self.pool.get('date.tools').orm2date(current_date)
275+ oldiest_date_dt = self.pool.get('date.tools').orm2date(oldiest_date)
276+ base_date = oldiest_date or current_date
277+ base_date_dt = self.pool.get('date.tools').orm2date(base_date)
278+
279+ # search first opened period since latest posting date
280+ period_from = "%04d-%02d-%02d" % (base_date_dt.year,
281+ base_date_dt.month, 1, )
282+ period_ids = period_obj.search(cr, uid, [
283+ ('date_start', '>=', period_from),
284+ ('state', '=', 'draft'), # first opened period since
285+ ('number', 'not in', [0, 16]),
286+ ], limit=1, order='date_start, number', context=context)
287+ if not period_ids:
288+ raise osv.except_osv(_('Warning'),
289+ _('No open period found since this date: %s') % base_date)
290+ period_id = period_ids[0]
291+ period_br = period_obj.browse(cr, uid, period_id, context=context)
292+
293+ if current_date_dt.year == oldiest_date_dt.year \
294+ and current_date_dt.month == oldiest_date_dt.month \
295+ and current_date_dt.day > oldiest_date_dt.day \
296+ and period_br.date_start <= current_date <= period_br.date_stop:
297+ # current date in 'opened period found': use it as 'base date'
298+ base_date = current_date
299+ elif period_br.date_start > base_date:
300+ # opened period finally found after the latest posting date:
301+ # use the period start date as 'base date'
302+ base_date = period_br.date_start
303+
304+ # verify that a fx gain/loss account exists
305+ search_ids = self.pool.get('account.analytic.account').search(cr, uid, [('for_fx_gain_loss', '=', True)], context=context)
306+ if not search_ids:
307+ raise osv.except_osv(_('Warning'), _('Please activate an analytic account with "For FX gain/loss" to allow reconciliation!'))
308+ # Prepare some values
309+ partner_db = partner_cr = addendum_db = addendum_cr = None
310+ if total < 0.0:
311+ # data for partner line
312+ partner_db = addendum_cr = abs(total)
313+ addendum_line_account_id = addendum_line_credit_account_id
314+ addendum_line_account_default_destination_id = addendum_line_credit_account_default_destination_id
315+ # Conversely some amount is missing @credit for partner
316+ else:
317+ partner_cr = addendum_db = abs(total)
318+ addendum_line_account_id = addendum_line_debit_account_id
319+ addendum_line_account_default_destination_id = addendum_line_debit_account_default_destination_id
320+ # create an analytic distribution if addendum_line_account_id is an analytic-a-holic account
321+ account = acc_obj.browse(cr, uid, addendum_line_account_id, context=context)
322+ if account and account.is_analytic_addicted:
323+ distrib_id = self.pool.get('analytic.distribution').create(cr, uid, {}, context={})
324+ # add a cost center for analytic distribution
325+ distrib_line_vals = {
326+ 'distribution_id': distrib_id,
327+ 'currency_id': company_currency_id,
328+ 'analytic_id': search_ids[0],
329+ 'percentage': 100.0,
330 'date': base_date,
331 'source_date': base_date,
332- 'document_date': base_date,
333- 'journal_id': journal_id,
334- 'period_id': period_id,
335- 'partner_id': partner_id,
336- 'employee_id': employee_id,
337- 'transfer_journal_id': transfer_journal_id,
338- 'credit': 0.0,
339- 'debit': 0.0,
340- 'name': 'Realised loss/gain',
341- 'is_addendum_line': True,
342- 'currency_id': currency_id,
343+ 'destination_id': addendum_line_account_default_destination_id,
344 }
345- # US-2594 if different currencies are used:
346- # if the FXA is for debit the currency is taken from the highest debit entry (and likewise for credit)
347- if different_currency:
348- new_currency_id = False
349- if partner_db:
350- new_currency_id = highest_debit_line and highest_debit_line.currency_id and highest_debit_line.currency_id.id
351- elif partner_cr:
352- new_currency_id = highest_credit_line and highest_credit_line.currency_id and highest_credit_line.currency_id.id
353- new_currency_id and vals.update({'currency_id': new_currency_id})
354- # Create partner line
355- vals.update({'account_id': account_id, 'debit': partner_db or 0.0, 'credit': partner_cr or 0.0,})
356- # UTP-1022: Allow account.move.line creation when we come from "create_addendum_line" because of currencies rate redefinition
357- context.update({'addendum_line_creation': True})
358- partner_line_id = self.create(cr, uid, vals, context=context)
359- # Create addendum_line
360- if distrib_id:
361- vals.update({'analytic_distribution_id': distrib_id})
362- # the ref of the expense line is the B/S account code and name
363- reconciled_acc = account_id and acc_obj.read(cr, uid, account_id, ['code', 'name'], context=context)
364- fxa_ref = reconciled_acc and '%s - %s' % (reconciled_acc['code'], reconciled_acc['name']) or False
365- vals.update({'account_id': addendum_line_account_id,
366- 'debit': addendum_db or 0.0,
367- 'credit': addendum_cr or 0.0,
368- 'ref': fxa_ref,
369- 'reference': fxa_ref})
370- addendum_line_id = self.create(cr, uid, vals, context=context)
371- # Validate move
372- self.pool.get('account.move').post(cr, uid, [move_id], context=context)
373-
374- # Update analytic line with right amount (instead of "0.0")
375- analytic_line_ids = self.pool.get('account.analytic.line').search(cr, uid, [('move_id', '=', addendum_line_id)], context=context)
376- addendum_line_amount_curr = -1*total or 0.0
377- self.pool.get('account.analytic.line').write(cr, uid, analytic_line_ids, {'currency_id': company_currency_id, 'amount': addendum_line_amount_curr, 'amount_currency': addendum_line_amount_curr})
378-
379- return partner_line_id
380+ self.pool.get('cost.center.distribution.line').create(cr, uid, distrib_line_vals, context=context)
381+ # add a funding pool line for analytic distribution
382+ try:
383+ fp_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'analytic_distribution', 'analytic_account_msf_private_funds')[1]
384+ except ValueError:
385+ fp_id = 0
386+ if not fp_id:
387+ raise osv.except_osv(_('Error'), _('No "MSF Private Fund" found!'))
388+ distrib_line_vals.update({'analytic_id': fp_id, 'cost_center_id': search_ids[0],})
389+ self.pool.get('funding.pool.distribution.line').create(cr, uid, distrib_line_vals, context=context)
390+
391+ move_id = self.pool.get('account.move').create(cr, uid,{'journal_id': journal_id, 'period_id': period_id, 'date': base_date}, context=context)
392+ # Create default vals for the new two move lines
393+ vals = {
394+ 'move_id': move_id,
395+ 'date': base_date,
396+ 'source_date': base_date,
397+ 'document_date': base_date,
398+ 'journal_id': journal_id,
399+ 'period_id': period_id,
400+ 'partner_id': partner_id,
401+ 'employee_id': employee_id,
402+ 'transfer_journal_id': transfer_journal_id,
403+ 'credit': 0.0,
404+ 'debit': 0.0,
405+ 'name': 'Realised loss/gain',
406+ 'is_addendum_line': True,
407+ 'currency_id': currency_id,
408+ }
409+ # US-2594 if different currencies are used:
410+ # if the FXA is for debit the currency is taken from the highest debit entry (and likewise for credit)
411+ if different_currency:
412+ new_currency_id = False
413+ if partner_db:
414+ new_currency_id = highest_debit_line and highest_debit_line.currency_id and highest_debit_line.currency_id.id
415+ elif partner_cr:
416+ new_currency_id = highest_credit_line and highest_credit_line.currency_id and highest_credit_line.currency_id.id
417+ new_currency_id and vals.update({'currency_id': new_currency_id})
418+ # Create partner line
419+ vals.update({'account_id': account_id, 'debit': partner_db or 0.0, 'credit': partner_cr or 0.0,})
420+ # UTP-1022: Allow account.move.line creation when we come from "create_addendum_line" because of currencies rate redefinition
421+ context.update({'addendum_line_creation': True})
422+ partner_line_id = self.create(cr, uid, vals, context=context)
423+ # Create addendum_line
424+ if distrib_id:
425+ vals.update({'analytic_distribution_id': distrib_id})
426+ # the ref of the expense line is the B/S account code and name
427+ reconciled_acc = account_id and acc_obj.read(cr, uid, account_id, ['code', 'name'], context=context)
428+ fxa_ref = reconciled_acc and '%s - %s' % (reconciled_acc['code'], reconciled_acc['name']) or False
429+ vals.update({'account_id': addendum_line_account_id,
430+ 'debit': addendum_db or 0.0,
431+ 'credit': addendum_cr or 0.0,
432+ 'ref': fxa_ref,
433+ 'reference': fxa_ref})
434+ addendum_line_id = self.create(cr, uid, vals, context=context)
435+ # Validate move
436+ self.pool.get('account.move').post(cr, uid, [move_id], context=context)
437+
438+ # Update analytic line with right amount (instead of "0.0")
439+ analytic_line_ids = self.pool.get('account.analytic.line').search(cr, uid, [('move_id', '=', addendum_line_id)], context=context)
440+ addendum_line_amount_curr = -1*total or 0.0
441+ self.pool.get('account.analytic.line').write(cr, uid, analytic_line_ids, {'currency_id': company_currency_id, 'amount': addendum_line_amount_curr, 'amount_currency': addendum_line_amount_curr})
442+
443+ return partner_line_id
444
445 def reconciliation_update(self, cr, uid, ids, context=None):
446 """
447@@ -317,6 +339,30 @@
448 addendum_line_credit_account_default_destination_id = journal.default_credit_account_id.default_destination_id.id
449 # Check line state
450 for reconciled in reconciled_obj.browse(cr, uid, ids, context=context):
451+ # Since US-5011 use the Default Accounts for Reconciliation of the account to reconcile if it has any
452+ # (note that FXA default accounts are still mandatory, though)
453+ aml_ids = self.search(cr, uid, [('reconcile_id', '=', reconciled.id)], limit=1, context=context)
454+ reconciled_acc = aml_ids and self.browse(cr, uid, aml_ids[0], fields_to_fetch=['account_id'], context=context).account_id
455+ # Debit account
456+ if reconciled_acc and reconciled_acc.reconciliation_debit_account_id:
457+ rec_debit_acc = reconciled_acc.reconciliation_debit_account_id
458+ addendum_line_debit_account_id = rec_debit_acc.id
459+ if rec_debit_acc.default_destination_id:
460+ addendum_line_debit_account_default_destination_id = rec_debit_acc.default_destination_id.id
461+ else:
462+ raise osv.except_osv(_('Error'),
463+ _("The account %s - %s used for the FX adjustment entry has no Default "
464+ "Destination.") % (rec_debit_acc.code, rec_debit_acc.name,))
465+ # Credit account
466+ if reconciled_acc and reconciled_acc.reconciliation_credit_account_id:
467+ rec_credit_acc = reconciled_acc.reconciliation_credit_account_id
468+ addendum_line_credit_account_id = rec_credit_acc.id
469+ if rec_credit_acc.default_destination_id:
470+ addendum_line_credit_account_default_destination_id = rec_credit_acc.default_destination_id.id
471+ else:
472+ raise osv.except_osv(_('Error'),
473+ _("The account %s - %s used for the FX adjustment entry has no Default "
474+ "Destination.") % (rec_credit_acc.code, rec_credit_acc.name,))
475 # Search addendum line
476 addendum_line_ids = self.search(cr, uid, [('reconcile_id', '=', reconciled.id), ('is_addendum_line', '=', True)], context=context)
477 # If addendum_line_ids, update it (if needed)
478@@ -343,7 +389,7 @@
479 sql = """
480 UPDATE account_move_line
481 SET debit_currency=%s, credit_currency=%s, amount_currency=%s, debit=%s, credit=%s
482- WHERE id=%s
483+ WHERE id=%s;
484 """
485 cr.execute(sql, [0.0, 0.0, 0.0, addendum_db or 0.0, addendum_cr or 0.0, tuple([al.id])])
486 # Update partner line
487@@ -358,15 +404,17 @@
488 addendum_counterpart_ids = self.search(cr, uid, [('move_id', '=', al.move_id.id), ('id', '!=', al.id), ('is_addendum_line', '=', True)])
489 if not addendum_counterpart_ids:
490 continue
491- counterpart_sql = """
492- UPDATE account_move_line
493- SET account_id=%s
494- WHERE id=%s
495- """
496- cr.execute(counterpart_sql, [addendum_line_account_id, tuple(addendum_counterpart_ids)])
497- # then update their analytic lines with default destination
498- analytic_line_ids = al_obj.search(cr, uid, [('move_id', 'in', addendum_counterpart_ids)])
499- al_obj.write(cr, uid, analytic_line_ids, {'general_account_id': addendum_line_account_id, 'destination_id': addendum_line_account_default_destination_id,})
500+ if not context.get('sync_update_execution', False):
501+ # update FXA accounts only if out of synchro, cf. they should be the same in all instances
502+ counterpart_sql = """
503+ UPDATE account_move_line
504+ SET account_id=%s
505+ WHERE id=%s;
506+ """
507+ cr.execute(counterpart_sql, [addendum_line_account_id, tuple(addendum_counterpart_ids)])
508+ # then update their analytic lines with default destination
509+ analytic_line_ids = al_obj.search(cr, uid, [('move_id', 'in', addendum_counterpart_ids)])
510+ al_obj.write(cr, uid, analytic_line_ids, {'general_account_id': addendum_line_account_id, 'destination_id': addendum_line_account_default_destination_id,})
511 else:
512 # Search all lines that have same reconcile_id
513 reconciled_line_ids = self.search(cr, uid, [('reconcile_id', '=', reconciled.id)], context=context)
514@@ -647,7 +695,7 @@
515 reconciled_move[vals['reconcile_id']] = True
516 elif line.reconcile_id:
517 reconciled_move[line.reconcile_id.id] = True
518- if reconciled_move:
519+ if reconciled_move and not context.get('from_remove_move_reconcile', False): # don't update an FXA about to be reversed
520 self.reconciliation_update(cr, uid, reconciled_move.keys(), context=context)
521 return res
522
