Merge lp:~unifield-team/unifield-wm/UF_1746 into lp:unifield-wm

Proposed by Olivier DOSSMANN
Status: Merged
Merged at revision: 2195
Proposed branch: lp:~unifield-team/unifield-wm/UF_1746
Merge into: lp:unifield-wm
Diff against target: 369 lines (+91/-30)
7 files modified
account_corrections/account_move_line.py (+31/-0)
account_corrections/wizard/analytic_distribution_wizard.py (+9/-3)
account_hq_entries/wizard/hq_entries_import.py (+16/-21)
account_hq_entries/wizard/hq_entries_unsplit.py (+0/-1)
account_override/account_move_line.py (+2/-0)
analytic_distribution/analytic_line.py (+2/-0)
analytic_distribution/wizard/mass_reallocation_wizard.py (+31/-5)
To merge this branch: bzr merge lp:~unifield-team/unifield-wm/UF_1746
Reviewer Review Type Date Requested Status
UniField Reviewer Team Pending
Review via email: mp+217591@code.launchpad.net
To post a comment you must log in.
1917. By Olivier DOSSMANN

UF-1746 [IMP] Correction - Mark all move lines as "corrected_upstream" to prevent project users from changing any other line once a coordo user has changed any move line of a journal entry

1918. By Olivier DOSSMANN

UF-1746 [FIX] Bug on analytic lines that are corrected in a closed period: no REV and COR lines were created.

This is due to the UF-1746 branch, in which the "corrected_upstream" method performs a write on the account move line, which deletes and recreates the analytic lines.

As this write only serves to signal to synchronization that a change was made, I use "update_check=False, check=False" to avoid the analytic lines being deleted and recreated, which solves the problem.

1919. By Olivier DOSSMANN

[MERGE] Last unifield trunk lp:unifield-wm

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'account_corrections/account_move_line.py'
2--- account_corrections/account_move_line.py 2014-04-29 14:46:07 +0000
3+++ account_corrections/account_move_line.py 2014-05-12 12:56:18 +0000
4@@ -42,13 +42,18 @@
5 - The account is not the default credit/debit account of the attached statement (register)
6 - All items attached to the entry have no reconcile_id on reconciliable account
7 - The line doesn't come from a write-off
8+ - The line is "corrected_upstream" that implies the line have been already corrected from a coordo or a hq to a level that is superior or equal to these instance.
9 """
10+ # Some checks
11 if context is None:
12 context = {}
13+ # Prepare some values
14 res = {}
15 # Search all accounts that are used in bank, cheque and cash registers
16 journal_ids = self.pool.get('account.journal').search(cr, uid, [('type', 'in', ['bank', 'cheque', 'cash'])])
17 account_ids = []
18+ company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
19+ level = company and company.instance_id and company.instance_id.level or ''
20 for j in self.pool.get('account.journal').read(cr, uid, journal_ids, ['default_debit_account_id', 'default_credit_account_id']):
21 if j.get('default_debit_account_id', False) and j.get('default_debit_account_id')[0] not in account_ids:
22 account_ids.append(j.get('default_debit_account_id')[0])
23@@ -107,9 +112,14 @@
24 if ml.account_id.id in account_ids:
25 res[ml.id] = False
26 continue
27+ # False if "corrected_upstream" is True and that we come from project level
28+ if ml.corrected_upstream and level == 'project':
29+ res[ml.id] = False
30+ continue
31 # False if this line is a revaluation
32 if ml.journal_id.type == 'revaluation':
33 res[ml.id] = False
34+ continue
35 return res
36
37 _columns = {
38@@ -761,6 +771,8 @@
39 al_obj.write(cr, uid, search_ids, search_data[1])
40 # Add this line to succeded lines
41 success_move_line_ids.append(ml.id)
42+ # Mark it as "corrected_upstream" if needed
43+ self.corrected_upstream_marker(cr, uid, [ml.id], context=context)
44 return success_move_line_ids
45
46 def correct_partner_id(self, cr, uid, ids, date=None, partner_id=None, context=None):
47@@ -859,6 +871,25 @@
48 success_move_line_ids.append(move_line.id)
49 return success_move_line_ids
50
51+ def corrected_upstream_marker(self, cr, uid, ids, context=None):
52+ """
53+ Check if we are in a COORDO / HQ instance. If yes, set move line(s) as corrected upstream.
54+ """
55+ # Some check
56+ if context is None:
57+ context = {}
58+ if isinstance(ids, (int, long)):
59+ ids = [ids]
60+ # Prepare some values
61+ company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
62+ # Check if we come from COORDO/HQ instance
63+ if company and company.instance_id and company.instance_id.level in ['section', 'coordo']:
64+ # UF-1746: Set also all other move lines as corrected upstream to disallow projet user to correct any move line of this move.
65+ move_ids = [x and x.get('move_id', False) and x.get('move_id')[0] for x in self.read(cr, uid, ids, ['move_id'], context=context)]
66+ ml_ids = self.search(cr, uid, [('move_id', 'in', move_ids)])
67+ self.write(cr, uid, ml_ids, {'corrected_upstream': True}, check=False, update_check=False, context=context)
68+ return True
69+
70 account_move_line()
71
72 class account_move(osv.osv):
73
74=== modified file 'account_corrections/wizard/analytic_distribution_wizard.py'
75--- account_corrections/wizard/analytic_distribution_wizard.py 2014-04-28 14:37:24 +0000
76+++ account_corrections/wizard/analytic_distribution_wizard.py 2014-05-12 12:56:18 +0000
77@@ -308,6 +308,12 @@
78 })
79
80 #####
81+ ## Set move line as corrected upstream if needed
82+ ###
83+ if to_reverse or to_override or to_create:
84+ self.pool.get('account.move.line').corrected_upstream_marker(cr, uid, [ml.id], context=context)
85+
86+ #####
87 ## FREE 1 / FREE 2
88 ###
89 for free in [('free.1', 'f1'), ('free.2', 'f2')]:
90@@ -373,9 +379,9 @@
91 })
92 # create the ana line
93 self.pool.get(obj_name).create_analytic_lines(cr, uid, [new_distrib_line], ml.id, date=wizard.date, document_date=orig_document_date, source_date=orig_date)
94-
95-
96-
97+ # Set move line as corrected upstream if needed
98+ if to_reverse or to_override or to_create:
99+ self.pool.get('account.move.line').corrected_upstream_marker(cr, uid, [ml.id], context=context)
100
101 def button_cancel(self, cr, uid, ids, context=None):
102 """
103
104=== modified file 'account_hq_entries/wizard/hq_entries_import.py'
105--- account_hq_entries/wizard/hq_entries_import.py 2014-01-16 09:09:39 +0000
106+++ account_hq_entries/wizard/hq_entries_import.py 2014-05-12 12:56:18 +0000
107@@ -23,14 +23,12 @@
108
109 from osv import osv
110 from osv import fields
111-import os.path
112 from base64 import decodestring
113 from tempfile import NamedTemporaryFile
114 import csv
115-from tools.misc import ustr
116 from tools.translate import _
117 import time
118-import locale
119+#import locale
120 from account_override import ACCOUNT_RESTRICTED_AREA
121
122 class hq_entries_import_wizard(osv.osv_memory):
123@@ -45,7 +43,7 @@
124 def parse_date(self, date):
125 try:
126 pdate = time.strptime(date, '%d/%m/%y')
127- except ValueError, e:
128+ except ValueError:
129 pdate = time.strptime(date, '%d/%m/%Y')
130 return time.strftime('%Y-%m-%d', pdate)
131
132@@ -94,7 +92,7 @@
133 vals.update({'document_date': dd})
134 except ValueError, e:
135 raise osv.except_osv(_('Error'), _('Wrong format for date: %s: %s') % (document_date, e))
136- # [utp-928]
137+ # [utp-928]
138 # Make it impossible to import HQ entries where Doc Date > Posting Date,
139 # it will spare trouble at HQ entry validation.
140 if dd and line_date and dd > line_date:
141@@ -170,9 +168,9 @@
142 free2_id = free2_id[0]
143 aa_check_ids.append(free2_id)
144 vals.update({'destination_id_first_value': destination_id, 'destination_id': destination_id, 'cost_center_id': cc_id, 'analytic_id': fp_id, 'cost_center_id_first_value': cc_id, 'analytic_id_first_value': fp_id, 'free_1_id': free1_id, 'free_2_id': free2_id,})
145-
146- # [utp-928] do not import line with a
147- # 'Destination' or 'Cost Center' or 'Funding Pool',
148+
149+ # [utp-928] do not import line with a
150+ # 'Destination' or 'Cost Center' or 'Funding Pool',
151 # of type 'view'
152 aa_check_errors = []
153 aa_check_category_map = {
154@@ -193,7 +191,7 @@
155 aa_check_errors.append('%s"%s - %s" of type "view" is not allowed for import' % (category, aa_r['code'], aa_r['name']))
156 if aa_check_errors:
157 raise osv.except_osv(_('Error'), ", ".join(aa_check_errors))
158-
159+
160 # Fetch description
161 if description:
162 vals.update({'name': description})
163@@ -259,23 +257,21 @@
164 # Do verifications
165 if not context:
166 context = {}
167-
168+
169 # Verify that an HQ journal exists
170 journal_ids = self.pool.get('account.journal').search(cr, uid, [('type', '=', 'hq'),
171 ('is_current_instance', '=', True)])
172 if not journal_ids:
173 raise osv.except_osv(_('Error'), _('You cannot import HQ entries because no HQ Journal exists.'))
174-
175+
176 # Prepare some values
177- file_ext_separator = '.'
178- file_ext = "csv"
179 message = _("HQ Entries import failed.")
180 res = False
181 created = 0
182 processed = 0
183 errors = []
184 filename = ""
185-
186+
187 # Browse all given wizard
188 for wiz in self.browse(cr, uid, ids):
189 if not wiz.file:
190@@ -296,7 +292,6 @@
191 if filename.split('.')[-1] != 'csv':
192 raise osv.except_osv(_('Warning'), _('You are trying to import a file with the wrong file format; please import a CSV file.'))
193 res = True
194- amount = 0.0
195 # Omit first line that contains columns ' name
196 try:
197 reader.next()
198@@ -307,28 +302,28 @@
199 nbline += 1
200 processed += 1
201 try:
202- update = self.update_hq_entries(cr, uid, line)
203+ self.update_hq_entries(cr, uid, line)
204 created += 1
205 except osv.except_osv, e:
206 errors.append('Line %s, %s'%(nbline, e.value))
207 fileobj.close()
208-
209+
210 if res:
211 message = _("HQ Entries import successful")
212 context.update({'message': message})
213-
214+
215 if errors:
216 cr.rollback()
217 view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'msf_homere_interface', 'payroll_import_error')
218 else:
219 view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'msf_homere_interface', 'payroll_import_confirmation')
220 view_id = view_id and view_id[1] or False
221-
222+
223 # This is to redirect to HQ Entries Tree View
224 context.update({'from': 'hq_entries_import'})
225-
226+
227 res_id = self.pool.get('hr.payroll.import.confirmation').create(cr, uid, {'filename': filename, 'created': created, 'total': processed, 'state': 'hq', 'errors': "\n".join(errors), 'nberrors': len(errors)}, context=context)
228-
229+
230 return {
231 'name': 'HQ Entries Import Confirmation',
232 'type': 'ir.actions.act_window',
233
234=== modified file 'account_hq_entries/wizard/hq_entries_unsplit.py'
235--- account_hq_entries/wizard/hq_entries_unsplit.py 2013-11-22 13:55:21 +0000
236+++ account_hq_entries/wizard/hq_entries_unsplit.py 2014-05-12 12:56:18 +0000
237@@ -23,7 +23,6 @@
238
239 from osv import osv
240 from osv import fields
241-from tools.translate import _
242
243 class hq_entries_unsplit(osv.osv_memory):
244 _name = 'hq.entries.unsplit'
245
246=== modified file 'account_override/account_move_line.py'
247--- account_override/account_move_line.py 2014-03-19 13:40:41 +0000
248+++ account_override/account_move_line.py 2014-05-12 12:56:18 +0000
249@@ -260,6 +260,7 @@
250 ),
251 'is_reconciled': fields.function(_get_is_reconciled, fnct_search=_search_is_reconciled, type='boolean', method=True, string="Is reconciled", help="Is that line partially/totally reconciled?"),
252 'balance_currency': fields.function(_balance_currency, fnct_search=_balance_currency_search, method=True, string='Balance Booking'),
253+ 'corrected_upstream': fields.boolean('Corrected from CC/HQ', readonly=True, help='This line have been corrected from Coordo or HQ level to a cost center that have the same level or superior.'),
254 'line_number': fields.integer(string='Line Number'),
255 'invoice_partner_link': fields.many2one('account.invoice', string="Invoice partner link", readonly=True,
256 help="This link implies this line come from the total of an invoice, directly from partner account.", ondelete="cascade"),
257@@ -273,6 +274,7 @@
258 'document_date': lambda self, cr, uid, c: c.get('document_date', False) or strftime('%Y-%m-%d'),
259 'date': lambda self, cr, uid, c: c.get('date', False) or strftime('%Y-%m-%d'),
260 'exported': lambda *a: False,
261+ 'corrected_upstream': lambda *a: False,
262 'line_number': lambda *a: 0,
263 }
264
265
266=== modified file 'analytic_distribution/analytic_line.py'
267--- analytic_distribution/analytic_line.py 2014-03-28 15:22:54 +0000
268+++ analytic_distribution/analytic_line.py 2014-05-12 12:56:18 +0000
269@@ -279,6 +279,8 @@
270 else:
271 # Update account
272 self.write(cr, uid, [aline.id], {'account_id': account_id}, context=context)
273+ # Set line as corrected upstream if we are in COORDO/HQ instance
274+ self.pool.get('account.move.line').corrected_upstream_marker(cr, uid, [aline.move_id.id], context=context)
275 return True
276
277 def check_analytic_account(self, cr, uid, ids, account_id, context=None):
278
279=== modified file 'analytic_distribution/wizard/mass_reallocation_wizard.py'
280--- analytic_distribution/wizard/mass_reallocation_wizard.py 2014-01-13 14:57:01 +0000
281+++ analytic_distribution/wizard/mass_reallocation_wizard.py 2014-05-12 12:56:18 +0000
282@@ -120,11 +120,11 @@
283 _columns = {
284 'account_id': fields.many2one('account.analytic.account', string="Analytic Account", required=True),
285 'date': fields.date('Posting date', required=True),
286- 'line_ids': fields.many2many('account.analytic.line', 'mass_reallocation_rel', 'wizard_id', 'analytic_line_id',
287+ 'line_ids': fields.many2many('account.analytic.line', 'mass_reallocation_rel', 'wizard_id', 'analytic_line_id',
288 string="Analytic Journal Items", required=True),
289 'state': fields.selection([('normal', 'Normal'), ('blocked', 'Blocked')], string="State", readonly=True),
290 'display_fp': fields.boolean('Display FP'),
291- 'other_ids': fields.many2many('account.analytic.line', 'mass_reallocation_other_rel', 'wizard_id', 'analytic_line_id',
292+ 'other_ids': fields.many2many('account.analytic.line', 'mass_reallocation_other_rel', 'wizard_id', 'analytic_line_id',
293 string="Non eligible analytic journal items", required=False, readonly=True),
294 }
295
296@@ -163,7 +163,7 @@
297 if fields is None:
298 fields = []
299 # Some verifications
300- if not context:
301+ if context is None:
302 context = {}
303 # Default behaviour
304 res = super(mass_reallocation_wizard, self).default_get(cr, uid, fields, context=context)
305@@ -173,7 +173,7 @@
306 res['account_id'] = context['analytic_account_from']
307 if context.get('active_ids', False) and context.get('active_model', False) == 'account.analytic.line':
308 res['line_ids'] = context.get('active_ids')
309- # Search which lines are eligible
310+ # Search which lines are eligible (add another criteria if we come from project)
311 search_args = [
312 ('id', 'in', context.get('active_ids')), '|', '|', '|', '|', '|', '|',
313 ('commitment_line_id', '!=', False), ('is_reallocated', '=', True),
314@@ -183,6 +183,19 @@
315 ('move_state', '=', 'draft'),
316 ('account_id.category', 'in', ['FREE1', 'FREE2'])
317 ]
318+ company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
319+ if company and company.instance_id and company.instance_id.level == 'project':
320+ search_args = [
321+ ('id', 'in', context.get('active_ids')), '|', '|', '|', '|', '|', '|', '|',
322+ ('commitment_line_id', '!=', False), ('is_reallocated', '=', True),
323+ ('is_reversal', '=', True),
324+ ('journal_id.type', 'in', ['engagement', 'revaluation']),
325+ ('from_write_off', '=', True),
326+ ('move_state', '=', 'draft'),
327+ ('account_id.category', 'in', ['FREE1', 'FREE2']),
328+ ('move_id.corrected_upstream', '=', True)
329+ ]
330+
331 search_ns_ids = self.pool.get('account.analytic.line').search(cr, uid, search_args, context=context)
332 # Process lines if exist
333 if search_ns_ids:
334@@ -231,7 +244,7 @@
335 Launch mass reallocation process
336 """
337 # Some verifications
338- if not context:
339+ if context is None:
340 context = {}
341 if isinstance(ids, (int, long)):
342 ids = [ids]
343@@ -241,6 +254,8 @@
344 process_ids = []
345 account_id = False
346 date = False
347+ company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
348+ level = company and company.instance_id and company.instance_id.level or ''
349 # Browse given wizard
350 for wiz in self.browse(cr, uid, ids, context=context):
351 to_process = [x.id for x in wiz.line_ids] or []
352@@ -266,6 +281,17 @@
353 ('move_state', '=', 'draft'),
354 ('account_id.category', 'in', ['FREE1', 'FREE2'])
355 ]
356+ if level == 'project':
357+ search_args = [
358+ ('id', 'in', context.get('active_ids')), '|', '|', '|', '|', '|', '|', '|',
359+ ('commitment_line_id', '!=', False), ('is_reallocated', '=', True),
360+ ('is_reversal', '=', True),
361+ ('journal_id.type', 'in', ['engagement', 'revaluation']),
362+ ('from_write_off', '=', True),
363+ ('move_state', '=', 'draft'),
364+ ('account_id.category', 'in', ['FREE1', 'FREE2']),
365+ ('move_id.corrected_upstream', '=', True)
366+ ]
367 search_ns_ids = self.pool.get('account.analytic.line').search(cr, uid, search_args)
368 if search_ns_ids:
369 non_supported_ids.extend(search_ns_ids)

Subscribers

People subscribed via source and target branches