Merge lp:~julie-w/unifield-server/US-4493 into lp:unifield-server
- US-4493
- Merge into trunk
Proposed by jftempo
Status: | Merged |
---|---|
Merged at revision: | 4866 |
Proposed branch: | lp:~julie-w/unifield-server/US-4493 |
Merge into: | lp:unifield-server |
Diff against target: |
191 lines (+44/-20) 4 files modified
bin/addons/account_hq_entries/hq_entries.py (+22/-6) bin/addons/account_hq_entries/wizard/hq_entries_unsplit.py (+2/-2) bin/addons/account_hq_entries/wizard/hq_entries_validation.py (+18/-11) bin/addons/msf_sync_data_server/data/sync_server.sync_rule.csv (+2/-1) |
To merge this branch: | bzr merge lp:~julie-w/unifield-server/US-4493 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
UniField Reviewer Team | Pending | ||
Review via email: mp+343384@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/addons/account_hq_entries/hq_entries.py' |
2 | --- bin/addons/account_hq_entries/hq_entries.py 2018-02-26 09:49:40 +0000 |
3 | +++ bin/addons/account_hq_entries/hq_entries.py 2018-04-17 09:11:48 +0000 |
4 | @@ -260,9 +260,9 @@ |
5 | |
6 | for line in self.browse(cr, uid, ids, context=context): |
7 | res.add(line.id) |
8 | - if line.is_original: |
9 | + if line.is_original and line.split_ids: |
10 | add_split(line) |
11 | - if line.is_split: |
12 | + if line.is_split and line.original_id: |
13 | # add original one |
14 | res.add(line.original_id.id) |
15 | # then other split lines |
16 | @@ -282,7 +282,9 @@ |
17 | # Prepare some values |
18 | res = set() |
19 | for line in self.browse(cr, uid, ids, context=context): |
20 | - if line.user_validated == False and (line.is_original or line.is_split): |
21 | + line_original = line.is_original and line.split_ids |
22 | + line_split = line.is_split and line.original_id |
23 | + if not line.user_validated and (line_original or line_split): |
24 | # First add original and split linked lines |
25 | for el in self.get_linked_lines(cr, uid, [line.id]): |
26 | res.add(el) |
27 | @@ -466,8 +468,13 @@ |
28 | |
29 | #US-921: Only save the user_validated value if the update comes from sync! |
30 | if context.get('sync_update_execution', False): |
31 | - if 'user_validated' in vals: |
32 | - return super(hq_entries, self).write(cr, uid, ids, {'user_validated': vals['user_validated']}, context) |
33 | + sync_vals = {} |
34 | + if 'user_validated' in vals: |
35 | + sync_vals.update({'user_validated': vals['user_validated']}) |
36 | + if 'is_original' in vals: # US-4169 also enable to sync the is_original tag |
37 | + sync_vals.update({'is_original': vals['is_original']}) |
38 | + if sync_vals: |
39 | + return super(hq_entries, self).write(cr, uid, ids, sync_vals, context) |
40 | return True |
41 | |
42 | if 'account_id' in vals: |
43 | @@ -480,6 +487,7 @@ |
44 | |
45 | def unlink(self, cr, uid, ids, context=None): |
46 | """ |
47 | + At synchro. only delete the entries having the tag is_split (= sync of an unsplit done in coordo). Otherwise: |
48 | Do not permit user to delete: |
49 | - validated HQ entries |
50 | - split entries |
51 | @@ -487,10 +495,18 @@ |
52 | """ |
53 | if isinstance(ids, (int, long)): |
54 | ids = [ids] |
55 | + if context is None: |
56 | + context = {} |
57 | + if context.get('sync_update_execution', False): |
58 | + new_ids = [] |
59 | + for hq_entry in self.browse(cr, uid, ids, fields_to_fetch=['is_split'], context=context): |
60 | + if hq_entry.is_split: |
61 | + new_ids.append(hq_entry.id) |
62 | + ids = new_ids |
63 | if not context.get('from', False) or context.get('from') != 'code' and ids: |
64 | if self.search(cr, uid, [('id', 'in', ids), ('user_validated', '=', True)]): |
65 | raise osv.except_osv(_('Error'), _('You cannot delete validated HQ Entries lines!')) |
66 | - if self.search(cr, uid, [('id', 'in', ids), ('is_split', '=', True)]): |
67 | + if self.search(cr, uid, [('id', 'in', ids), ('is_split', '=', True)]) and not context.get('sync_update_execution'): |
68 | raise osv.except_osv(_('Error'), _('You cannot delete split entries!')) |
69 | if self.search(cr, uid, [('id', 'in', ids), ('is_original', '=', True)]): |
70 | raise osv.except_osv(_('Error'), _('You cannot delete original entries!')) |
71 | |
72 | === modified file 'bin/addons/account_hq_entries/wizard/hq_entries_unsplit.py' |
73 | --- bin/addons/account_hq_entries/wizard/hq_entries_unsplit.py 2015-05-27 15:54:46 +0000 |
74 | +++ bin/addons/account_hq_entries/wizard/hq_entries_unsplit.py 2018-04-17 09:11:48 +0000 |
75 | @@ -61,10 +61,10 @@ |
76 | split_ids = [] |
77 | original_ids = [] |
78 | for line in wiz.process_ids: |
79 | - if line.is_original: |
80 | + if line.is_original and line.split_ids: |
81 | original_ids.append(line.id) |
82 | continue |
83 | - if line.is_split: |
84 | + if line.is_split and line.original_id: |
85 | split_ids.append(line.id) |
86 | continue |
87 | # Process |
88 | |
89 | === modified file 'bin/addons/account_hq_entries/wizard/hq_entries_validation.py' |
90 | --- bin/addons/account_hq_entries/wizard/hq_entries_validation.py 2018-04-03 10:18:51 +0000 |
91 | +++ bin/addons/account_hq_entries/wizard/hq_entries_validation.py 2018-04-17 09:11:48 +0000 |
92 | @@ -88,19 +88,21 @@ |
93 | return view |
94 | |
95 | # UTP-1101: Extract the method to create AD for being called also for the REV move |
96 | - def create_distribution_id(self, cr, uid, currency_id, line, account): |
97 | + def create_distribution_id(self, cr, uid, currency_id, line, account, split=False): |
98 | current_date = strftime('%Y-%m-%d') |
99 | line_cc_first = line.cost_center_id_first_value and line.cost_center_id_first_value.id or False |
100 | line_cc_id = line.cost_center_id and line.cost_center_id.id or False |
101 | line_account_first = line.account_id_first_value and line.account_id_first_value.id or False |
102 | |
103 | - cc_id = line_cc_first or line_cc_id or False |
104 | + # if split is True the line is a split line: use the current values instead of the original ones |
105 | + cc_id = (not split and line_cc_first) or line_cc_id or False |
106 | fp_id = line.analytic_id and line.analytic_id.id or False |
107 | - if line_cc_id != line_cc_first or line_account_first != line.account_id.id: |
108 | + if not split and (line_cc_id != line_cc_first or line_account_first != line.account_id.id): |
109 | fp_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'analytic_distribution', 'analytic_account_msf_private_funds')[1] |
110 | f1_id = line.free_1_id and line.free_1_id.id or False |
111 | f2_id = line.free_2_id and line.free_2_id.id or False |
112 | - destination_id = line.destination_id_first_value and line.destination_id_first_value.id or account.default_destination_id and account.default_destination_id.id or False |
113 | + destination_id = (split and line.destination_id.id) or line.destination_id_first_value.id or \ |
114 | + (account.default_destination_id and account.default_destination_id.id) or False |
115 | distrib_id = self.pool.get('analytic.distribution').create(cr, uid, {}) |
116 | if distrib_id: |
117 | common_vals = {'distribution_id':distrib_id, |
118 | @@ -124,10 +126,12 @@ |
119 | return distrib_id |
120 | |
121 | def create_move(self, cr, uid, ids, period_id=False, currency_id=False, |
122 | - date=None, journal=None, orig_acct=None, doc_date=None, context=None): |
123 | + date=None, journal=None, orig_acct=None, doc_date=None, split=False, context=None): |
124 | """ |
125 | Create a move with given hq entries lines |
126 | Return created lines (except counterpart lines) |
127 | + Note: if split is True, the lines handled are split lines => the account used is the last given by the user and |
128 | + not the account_id_first_value |
129 | """ |
130 | # Some verifications |
131 | if context is None: |
132 | @@ -173,9 +177,10 @@ |
133 | if not line.account_id_first_value: |
134 | raise osv.except_osv(_('Error'), _('An account is missing!')) |
135 | # create new distribution (only for expense accounts) |
136 | - distrib_id = self.create_distribution_id(cr, uid, currency_id, line, line.account_id_first_value) |
137 | + line_account = split and line.account_id or line.account_id_first_value |
138 | + distrib_id = self.create_distribution_id(cr, uid, currency_id, line, line_account, split=split) |
139 | vals = { |
140 | - 'account_id': line.account_id_first_value.id, |
141 | + 'account_id': line_account.id, |
142 | 'period_id': period_id, |
143 | 'journal_id': journal_id, |
144 | 'date': line.date, |
145 | @@ -261,10 +266,10 @@ |
146 | #+ original ones |
147 | #+ split ones |
148 | for line in lines: |
149 | - if line.is_original: |
150 | + if line.is_original and line.split_ids: |
151 | original_lines.add(line) |
152 | all_lines.add(line.id) |
153 | - elif line.is_split: |
154 | + elif line.is_split and line.original_id: |
155 | original_lines.add(line.original_id) |
156 | all_lines.add(line.original_id.id) |
157 | # Create the original line as it is (and its reverse) |
158 | @@ -284,7 +289,9 @@ |
159 | aml_obj.write(cr, uid, original_move.id, {'corrected': True, 'have_an_historic': True} , context=context) |
160 | original_account_id = original_move.account_id.id |
161 | |
162 | - new_res_move = self.create_move(cr, uid, [x.id for x in line.split_ids], line.period_id.id, line.currency_id.id, date=line.date, doc_date=line.document_date, journal=od_journal_id, orig_acct=original_account_id) |
163 | + new_res_move = self.create_move(cr, uid, [x.id for x in line.split_ids], line.period_id.id, |
164 | + line.currency_id.id, date=line.date, doc_date=line.document_date, |
165 | + journal=od_journal_id, orig_acct=original_account_id, split=True, context=context) |
166 | # original move line |
167 | original_ml_result = res_move[line.id] |
168 | # Mark new journal items as corrections for the first one |
169 | @@ -434,7 +441,7 @@ |
170 | self.write(cr, uid, [wiz.id], {'running': False}) |
171 | raise osv.except_osv(_('Warning'), _('Invalid analytic distribution!')) |
172 | # UTP-760: Do other modifications for split lines |
173 | - if line.is_original or line.is_split: |
174 | + if (line.is_original and line.split_ids) or (line.is_split and line.original_id): |
175 | split_change.append(line) |
176 | continue |
177 | if not line.user_validated: |
178 | |
179 | === modified file 'bin/addons/msf_sync_data_server/data/sync_server.sync_rule.csv' |
180 | --- bin/addons/msf_sync_data_server/data/sync_server.sync_rule.csv 2018-03-15 14:46:24 +0000 |
181 | +++ bin/addons/msf_sync_data_server/data/sync_server.sync_rule.csv 2018-04-17 09:11:48 +0000 |
182 | @@ -38,7 +38,8 @@ |
183 | msf_sync_data_server.fp_distribution_line_intermission,FALSE,TRUE,FALSE,FALSE,bidirectional,Bidirectional,"[('cost_center_id' , 'in' , ('account.analytic.account' , 'id' , [('code','=','cc-intermission')]))]","['amount', 'analytic_id/id', 'cost_center_id/id', 'currency_id/id', 'date', 'destination_id/id', 'distribution_id/id', 'name', 'percentage', 'source_date']",COORDINATIONS,funding.pool.distribution.line,,FP Distribution Line - Intermission ,Valid,,211 |
184 | msf_sync_data_server.free_1_distribution_line,TRUE,TRUE,TRUE,FALSE,bidirectional,Bidirectional,[],"['amount', 'analytic_id/id', 'currency_id/id', 'date', 'destination_id/id', 'distribution_id/id', 'name', 'percentage', 'source_date']",HQ + MISSION,free.1.distribution.line,,Free 1 Distribution Line,Valid,,220 |
185 | msf_sync_data_server.free_2_distribution_line,TRUE,TRUE,TRUE,FALSE,bidirectional,Bidirectional,[],"['amount', 'analytic_id/id', 'currency_id/id', 'date', 'destination_id/id', 'distribution_id/id', 'name', 'percentage', 'source_date']",HQ + MISSION,free.2.distribution.line,,Free 2 Distribution Line,Valid,,221 |
186 | -msf_sync_data_server.hq_entries,TRUE,TRUE,FALSE,FALSE,bidirectional,Bidirectional-Private,[],"['account_id/id', 'account_id_first_value/id', 'amount', 'analytic_id/id', 'analytic_id_first_value/id', 'analytic_state', 'cost_center_id/id', 'cost_center_id_first_value/id', 'currency_id/id', 'date', 'destination_id/id', 'destination_id_first_value/id', 'document_date', 'free_1_id/id', 'free_2_id/id', 'name', 'partner_txt', 'period_id/id', 'ref', 'user_validated']",OC,hq.entries,cost_center_id,HQ Entries,Valid,,250 |
187 | +msf_sync_data_server.hq_entries,TRUE,TRUE,FALSE,FALSE,bidirectional,Bidirectional-Private,[],"['account_id/id', 'account_id_first_value/id', 'amount', 'analytic_id/id', 'analytic_id_first_value/id', 'analytic_state', 'cost_center_id/id', 'cost_center_id_first_value/id', 'currency_id/id', 'date', 'destination_id/id', 'destination_id_first_value/id', 'document_date', 'free_1_id/id', 'free_2_id/id', 'name', 'partner_txt', 'period_id/id', 'ref', 'user_validated', 'is_split', 'is_original']",OC,hq.entries,cost_center_id,HQ Entries,Valid,,250 |
188 | +msf_sync_data_server.hq_entries_deletion,TRUE,TRUE,TRUE,FALSE,bidirectional,Up,"[('id', '=', 0)]","['name']",OC,hq.entries,,HQ Entries deletion,Valid,,251 |
189 | msf_sync_data_server.gl_journal_non_registers,TRUE,TRUE,FALSE,TRUE,bidirectional,Up,"[('type', '!=', 'cash'),('type', '!=', 'cheque'),('type', '!=', 'bank')]","['bank_journal_id/id','analytic_journal_id/id', 'code', 'currency/id', 'default_credit_account_id/id', 'default_debit_account_id/id', 'name', 'type','instance_id/id']",OC,account.journal,,GL Journal (Non Registers),Valid,,260 |
190 | msf_sync_data_server.gl_journals_registers_hq_bank,FALSE,TRUE,FALSE,TRUE,bidirectional,Down,"[('type', '=', 'bank'),('instance_id.level', '=', 'section')]","['bank_journal_id/id','analytic_journal_id/id', 'code', 'currency/id', 'default_credit_account_id/id', 'default_debit_account_id/id', 'name', 'type','instance_id/id']",OC,account.journal,,GL Journals – Bank Registers (HQ),Valid,,261 |
191 | msf_sync_data_server.gl_journals_registers_coordo_bank,FALSE,TRUE,FALSE,TRUE,bidirectional,Down,"[('type', '=', 'bank'),('instance_id.level', '=', 'coordo')]","['bank_journal_id/id','analytic_journal_id/id', 'code', 'currency/id', 'default_credit_account_id/id', 'default_debit_account_id/id', 'name', 'type','instance_id/id']",MISSION,account.journal,,GL Journals – Bank Registers (Coordo),Valid,,262 |