Merge lp:~unifield-team/unifield-wm/uf_1699_auto_creation_intermission_partner_coordo into lp:unifield-wm
- uf_1699_auto_creation_intermission_partner_coordo
- Merge into trunk
Proposed by: jftempo
Status: | Needs review |
---|---|
Proposed branch: | lp:~unifield-team/unifield-wm/uf_1699_auto_creation_intermission_partner_coordo |
Merge into: | lp:unifield-wm |
Diff against target: |
27810 lines (+13466/-6284) (has conflicts) 108 files modified
account_override/account.py (+56/-8) analytic_distribution/account_bank_statement.py (+6/-4) analytic_distribution/account_move_line.py (+66/-35) analytic_distribution/analytic_account.py (+45/-0) analytic_distribution/analytic_distribution.py (+8/-8) analytic_distribution/wizard/analytic_distribution_wizard.py (+1/-1) analytic_distribution_invoice/invoice.py (+1/-1) delivery_mechanism/delivery_mechanism.py (+487/-400) delivery_mechanism/wizard/change_product_memory_move_view.xml (+3/-3) import_data/import_data.py (+105/-26) kit/kit_creation.py (+0/-3) msf_audittrail/audittrail.py (+249/-625) msf_budget/__openerp__.py (+2/-0) msf_budget/analytic.py (+1/-2) msf_budget/msf_budget.py (+250/-75) msf_budget/msf_budget_line.py (+346/-347) msf_budget/msf_budget_report.xml (+7/-6) msf_budget/msf_budget_sequence.xml (+18/-0) msf_budget/msf_budget_tools.py (+225/-72) msf_budget/msf_budget_view.xml (+17/-35) msf_budget/msf_budget_workflow.xml (+2/-2) msf_budget/report/__init__.py (+0/-1) msf_budget/report/budget_criteria_xls.mako (+520/-615) msf_budget/report/budget_monthly_xls.mako (+187/-188) msf_budget/report/budget_summary.rml (+6/-6) msf_budget/report/monthly_budget.rml (+16/-16) msf_budget/report/msf_budget_summary.py (+52/-32) msf_budget/report/report_budget_actual.py (+20/-21) msf_budget/report/report_budget_criteria.py (+202/-133) msf_budget/report/report_csv_budget_monthly.py (+0/-94) msf_budget/report/report_local_expenses.py (+8/-9) msf_budget/report/report_pdf_budget_monthly.py (+0/-78) msf_budget/report/report_pdf_budget_summary.py (+12/-35) msf_budget/wizard/__init__.py (+1/-0) msf_budget/wizard/wizard_budget_criteria_export.py (+21/-9) msf_budget/wizard/wizard_budget_criteria_export_view.xml (+21/-1) msf_budget/wizard/wizard_budget_import.py (+227/-213) msf_budget/wizard/wizard_budget_import_finish.py (+1/-1) msf_budget/wizard/wizard_budget_summary.py (+58/-0) msf_budget/wizard/wizard_budget_summary.xml (+41/-0) msf_cross_docking/cross_docking.py (+9/-11) 
msf_currency_revaluation/res_currency.py (+3/-5) msf_doc_import/purchase_order.py (+9/-10) msf_doc_import/wizard/stock_partial_picking.py (+21/-21) msf_doc_import/wizard/wizard_in_simulation_screen.py (+28/-29) msf_field_access_rights/osv_override.py (+18/-10) msf_instance/add_instance.py (+28/-28) msf_instance/msf_instance.py (+14/-8) msf_order_date/order_dates.py (+0/-13) msf_outgoing/__openerp__.py (+12/-1) msf_outgoing/msf_outgoing.py (+1735/-1254) msf_outgoing/msf_outgoing_view.xml (+2/-2) msf_outgoing/wizard/__init__.py (+14/-1) msf_outgoing/wizard/change_product_move_processor.py (+126/-0) msf_outgoing/wizard/create_picking_processor.py (+390/-130) msf_outgoing/wizard/create_picking_processor_view.xml (+96/-1) msf_outgoing/wizard/incoming_shipment_processor.py (+543/-0) msf_outgoing/wizard/incoming_shipment_processor_view.xml (+138/-0) msf_outgoing/wizard/internal_move_processor.py (+581/-0) msf_outgoing/wizard/internal_move_processor_view.xml (+121/-0) msf_outgoing/wizard/outgoing_delivery_processor.py (+372/-0) msf_outgoing/wizard/outgoing_delivery_processor_view.xml (+102/-0) msf_outgoing/wizard/picking_processor.py (+997/-0) msf_outgoing/wizard/picking_processor_view.xml (+101/-0) msf_outgoing/wizard/ppl_processor.py (+696/-0) msf_outgoing/wizard/ppl_processor_view.xml (+122/-0) msf_outgoing/wizard/return_pack_shipment_processor.py (+263/-0) msf_outgoing/wizard/return_pack_shipment_processor_view.xml (+53/-0) msf_outgoing/wizard/return_ppl_processor.py (+319/-0) msf_outgoing/wizard/return_ppl_processor_view.xml (+74/-0) msf_outgoing/wizard/return_shipment_processor.py (+126/-0) msf_outgoing/wizard/return_shipment_processor_view.xml (+54/-0) msf_outgoing/wizard/shipment.py (+4/-2) msf_outgoing/wizard/shipment_processor.py (+423/-0) msf_outgoing/wizard/shipment_processor_view.xml (+75/-0) msf_outgoing/wizard/split_memory_move.py (+4/-4) msf_outgoing/wizard/split_move_processor.py (+139/-0) msf_outgoing/wizard/split_move_processor_view.xml (+23/-0) 
msf_outgoing/wizard/validate_picking_processor.py (+380/-0) msf_outgoing/wizard/validate_picking_processor_view.xml (+101/-0) msf_profile/msf_profile.py (+24/-3) order_nomenclature/order_nomenclature.py (+2/-2) order_types/stock.py (+61/-53) procurement_request/procurement_request.py (+2/-2) product_asset/product_asset.py (+0/-31) product_attributes/product_attributes.py (+0/-14) product_list/product_list.py (+1/-33) product_nomenclature/product_nomenclature.py (+271/-141) purchase_override/purchase.py (+182/-128) reason_types_moves/reason_type.py (+69/-112) register_accounting/account_bank_statement.py (+397/-399) register_accounting/account_move_line.py (+1/-1) register_accounting/invoice.py (+19/-18) register_accounting/wizard/direct_invoice.py (+6/-1) register_accounting/wizard/down_payment.py (+7/-6) register_accounting/wizard/hard_posting.py (+10/-7) register_accounting/wizard/temp_posting.py (+11/-8) register_accounting/wizard/wizard_register_import.py (+82/-40) res_currency_functional/account_move_compute_currency.py (+9/-9) return_claim/return_claim.py (+275/-238) return_claim/wizard/stock_partial_picking.py (+8/-115) specific_rules/specific_rules.py (+0/-36) specific_rules/wizard/stock_partial_picking.py (+3/-3) spreadsheet_xml/spreadsheet_xml_write.py (+4/-1) stock_override/stock.py (+379/-209) stock_override/wizard/__init__.py (+7/-7) stock_override/wizard/stock_partial_move.py (+1/-1) stock_override/wizard/stock_partial_picking.py (+31/-31) Text conflict in delivery_mechanism/delivery_mechanism.py Text conflict in register_accounting/account_bank_statement.py |
To merge this branch: | bzr merge lp:~unifield-team/unifield-wm/uf_1699_auto_creation_intermission_partner_coordo |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
UniField Reviewer Team | Pending | ||
Review via email: mp+212112@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'account_override/account.py' |
2 | --- account_override/account.py 2014-01-24 10:20:06 +0000 |
3 | +++ account_override/account.py 2014-03-21 08:49:45 +0000 |
4 | @@ -203,15 +203,18 @@ |
5 | company_account_active = company.additional_allocation |
6 | company_account = 7 # User for accounts that begins by "7" |
7 | # Prepare result |
8 | - for account in self.browse(cr, uid, ids, context=context): |
9 | - res[account.id] = False |
10 | - if account.user_type_code == 'expense': |
11 | - res[account.id] = True |
12 | - elif account.user_type_code == 'income': |
13 | + for account in self.read(cr, uid, ids, ['user_type_code', 'code'], context=context): |
14 | + account_id = account.get('id', False) |
15 | + user_type = account.get('user_type_code', False) |
16 | + code = account.get('code') |
17 | + res[account_id] = False |
18 | + if user_type == 'expense': |
19 | + res[account_id] = True |
20 | + elif user_type == 'income': |
21 | if not company_account_active: |
22 | - res[account.id] = True |
23 | - elif company_account_active and account.code.startswith(str(company_account)): |
24 | - res[account.id] = True |
25 | + res[account_id] = True |
26 | + elif company_account_active and code.startswith(str(company_account)): |
27 | + res[account_id] = True |
28 | return res |
29 | |
30 | def _search_is_analytic_addicted(self, cr, uid, ids, field_name, args, context=None): |
31 | @@ -350,6 +353,51 @@ |
32 | res.append((record['id'], name)) |
33 | return res |
34 | |
35 | + def _get_parent_of(self, cr, uid, ids, limit=10, context=None): |
36 | + """ |
37 | + Get all parents from the given accounts. |
38 | + To avoid problem of recursion, set a limit from 1 to 10. |
39 | + """ |
40 | + # Some checks |
41 | + if context is None: |
42 | + context = {} |
43 | + if not ids: |
44 | + return [] |
45 | + if isinstance(ids, (int, long)): |
46 | + ids = [ids] |
47 | + if limit < 1 or limit > 10: |
48 | + raise osv.except_osv(_('Error'), _("You're only allowed to use a limit between 1 and 10.")) |
49 | + # Prepare some values |
50 | + account_ids = list(ids) |
51 | + sql = """ |
52 | + SELECT parent_id |
53 | + FROM account_account |
54 | + WHERE id IN %s |
55 | + AND parent_id IS NOT NULL |
56 | + GROUP BY parent_id""" |
57 | + cr.execute(sql, (tuple(ids),)) |
58 | + if not cr.rowcount: |
59 | + return account_ids |
60 | + parent_ids = [x[0] for x in cr.fetchall()] |
61 | + account_ids += parent_ids |
62 | + stop = 1 |
63 | + while parent_ids: |
64 | + # Stop the search if we reach limit |
65 | + if stop >= limit: |
66 | + break |
67 | + stop += 1 |
68 | + cr.execute(sql, (tuple(parent_ids),)) |
69 | + if not cr.rowcount: |
70 | + parent_ids = False |
71 | + tmp_res = cr.fetchall() |
72 | + tmp_ids = [x[0] for x in tmp_res] |
73 | + if None in tmp_ids: |
74 | + parent_ids = False |
75 | + else: |
76 | + parent_ids = list(tmp_ids) |
77 | + account_ids += tmp_ids |
78 | + return account_ids |
79 | + |
80 | account_account() |
81 | |
82 | class account_journal(osv.osv): |
83 | |
84 | === modified file 'analytic_distribution/account_bank_statement.py' |
85 | --- analytic_distribution/account_bank_statement.py 2013-12-06 10:54:04 +0000 |
86 | +++ analytic_distribution/account_bank_statement.py 2014-03-21 08:49:45 +0000 |
87 | @@ -59,11 +59,13 @@ |
88 | # Prepare some values |
89 | res = {} |
90 | # Browse all given lines |
91 | - for line in self.browse(cr, uid, ids, context=context): |
92 | - if not line.analytic_distribution_id: |
93 | - res[line.id] = 'none' |
94 | + for line in self.read(cr, uid, ids, ['analytic_distribution_id', 'account_id'], context=context): |
95 | + if not line.get('analytic_distribution_id', False): |
96 | + res[line.get('id')] = 'none' |
97 | continue |
98 | - res[line.id] = self.pool.get('analytic.distribution')._get_distribution_state(cr, uid, line.analytic_distribution_id.id, False, line.account_id.id) |
99 | + distribution_id = line.get('analytic_distribution_id')[0] |
100 | + account_id = line.get('account_id', [False])[0] |
101 | + res[line.get('id')] = self.pool.get('analytic.distribution')._get_distribution_state(cr, uid, distribution_id, False, account_id) |
102 | return res |
103 | |
104 | _columns = { |
105 | |
106 | === modified file 'analytic_distribution/account_move_line.py' |
107 | --- analytic_distribution/account_move_line.py 2013-12-10 09:13:08 +0000 |
108 | +++ analytic_distribution/account_move_line.py 2014-03-21 08:49:45 +0000 |
109 | @@ -55,9 +55,16 @@ |
110 | ids = [ids] |
111 | # Prepare some values |
112 | res = {} |
113 | - # Browse all given lines |
114 | - for line in self.browse(cr, uid, ids, context=context): |
115 | - res[line.id] = self.pool.get('analytic.distribution')._get_distribution_state(cr, uid, line.analytic_distribution_id.id, line.move_id and line.move_id.analytic_distribution_id and line.move_id.analytic_distribution_id.id or False, line.account_id.id) |
116 | + distrib_obj = self.pool.get('analytic.distribution') |
117 | + sql = """ |
118 | + SELECT aml.id, aml.analytic_distribution_id AS distrib_id, m.analytic_distribution_id AS move_distrib_id, aml.account_id |
119 | + FROM account_move_line AS aml, account_move AS m |
120 | + WHERE aml.move_id = m.id |
121 | + AND aml.id IN %s |
122 | + ORDER BY aml.id;""" |
123 | + cr.execute(sql, (tuple(ids),)) |
124 | + for line in cr.fetchall(): |
125 | + res[line[0]] = distrib_obj._get_distribution_state(cr, uid, line[1], line[2], line[3]) |
126 | return res |
127 | |
128 | def _have_analytic_distribution_from_header(self, cr, uid, ids, name, arg, context=None): |
129 | @@ -119,52 +126,69 @@ |
130 | context = {} |
131 | acc_ana_line_obj = self.pool.get('account.analytic.line') |
132 | company_currency = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id |
133 | - for obj_line in self.browse(cr, uid, ids, context=context): |
134 | + obj_fields = [ |
135 | + 'debit_currency', |
136 | + 'credit_currency', |
137 | + 'analytic_distribution_id', |
138 | + 'move_id', |
139 | + 'state', |
140 | + 'journal_id', |
141 | + 'source_date', |
142 | + 'date', |
143 | + 'document_date', |
144 | + 'name', |
145 | + 'ref', |
146 | + 'currency_id', |
147 | + 'corrected_line_id', |
148 | + 'is_write_off', |
149 | + 'account_id', |
150 | + ] |
151 | + for obj_line in self.read(cr, uid, ids, obj_fields, context=context): |
152 | # Prepare some values |
153 | - amount = obj_line.debit_currency - obj_line.credit_currency |
154 | - line_distrib_id = obj_line.analytic_distribution_id and obj_line.analytic_distribution_id.id or obj_line.move_id and obj_line.move_id.analytic_distribution_id and obj_line.move_id.analytic_distribution_id.id or False |
155 | + amount = obj_line.get('debit_currency', 0.0) - obj_line.get('credit_currency', 0.0) |
156 | + journal = self.pool.get('account.journal').read(cr, uid, obj_line.get('journal_id', [False])[0], ['analytic_journal_id', 'name'], context=context) |
157 | + move = self.pool.get('account.move').read(cr, uid, obj_line.get('move_id', [False])[0], ['analytic_distribution_id', 'status', 'line_id'], context=context) |
158 | + account = self.pool.get('account.account').read(cr, uid, obj_line.get('account_id', [False])[0], ['is_analytic_addicted'], context=context) |
159 | + line_distrib_id = (obj_line.get('analytic_distribution_id', False) and obj_line.get('analytic_distribution_id')[0]) or (move.get('analytic_distribution_id', False) and move.get('analytic_distribution_id')[0]) or False |
160 | # When you create a journal entry manually, we should not have analytic lines if ONE line is invalid! |
161 | other_lines_are_ok = True |
162 | - if obj_line.move_id and obj_line.move_id.status and obj_line.move_id.status == 'manu': |
163 | - if obj_line.state != 'valid': |
164 | - other_lines_are_ok = False |
165 | - for other_line in obj_line.move_id.line_id: |
166 | - if other_line.state != 'valid': |
167 | - other_lines_are_ok = False |
168 | + result = self.search(cr, uid, [('move_id', '=', move.get('status', False)), ('move_id.status', '=', 'manu'), ('state', '!=', 'valid')], count=1) |
169 | + if result and result > 0 and move.get('status', False) == 'manu': |
170 | + other_lines_are_ok = False |
171 | # Check that line have analytic-a-holic account and have a distribution |
172 | - if line_distrib_id and obj_line.account_id.is_analytic_addicted and other_lines_are_ok: |
173 | - ana_state = self.pool.get('analytic.distribution')._get_distribution_state(cr, uid, line_distrib_id, {}, obj_line.account_id.id) |
174 | + if line_distrib_id and account.get('is_analytic_addicted', False) and other_lines_are_ok: |
175 | + ana_state = self.pool.get('analytic.distribution')._get_distribution_state(cr, uid, line_distrib_id, {}, account.get('id')) |
176 | # For manual journal entries, do not raise an error. But delete all analytic distribution linked to other_lines because if one line is invalid, all lines should not create analytic lines |
177 | - if ana_state == 'invalid' and obj_line.move_id.status == 'manu': |
178 | - ana_line_ids = acc_ana_line_obj.search(cr, uid, [('move_id', 'in', [x.id for x in obj_line.move_id.line_id])]) |
179 | + if ana_state == 'invalid' and move.get('status', '') == 'manu': |
180 | + ana_line_ids = acc_ana_line_obj.search(cr, uid, [('move_id', 'in', move.get('line_id', []))]) |
181 | acc_ana_line_obj.unlink(cr, uid, ana_line_ids) |
182 | continue |
183 | elif ana_state == 'invalid': |
184 | raise osv.except_osv(_('Warning'), _('Invalid analytic distribution.')) |
185 | - if not obj_line.journal_id.analytic_journal_id: |
186 | - raise osv.except_osv(_('Warning'),_("No Analytic Journal! You have to define an analytic journal on the '%s' journal!") % (obj_line.journal_id.name, )) |
187 | + if not journal.get('analytic_journal_id', False): |
188 | + raise osv.except_osv(_('Warning'),_("No Analytic Journal! You have to define an analytic journal on the '%s' journal!") % (journal.get('name', ''), )) |
189 | distrib_obj = self.pool.get('analytic.distribution').browse(cr, uid, line_distrib_id, context=context) |
190 | # create lines |
191 | for distrib_lines in [distrib_obj.funding_pool_lines, distrib_obj.free_1_lines, distrib_obj.free_2_lines]: |
192 | for distrib_line in distrib_lines: |
193 | - context.update({'date': obj_line.source_date or obj_line.date}) |
194 | + context.update({'date': obj_line.get('source_date', False) or obj_line.get('date', False)}) |
195 | anal_amount = distrib_line.percentage*amount/100 |
196 | line_vals = { |
197 | - 'name': obj_line.name, |
198 | - 'date': obj_line.date, |
199 | - 'ref': obj_line.ref, |
200 | - 'journal_id': obj_line.journal_id.analytic_journal_id.id, |
201 | - 'amount': -1 * self.pool.get('res.currency').compute(cr, uid, obj_line.currency_id.id, company_currency, |
202 | + 'name': obj_line.get('name', ''), |
203 | + 'date': obj_line.get('date', False), |
204 | + 'ref': obj_line.get('ref', ''), |
205 | + 'journal_id': journal.get('analytic_journal_id', [False])[0], |
206 | + 'amount': -1 * self.pool.get('res.currency').compute(cr, uid, obj_line.get('currency_id', [False])[0], company_currency, |
207 | anal_amount, round=False, context=context), |
208 | 'amount_currency': -1 * anal_amount, |
209 | 'account_id': distrib_line.analytic_id.id, |
210 | - 'general_account_id': obj_line.account_id.id, |
211 | - 'move_id': obj_line.id, |
212 | + 'general_account_id': account.get('id'), |
213 | + 'move_id': obj_line.get('id'), |
214 | 'distribution_id': distrib_obj.id, |
215 | 'user_id': uid, |
216 | - 'currency_id': obj_line.currency_id.id, |
217 | + 'currency_id': obj_line.get('currency_id', [False])[0], |
218 | 'distrib_line_id': '%s,%s'%(distrib_line._name, distrib_line.id), |
219 | - 'document_date': obj_line.document_date, |
220 | + 'document_date': obj_line.get('document_date', False), |
221 | } |
222 | # Update values if we come from a funding pool |
223 | if distrib_line._name == 'funding.pool.distribution.line': |
224 | @@ -172,11 +196,11 @@ |
225 | line_vals.update({'cost_center_id': distrib_line.cost_center_id and distrib_line.cost_center_id.id or False, |
226 | 'destination_id': destination_id,}) |
227 | # Update value if we come from a write-off |
228 | - if obj_line.is_write_off: |
229 | + if obj_line.get('is_write_off', False): |
230 | line_vals.update({'from_write_off': True,}) |
231 | # Add source_date value for account_move_line that are a correction of another account_move_line |
232 | - if obj_line.corrected_line_id and obj_line.source_date: |
233 | - line_vals.update({'source_date': obj_line.source_date}) |
234 | + if obj_line.get('corrected_line_id', False) and obj_line.get('source_date', False): |
235 | + line_vals.update({'source_date': obj_line.get('source_date', False)}) |
236 | self.pool.get('account.analytic.line').create(cr, uid, line_vals, context=context) |
237 | return True |
238 | |
239 | @@ -187,11 +211,18 @@ |
240 | """ |
241 | if not context: |
242 | context = {} |
243 | - # Search moves |
244 | + # Search manual moves to revalidate |
245 | move_ids = [] |
246 | - for ml in self.browse(cr, uid, ids): |
247 | - if ml.move_id and ml.move_id.state == 'manu': |
248 | - move_ids.append(ml.move_id.id) |
249 | + sql = """ |
250 | + SELECT m.id |
251 | + FROM account_move_line AS ml, account_move AS m |
252 | + WHERE ml.move_id = m.id |
253 | + AND m.status = 'manu' |
254 | + AND ml.id IN %s |
255 | + GROUP BY m.id |
256 | + ORDER BY m.id;""" |
257 | + cr.execute(sql, (tuple(ids),)) |
258 | + move_ids += cr.fetchall() |
259 | # Search analytic lines |
260 | ana_ids = self.pool.get('account.analytic.line').search(cr, uid, [('move_id', 'in', ids)]) |
261 | self.pool.get('account.analytic.line').unlink(cr, uid, ana_ids) |
262 | |
263 | === modified file 'analytic_distribution/analytic_account.py' |
264 | --- analytic_distribution/analytic_account.py 2014-01-14 17:29:00 +0000 |
265 | +++ analytic_distribution/analytic_account.py 2014-03-21 08:49:45 +0000 |
266 | @@ -190,6 +190,51 @@ |
267 | res.update(tmp_res) |
268 | return res |
269 | |
270 | + def _get_parent_of(self, cr, uid, ids, limit=10, context=None): |
271 | + """ |
272 | + Get all parents from the given accounts. |
273 | + To avoid problem of recursion, set a limit from 1 to 10. |
274 | + """ |
275 | + # Some checks |
276 | + if context is None: |
277 | + context = {} |
278 | + if not ids: |
279 | + return [] |
280 | + if isinstance(ids, (int, long)): |
281 | + ids = [ids] |
282 | + if limit < 1 or limit > 10: |
283 | + raise osv.except_osv(_('Error'), _("You're only allowed to use a limit between 1 and 10.")) |
284 | + # Prepare some values |
285 | + account_ids = list(ids) |
286 | + sql = """ |
287 | + SELECT parent_id |
288 | + FROM account_analytic_account |
289 | + WHERE id IN %s |
290 | + AND parent_id IS NOT NULL |
291 | + GROUP BY parent_id""" |
292 | + cr.execute(sql, (tuple(ids),)) |
293 | + if not cr.rowcount: |
294 | + return account_ids |
295 | + parent_ids = [x[0] for x in cr.fetchall()] |
296 | + account_ids += parent_ids |
297 | + stop = 1 |
298 | + while parent_ids: |
299 | + # Stop the search if we reach limit |
300 | + if stop >= limit: |
301 | + break |
302 | + stop += 1 |
303 | + cr.execute(sql, (tuple(parent_ids),)) |
304 | + if not cr.rowcount: |
305 | + parent_ids = False |
306 | + tmp_res = cr.fetchall() |
307 | + tmp_ids = [x[0] for x in tmp_res] |
308 | + if None in tmp_ids: |
309 | + parent_ids = False |
310 | + else: |
311 | + parent_ids = list(tmp_ids) |
312 | + account_ids += tmp_ids |
313 | + return account_ids |
314 | + |
315 | _columns = { |
316 | 'name': fields.char('Name', size=128, required=True, translate=1), |
317 | 'code': fields.char('Code', size=24), |
318 | |
319 | === modified file 'analytic_distribution/analytic_distribution.py' |
320 | --- analytic_distribution/analytic_distribution.py 2014-01-13 13:44:46 +0000 |
321 | +++ analytic_distribution/analytic_distribution.py 2014-03-21 08:49:45 +0000 |
322 | @@ -65,8 +65,8 @@ |
323 | # Have an analytic distribution on another account than analytic-a-holic account make no sense. So their analytic distribution is valid |
324 | logger = netsvc.Logger() |
325 | if account_id: |
326 | - account = self.pool.get('account.account').browse(cr, uid, account_id) |
327 | - if account and not account.is_analytic_addicted: |
328 | + account = self.pool.get('account.account').read(cr, uid, account_id, ['is_analytic_addicted']) |
329 | + if account and not account.get('is_analytic_addicted', False): |
330 | return 'valid' |
331 | if not id: |
332 | if parent_id: |
333 | @@ -80,10 +80,10 @@ |
334 | 'analytic_account_msf_private_funds')[1] |
335 | except ValueError: |
336 | fp_id = 0 |
337 | - account = self.pool.get('account.account').browse(cr, uid, account_id) |
338 | + account = self.pool.get('account.account').read(cr, uid, account_id, ['destination_ids']) |
339 | # Check Cost Center lines with destination/account link |
340 | for cc_line in distrib.cost_center_lines: |
341 | - if cc_line.destination_id.id not in [x.id for x in account.destination_ids]: |
342 | + if cc_line.destination_id.id not in account.get('destination_ids', []): |
343 | logger.notifyChannel("analytic distribution", netsvc.LOG_WARNING, _("%s: Error, destination not compatible with G/L account in CC lines") % (id or '')) |
344 | return 'invalid' |
345 | # Check Funding pool lines regarding: |
346 | @@ -91,7 +91,7 @@ |
347 | # - If analytic account is MSF Private funds |
348 | # - Cost center and funding pool compatibility |
349 | for fp_line in distrib.funding_pool_lines: |
350 | - if fp_line.destination_id.id not in [x.id for x in account.destination_ids]: |
351 | + if fp_line.destination_id.id not in account.get('destination_ids', []): |
352 | logger.notifyChannel("analytic distribution", netsvc.LOG_WARNING, _("%s: Error, destination not compatible with G/L account for FP lines") % (id or '')) |
353 | return 'invalid' |
354 | # If fp_line is MSF Private Fund, all is ok |
355 | @@ -276,11 +276,11 @@ |
356 | for dl_name in ['cost.center.distribution.line', 'funding.pool.distribution.line', 'free.1.distribution.line', 'free.2.distribution.line']: |
357 | dl_obj = self.pool.get(dl_name) |
358 | dl_ids = dl_obj.search(cr, uid, [('distribution_id', '=', distrib_id)], context=context) |
359 | - for dl in dl_obj.browse(cr, uid, dl_ids, context=context): |
360 | + for dl in dl_obj.read(cr, uid, dl_ids, ['percentage'], context=context): |
361 | dl_vals = { |
362 | - 'amount': round(dl.percentage * amount) / 100.0, |
363 | + 'amount': round(dl.get('percentage', False) * amount) / 100.0, |
364 | } |
365 | - dl_obj.write(cr, uid, [dl.id], dl_vals, context=context) |
366 | + dl_obj.write(cr, uid, [dl.get('id')], dl_vals, context=context) |
367 | return True |
368 | |
369 | def update_distribution_line_account(self, cr, uid, line_ids, account_id, context=None): |
370 | |
371 | === modified file 'analytic_distribution/wizard/analytic_distribution_wizard.py' |
372 | --- analytic_distribution/wizard/analytic_distribution_wizard.py 2014-02-06 16:53:32 +0000 |
373 | +++ analytic_distribution/wizard/analytic_distribution_wizard.py 2014-03-21 08:49:45 +0000 |
374 | @@ -1326,7 +1326,7 @@ |
375 | self.pool.get('account.commitment').create_analytic_lines(cr, uid, [wizard.commitment_id.id], context=context) |
376 | elif wizard.register_line_id and wizard.register_line_id.state == 'temp': |
377 | # Update analytic lines |
378 | - self.pool.get('account.bank.statement.line').update_analytic_lines(cr, uid, [wizard.register_line_id.id], distrib=distrib.id) |
379 | + self.pool.get('account.bank.statement.line').update_analytic_lines(cr, uid, [wizard.register_line_id], distrib=distrib.id) |
380 | return True |
381 | |
382 | analytic_distribution_wizard() |
383 | |
384 | === modified file 'analytic_distribution_invoice/invoice.py' |
385 | --- analytic_distribution_invoice/invoice.py 2014-01-16 10:50:16 +0000 |
386 | +++ analytic_distribution_invoice/invoice.py 2014-03-21 08:49:45 +0000 |
387 | @@ -328,7 +328,7 @@ |
388 | res = {} |
389 | for invl in self.browse(cr, uid, ids): |
390 | res[invl.id] = [] |
391 | - for ml in invl.move_lines or []: |
392 | + for ml in (invl.move_lines or []): |
393 | if ml.analytic_lines: |
394 | res[invl.id] = self.pool.get('account.analytic.line').get_corrections_history(cr, uid, [x.id for x in ml.analytic_lines]) |
395 | return res |
396 | |
397 | === modified file 'delivery_mechanism/delivery_mechanism.py' |
398 | --- delivery_mechanism/delivery_mechanism.py 2014-02-26 13:56:21 +0000 |
399 | +++ delivery_mechanism/delivery_mechanism.py 2014-03-21 08:49:45 +0000 |
400 | @@ -2,7 +2,7 @@ |
401 | ############################################################################## |
402 | # |
403 | # OpenERP, Open Source Management Solution |
404 | -# Copyright (C) 2011 TeMPO Consulting, MSF |
405 | +# Copyright (C) 2011 TeMPO Consulting, MSF |
406 | # |
407 | # This program is free software: you can redistribute it and/or modify |
408 | # it under the terms of the GNU Affero General Public License as |
409 | @@ -41,33 +41,9 @@ |
410 | } |
411 | _defaults = {'line_number': 0, |
412 | 'in_out_updated': False, |
413 | - 'original_qty_partial': -1} |
414 | + 'original_qty_partial':-1} |
415 | _order = 'line_number, date_expected desc, id' |
416 | - |
417 | - def create(self, cr, uid, vals, context=None): |
418 | - ''' |
419 | - add the corresponding line number |
420 | - |
421 | - if a corresponding purchase order line or sale order line exist |
422 | - we take the line number from there |
423 | - ''' |
424 | - # objects |
425 | - picking_obj = self.pool.get('stock.picking') |
426 | - seq_pool = self.pool.get('ir.sequence') |
427 | |
428 | - # line number correspondance to be checked with Magali |
429 | - if vals.get('picking_id', False): |
430 | - if not vals.get('line_number', False): |
431 | - # new number needed - gather the line number from the sequence |
432 | - sequence_id = picking_obj.read(cr, uid, [vals['picking_id']], ['move_sequence_id'], context=context)[0]['move_sequence_id'][0] |
433 | - line = seq_pool.get_id(cr, uid, sequence_id, code_or_id='id', context=context) |
434 | - # update values with line value |
435 | - vals.update({'line_number': line}) |
436 | - |
437 | - # create the new object |
438 | - result = super(stock_move, self).create(cr, uid, vals, context=context) |
439 | - return result |
440 | - |
441 | def copy_data(self, cr, uid, id, defaults=None, context=None): |
442 | ''' |
443 | If the line_number is not in the defaults, we set it to False. |
444 | @@ -78,7 +54,7 @@ |
445 | defaults = {} |
446 | if context is None: |
447 | context = {} |
448 | - |
449 | + |
450 | # we set line_number, so it will not be copied in copy_data - keepLineNumber - the original Line Number will be kept |
451 | if 'line_number' not in defaults and not context.get('keepLineNumber', False): |
452 | defaults.update({'line_number': False}) |
453 | @@ -91,7 +67,7 @@ |
454 | input_loc = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'msf_cross_docking', 'stock_location_input')[1] |
455 | defaults.update(location_dest_id=input_loc) |
456 | return super(stock_move, self).copy_data(cr, uid, id, defaults, context=context) |
457 | - |
458 | + |
459 | def unlink(self, cr, uid, ids, context=None): |
460 | ''' |
461 | check the numbering on deletion |
462 | @@ -101,37 +77,37 @@ |
463 | context = {} |
464 | if isinstance(ids, (int, long)): |
465 | ids = [ids] |
466 | - |
467 | + |
468 | # objects |
469 | tools_obj = self.pool.get('sequence.tools') |
470 | - |
471 | + |
472 | if not context.get('skipResequencing', False): |
473 | - # re sequencing only happen if purchase order is draft (behavior 1) |
474 | + # re sequencing only happen if purchase order is draft (behavior 1) |
475 | # get ids with corresponding po at draft state |
476 | draft_not_wkf_ids = self.allow_resequencing(cr, uid, ids, context=context) |
477 | tools_obj.reorder_sequence_number_from_unlink(cr, uid, draft_not_wkf_ids, 'stock.picking', 'move_sequence_id', 'stock.move', 'picking_id', 'line_number', context=context) |
478 | - |
479 | + |
480 | return super(stock_move, self).unlink(cr, uid, ids, context=context) |
481 | - |
482 | + |
483 | def allow_resequencing(self, cr, uid, ids, context=None): |
484 | ''' |
485 | define if a resequencing has to be performed or not |
486 | - |
487 | + |
488 | return the list of ids for which resequencing will can be performed |
489 | - |
490 | + |
491 | linked to Picking + Picking draft + not linked to Po/Fo |
492 | ''' |
493 | # objects |
494 | pick_obj = self.pool.get('stock.picking') |
495 | - |
496 | + |
497 | resequencing_ids = [x.id for x in self.browse(cr, uid, ids, context=context) if x.picking_id and pick_obj.allow_resequencing(cr, uid, x.picking_id, context=context)] |
498 | return resequencing_ids |
499 | - |
500 | + |
501 | def _create_chained_picking_move_values_hook(self, cr, uid, context=None, *args, **kwargs): |
502 | ''' |
503 | Please copy this to your module's method also. |
504 | This hook belongs to the action_process method from stock>stock.py>stock_picking |
505 | - |
506 | + |
507 | - set the line number of the original picking, could have used the keepLineNumber flag, but used hook to modify original class minimally |
508 | ''' |
509 | if context is None: |
510 | @@ -160,44 +136,44 @@ |
511 | def _do_partial_hook(self, cr, uid, ids, context, *args, **kwargs): |
512 | ''' |
513 | hook to update defaults data |
514 | - |
515 | + |
516 | - update the line number, keep original line number |
517 | >> performed for all cases (too few (copy - new numbering policy), complete (simple update - no impact), to many (simple update - no impact) |
518 | ''' |
519 | # variable parameters |
520 | move = kwargs.get('move', False) |
521 | assert move, 'delivery_mechanism.py >> stock_move: _do_partial_hook - missing move' |
522 | - |
523 | + |
524 | # calling super method |
525 | defaults = super(stock_move, self)._do_partial_hook(cr, uid, ids, context, *args, **kwargs) |
526 | assert defaults is not None, 'delivery_mechanism.py >> stock_move: _do_partial_hook - missing defaults' |
527 | # update the line number, copy original line_number value |
528 | defaults.update({'line_number': move.line_number}) |
529 | - |
530 | + |
531 | return defaults |
532 | - |
533 | + |
534 | def get_mirror_move(self, cr, uid, ids, data_back, context=None): |
535 | ''' |
536 | return a dictionary with IN for OUT and OUT for IN, if exists, False otherwise |
537 | - |
538 | + |
539 | only one mirror object should exist for each object (to check) |
540 | return objects which are not done |
541 | - |
542 | + |
543 | same sale_line_id/purchase_line_id - same product - same quantity |
544 | - |
545 | + |
546 | IN: move -> po line -> procurement -> so line -> move |
547 | OUT: move -> so line -> procurement -> po line -> move |
548 | - |
549 | + |
550 | I dont use move.move_dest_id because of back orders both on OUT and IN sides |
551 | ''' |
552 | if context is None: |
553 | context = {} |
554 | if isinstance(ids, (int, long)): |
555 | ids = [ids] |
556 | - |
557 | + |
558 | # objects |
559 | so_line_obj = self.pool.get('sale.order.line') |
560 | - |
561 | + |
562 | res = {} |
563 | for obj in self.browse(cr, uid, ids, context=context): |
564 | res[obj.id] = {'move_id': False, 'picking_id': False, 'picking_version': 0, 'quantity': 0, 'moves': []} |
565 | @@ -215,33 +191,50 @@ |
566 | if so_line_ids: |
567 | # find the corresponding OUT move |
568 | # move_ids = self.search(cr, uid, [('product_id', '=', obj.product_id.id), ('product_qty', '=', obj.product_qty), ('state', 'in', ('assigned', 'confirmed')), ('sale_line_id', '=', so_line_ids[0])], context=context) |
569 | - move_ids = self.search(cr, uid, [('product_id', '=', data_back['product_id']), |
570 | - ('state', 'in', ('assigned', 'confirmed')), |
571 | + move_ids = self.search(cr, uid, [('product_id', '=', data_back['product_id']), |
572 | + ('state', 'in', ('assigned', 'confirmed')), |
573 | ('sale_line_id', '=', so_line_ids[0]), |
574 | - ('in_out_updated', '=', False)], order="state desc", context=context) |
575 | + ('in_out_updated', '=', False), |
576 | + ('picking_id.type', '=', 'out'), |
577 | + ('processed_stock_move', '=', False), |
578 | + ], order="state desc", context=context) |
579 | # list of matching out moves |
580 | integrity_check = [] |
581 | for move in self.browse(cr, uid, move_ids, context=context): |
582 | # move from draft picking or standard picking |
583 | - if (move.product_qty != 0.00 and not move.processed_stock_move and move.picking_id.subtype == 'picking' and not move.picking_id.backorder_id and move.picking_id.state == 'draft') or (move.picking_id.subtype == 'standard') and move.picking_id.type == 'out': |
584 | + if (move.product_qty != 0.00 and move.picking_id.subtype == 'picking' and not move.picking_id.backorder_id and move.picking_id.state == 'draft') or (move.picking_id.subtype == 'standard'): |
585 | integrity_check.append(move) |
586 | # return the first one matching |
587 | if integrity_check: |
588 | if all([not move.processed_stock_move for move in integrity_check]): |
589 | # the out stock moves (draft picking or std out) have not yet been processed, we can therefore update them |
590 | - res[obj.id]['move_id'] = integrity_check[0].id |
591 | - res[obj.id]['moves'] = integrity_check |
592 | - res[obj.id]['picking_id'] = integrity_check[0].picking_id.id |
593 | - res[obj.id]['picking_version'] = integrity_check[0].picking_id.update_version_from_in_stock_picking |
594 | - res[obj.id]['quantity'] = integrity_check[0].product_qty |
595 | + res[obj.id].update({ |
596 | + 'move_id': integrity_check[0].id, |
597 | + 'moves': integrity_check, |
598 | + 'picking_id': integrity_check[0].picking_id.id, |
599 | + 'picking_version': integrity_check[0].picking_id.update_version_from_in_stock_picking, |
600 | + 'quantity': integrity_check[0].product_qty, |
601 | + }) |
602 | else: |
603 | # the corresponding OUT move have been processed completely or partially,, we do not update the OUT |
604 | - self.log(cr, uid, integrity_check[0].id, _('The Stock Move %s from %s has already been processed and is therefore not updated.')%(integrity_check[0].name, integrity_check[0].picking_id.name)) |
605 | - |
606 | + self.log(cr, uid, integrity_check[0].id, _('The Stock Move %s from %s has already been processed and is therefore not updated.') % (integrity_check[0].name, integrity_check[0].picking_id.name)) |
607 | + |
608 | else: |
609 | # we are looking for corresponding IN from on_order purchase order |
610 | assert False, 'This method is not implemented for OUT or Internal moves' |
611 | - |
612 | + |
613 | + return res |
614 | + |
615 | + def create_data_back(self, move): |
616 | + ''' |
617 | + build data_back dictionary |
618 | + ''' |
619 | + res = {'id': move.id, |
620 | + 'name': move.product_id.partner_ref, |
621 | + 'product_id': move.product_id.id, |
622 | + 'product_uom': move.product_uom.id, |
623 | + 'product_qty': move.product_qty, |
624 | + } |
625 | return res |
626 | |
627 | def hook__create_chained_picking(self, cr, uid, pick_values, picking): |
628 | @@ -251,7 +244,7 @@ |
629 | res['auto_picking'] = picking.type == 'in' and picking.move_lines[0]['direct_incoming'] |
630 | |
631 | return res |
632 | - |
633 | + |
634 | stock_move() |
635 | |
636 | |
637 | @@ -263,12 +256,12 @@ |
638 | _columns = {'move_sequence_id': fields.many2one('ir.sequence', string='Moves Sequence', help="This field contains the information related to the numbering of the moves of this picking.", required=True, ondelete='cascade'), |
639 | 'change_reason': fields.char(string='Change Reason', size=1024, readonly=True), |
640 | } |
641 | - |
642 | + |
643 | def _stock_picking_action_process_hook(self, cr, uid, ids, context=None, *args, **kwargs): |
644 | ''' |
645 | Please copy this to your module's method also. |
646 | This hook belongs to the action_process method from stock>stock.py>stock_picking |
647 | - |
648 | + |
649 | - allow to modify the data for wizard display |
650 | ''' |
651 | if context is None: |
652 | @@ -283,7 +276,7 @@ |
653 | model=res['res_model'], |
654 | step='default')) |
655 | return res |
656 | - |
657 | + |
658 | def create(self, cr, uid, vals, context=None): |
659 | ''' |
660 | create the sequence for the numbering of the lines |
661 | @@ -294,9 +287,9 @@ |
662 | seq_pool = self.pool.get('ir.sequence') |
663 | po_obj = self.pool.get('purchase.order') |
664 | so_obj = self.pool.get('sale.order') |
665 | - |
666 | + |
667 | new_seq_id = self.create_sequence(cr, uid, vals, context=context) |
668 | - vals.update({'move_sequence_id': new_seq_id,}) |
669 | + vals.update({'move_sequence_id': new_seq_id, }) |
670 | # if from order, we udpate the sequence to match the order's one |
671 | # line number correspondance to be checked with Magali |
672 | # I keep that code deactivated, as when the picking is wkf, hide_new_button must always be true |
673 | @@ -307,13 +300,13 @@ |
674 | elif vals.get('sale_id') and False: |
675 | seq_id = po_obj.read(cr, uid, [vals.get('sale_id')], ['sequence_id'], context=context)[0]['sequence_id'][0] |
676 | seq_value = seq_pool.read(cr, uid, [seq_id], ['number_next'], context=context)[0]['number_next'] |
677 | - |
678 | + |
679 | if seq_value: |
680 | # update sequence value of stock picking to match order's one |
681 | - seq_pool.write(cr, uid, [new_seq_id], {'number_next': seq_value,}) |
682 | - |
683 | + seq_pool.write(cr, uid, [new_seq_id], {'number_next': seq_value, }) |
684 | + |
685 | return super(stock_picking, self).create(cr, uid, vals, context=context) |
686 | - |
687 | + |
688 | def allow_resequencing(self, cr, uid, pick_browse, context=None): |
689 | ''' |
690 | allow resequencing criteria |
691 | @@ -321,7 +314,7 @@ |
692 | if pick_browse.state == 'draft' and not pick_browse.purchase_id and not pick_browse.sale_id: |
693 | return True |
694 | return False |
695 | - |
696 | + |
697 | def _do_partial_hook(self, cr, uid, ids, context, *args, **kwargs): |
698 | ''' |
699 | hook to update defaults data |
700 | @@ -329,42 +322,30 @@ |
701 | # variable parameters |
702 | move = kwargs.get('move') |
703 | assert move, 'delivery_mechanism.py >> stock_picking: _do_partial_hook - missing move' |
704 | - |
705 | + |
706 | # calling super method |
707 | defaults = super(stock_picking, self)._do_partial_hook(cr, uid, ids, context, *args, **kwargs) |
708 | assert defaults is not None, 'delivery_mechanism.py >> stock_picking: _do_partial_hook - missing defaults' |
709 | # update the line number, copy original line_number value |
710 | defaults.update({'line_number': move.line_number}) |
711 | - |
712 | - #UTP-972: Set the original total qty of the original move to the new partial move, for sync purpose only |
713 | + |
714 | + # UTP-972: Set the original total qty of the original move to the new partial move, for sync purpose only |
715 | orig_qty = move.product_qty |
716 | if move.original_qty_partial and move.original_qty_partial != -1: |
717 | orig_qty = move.original_qty_partial |
718 | defaults.update({'original_qty_partial': orig_qty}) |
719 | - |
720 | + |
721 | return defaults |
722 | - |
723 | - def create_data_back(self, cr, uid, move, context=None): |
724 | - ''' |
725 | - build data_back dictionary |
726 | - ''' |
727 | - res = {'id': move.id, |
728 | - 'name': move.product_id.partner_ref, |
729 | - 'product_id': move.product_id.id, |
730 | - 'product_uom': move.product_uom.id, |
731 | - 'product_qty': move.product_qty, |
732 | - } |
733 | - return res |
734 | - |
735 | + |
736 | def _update_mirror_move(self, cr, uid, ids, data_back, diff_qty, out_move=False, context=None): |
737 | ''' |
738 | update the mirror move with difference quantity diff_qty |
739 | - |
740 | + |
741 | if out_move is provided, it is used for copy if another cannot be found (meaning the one provided does |
742 | not fit anyhow) |
743 | - |
744 | + |
745 | # NOTE: the price is not update in OUT move according to average price computation. this is an open point. |
746 | - |
747 | + |
748 | if diff_qty < 0, the qty is decreased |
749 | if diff_qty > 0, the qty is increased |
750 | ''' |
751 | @@ -383,8 +364,8 @@ |
752 | 'product_uos_qty': 0, |
753 | 'product_uom': data_back['product_uom'], |
754 | 'state': 'confirmed', |
755 | - 'prodlot_id': False, # reset batch number |
756 | - 'asset_id': False, # reset asset |
757 | + 'prodlot_id': False, # reset batch number |
758 | + 'asset_id': False, # reset asset |
759 | } |
760 | out_move_id = move_obj.copy(cr, uid, out_move, values, context=context) |
761 | # update quantity |
762 | @@ -400,7 +381,7 @@ |
763 | new_move_id = move_obj.copy(cr, uid, out_move_id, {'product_qty' : diff_qty, |
764 | 'product_uom': data_back['product_uom'], |
765 | 'product_uos': data_back['product_uom'], |
766 | - 'product_uos_qty': diff_qty,}, context=context) |
767 | + 'product_uos_qty': diff_qty, }, context=context) |
768 | move_obj.action_confirm(cr, uid, [new_move_id], context=context) |
769 | # if present_qty == 0.00: |
770 | # move_obj.write(cr, uid, [out_move_id], {'state': 'draft'}) |
771 | @@ -409,14 +390,14 @@ |
772 | move_obj.write(cr, uid, [out_move_id], {'product_qty' : new_qty, |
773 | 'product_uom': data['product_uom'][0], |
774 | 'product_uos': data['product_uom'][0], |
775 | - 'product_uos_qty': new_qty,}, context=context) |
776 | - |
777 | + 'product_uos_qty': new_qty, }, context=context) |
778 | + |
779 | # log the modification |
780 | # log creation message |
781 | - move_obj.log(cr, uid, out_move_id, _('The Stock Move %s from %s has been updated to %s %s.')%(stock_move_name, picking_out_name, new_qty, uom_name)) |
782 | + move_obj.log(cr, uid, out_move_id, _('The Stock Move %s from %s has been updated to %s %s.') % (stock_move_name, picking_out_name, new_qty, uom_name)) |
783 | # return updated move or False |
784 | return out_move_id |
785 | - |
786 | + |
787 | def _do_incoming_shipment_first_hook(self, cr, uid, ids, context=None, *args, **kwargs): |
788 | ''' |
789 | hook to update values for stock move if first encountered |
790 | @@ -424,31 +405,188 @@ |
791 | values = kwargs.get('values') |
792 | assert values is not None, 'missing values' |
793 | return values |
794 | - |
795 | - def do_incoming_shipment(self, cr, uid, ids, context=None): |
796 | - ''' |
797 | - validate the picking ticket from selected stock moves |
798 | - |
799 | - move here the logic of validate picking |
800 | - available for picking loop |
801 | - ''' |
802 | - assert context, 'context is not defined' |
803 | - assert 'partial_datas' in context, 'partial datas not present in context' |
804 | - partial_datas = context['partial_datas'] |
805 | - if isinstance(ids, (int, long)): |
806 | - ids = [ids] |
807 | - |
808 | - # sequence object |
809 | + |
810 | + def _get_db_data_dict(self, cr, uid): |
811 | + """ |
812 | + Get some data from data.xml file (like stock locations, Unifield setup...) |
813 | + """ |
814 | + # Objects |
815 | + data_obj = self.pool.get('ir.model.data') |
816 | + loc_obj = self.pool.get('stock.location') |
817 | + |
818 | + setup = self.pool.get('unifield.setup.configuration').get_config(cr, uid) |
819 | + |
820 | + cd_loc = loc_obj.get_cross_docking_location(cr, uid) |
821 | + service_loc = loc_obj.get_service_location(cr, uid) |
822 | + non_stock = loc_obj.search(cr, uid, [('non_stockable_ok', '=', True)]) |
823 | + if non_stock: |
824 | + non_stock = non_stock[0] |
825 | + input_loc = data_obj.get_object_reference(cr, uid, 'msf_cross_docking', 'stock_location_input')[1] |
826 | + |
827 | + db_data = { |
828 | + 'setup': setup, |
829 | + 'cd_loc': cd_loc, |
830 | + 'service_loc': service_loc, |
831 | + 'non_stock': non_stock, |
832 | + 'input_loc': input_loc |
833 | + } |
834 | + |
835 | + return db_data |
836 | + |
837 | + def _compute_average_values(self, cr, uid, move, line, product_availability, context=None): |
838 | + """ |
839 | + Compute the average price of the product according to processed quantities |
840 | + """ |
841 | + # Objects |
842 | + uom_obj = self.pool.get('product.uom') |
843 | + currency_obj = self.pool.get('res.currency') |
844 | + product_obj = self.pool.get('product.product') |
845 | + |
846 | + if context is None: |
847 | + context = {} |
848 | + |
849 | + average_values = {} |
850 | + |
851 | + move_currency_id = move.company_id.currency_id.id |
852 | + context['currency_id'] = move_currency_id |
853 | + |
854 | + qty = line.quantity |
855 | + if line.uom_id.id != line.product_id.uom_id.id: |
856 | + qty = uom_obj._compute_qty(cr, uid, line.uom_id.id, line.quantity, line.product_id.uom_id.id) |
857 | + |
858 | + product_availability.setdefault(line.product_id.id, line.product_id.qty_available) |
859 | + product_availability[line.product_id.id] += qty |
860 | + |
861 | + if qty > 0.00: |
862 | + new_price = line.cost |
863 | + # Recompute unit price if the currency used is not the functional currency |
864 | + if line.currency.id != move_currency_id: |
865 | + new_price = currency_obj.compute(cr, uid, line.currency.id, move_currency_id, |
866 | + new_price, round=False, context=context) |
867 | + |
868 | + # Recompute unit price if the UoM received is not the default UoM of the product |
869 | + if line.uom_id.id != line.product_id.uom_id.id: |
870 | + new_price = uom_obj._compute_price(cr, uid, line.uom_id.id, new_price, |
871 | + line.product_id.uom_id.id) |
872 | + |
873 | + new_std_price = 0.00 |
874 | + if line.product_id.qty_available <= 0.00: |
875 | + new_std_price = new_price |
876 | + else: |
877 | + # Get the current price |
878 | + current_price = product_obj.price_get(cr, uid, [line.product_id.id], 'standard_price', context=context)[line.product_id.id] |
879 | + # Check no division by zero |
880 | + if product_availability[line.product_id.id]: |
881 | + new_std_price = ((current_price * product_availability[line.product_id.id])\ |
882 | + + (new_price * qty)) / (product_availability[line.product_id.id] + qty) |
883 | + |
884 | + # Write the field according to price type field |
885 | + product_obj.write(cr, uid, [line.product_id.id], {'standard_price': new_std_price}) |
886 | + |
887 | + # Record the values that were chosen in the wizard, so they can be |
888 | + # used for inventory valuation of real-time valuation is enabled. |
889 | + average_values = { |
890 | + 'price_unit': new_price, |
891 | + 'price_currency_id': line.currency.id, |
892 | + } |
893 | + |
894 | + return average_values |
895 | + |
896 | + def _get_values_from_line(self, cr, uid, move, line, db_data, context=None): |
897 | + """ |
898 | + Prepare the value for a processed move according to line values |
899 | + """ |
900 | + # Objects |
901 | + uom_obj = self.pool.get('product.uom') |
902 | + |
903 | + if context is None: |
904 | + context = {} |
905 | + |
906 | + wizard = line.wizard_id |
907 | + |
908 | + values = { |
909 | + 'name': line.product_id.partner_ref, |
910 | + 'product_id': line.product_id.id, |
911 | + 'original_qty_partial': move.product_qty, |
912 | + 'product_qty': line.quantity, |
913 | + 'product_uom': line.uom_id.id, |
914 | + 'product_uos_qty': line.quantity, |
915 | + 'product_uos': line.uom_id.id, |
916 | + 'prodlot_id': line.prodlot_id and line.prodlot_id.id or False, |
917 | + # 'asset_id': line.asset_id and line.asset_id.id or False, |
918 | + 'change_reason': line.change_reason, |
919 | + # Values from incoming wizard |
920 | + 'direct_incoming': line.wizard_id.direct_incoming, |
921 | + # Values for Direct Purchase Order |
922 | + 'sync_dpo': move.dpo_line_id and True or move.sync_dpo, |
923 | + 'dpo_line_id': move.dpo_line_id and move.dpo_line_id.id or False, |
924 | + } |
925 | + |
926 | + # UTP-872: Don't change the quantity if the move is canceled |
927 | + # If the quantity is changed to 0.00, a backorder is created |
928 | + # for canceled moves |
929 | + if move.state == 'cancel': |
930 | + values.update({ |
931 | + 'product_qty': move.product_qty, |
932 | + 'product_uos_qty': move.product_uos_qty |
933 | + }) |
934 | + |
935 | + # UTP-872: Added also the state into the move line if the state comes from the sync |
936 | + if line.state: |
937 | + values['state'] = line.state |
938 | + |
939 | + if line.cost: |
940 | + values['price_unit'] = line.cost |
941 | + elif line.uom_id.id != move.product_uom.id: |
942 | + new_price = uom_obj._compute_price(cr, uid, move.product_uom.id, move.price_unit, line.uom_id.id) |
943 | + values['price_unit'] = new_price |
944 | + |
945 | + # We check the dest_type for INCOMING shipment (and not the source_type which is reserved for OUTGOING shipment) |
946 | + if wizard.dest_type == 'to_cross_docking': |
947 | + if db_data.get('setup').allocation_setup == 'unallocated': |
948 | + raise osv.except_osv( |
949 | + _('Error'), |
950 | + _('You cannot made moves from/to Cross-docking locations when the Allocated stocks configuration is set to \'Unallocated\'.') |
951 | + ) |
952 | + # Below, "source_type" is only used for the outgoing shipment. We set it to "None" because by default it is |
953 | + # "default" and we do not want that info on INCOMING shipment |
954 | + wizard.source_type = None |
955 | + values.update({ |
956 | + 'location_dest_id': db_data.get('cd_loc'), |
957 | + 'cd_from_bo': True, |
958 | + }) |
959 | + elif wizard.dest_type == 'to_stock': |
960 | + # Below, "source_type" is only used for the outgoing shipment. We set it to "None because by default it is |
961 | + # "default" and we do not want that info on INCOMING shipment |
962 | + if line.product_id.type == 'consu': |
963 | + values['location_dest_id'] = db_data.get('non_stock') |
964 | + elif line.product_id.type == 'service_recep': |
965 | + values['location_dest_id'] = db_data.get('service_loc') |
966 | + else: |
967 | + # treat moves towards STOCK if NOT SERVICE |
968 | + values['location_dest_id'] = db_data.get('input_loc') |
969 | + |
970 | + values['cd_from_bo'] = False |
971 | + |
972 | + if wizard.dest_type != 'to_cross_docking': |
973 | + values['direct_incoming'] = wizard.direct_incoming |
974 | + |
975 | + return values |
976 | + |
977 | + def do_incoming_shipment(self, cr, uid, wizard_ids, context=None): |
978 | + """ |
979 | + Take the data in wizard_ids and lines of stock.incoming.processor and |
980 | + do the split of stock.move according to the data. |
981 | + """ |
982 | + # Objects |
983 | + inc_proc_obj = self.pool.get('stock.incoming.processor') |
984 | + move_proc_obj = self.pool.get('stock.move.in.processor') |
985 | + loc_obj = self.pool.get('stock.location') |
986 | + uom_obj = self.pool.get('product.uom') |
987 | + move_obj = self.pool.get('stock.move') |
988 | sequence_obj = self.pool.get('ir.sequence') |
989 | - # stock move object |
990 | - move_obj = self.pool.get('stock.move') |
991 | - product_obj = self.pool.get('product.product') |
992 | - currency_obj = self.pool.get('res.currency') |
993 | - uom_obj = self.pool.get('product.uom') |
994 | - # create picking object |
995 | - create_picking_obj = self.pool.get('create.picking') |
996 | - # workflow |
997 | wf_service = netsvc.LocalService("workflow") |
998 | +<<<<<<< TREE |
999 | internal_loc_ids = self.pool.get('stock.location').search(cr, uid, [('usage','=','internal'), ('cross_docking_location_ok', '=', False)]) |
1000 | ctx_avg = context.copy() |
1001 | ctx_avg['location'] = internal_loc_ids |
1002 | @@ -487,204 +625,131 @@ |
1003 | # keep data for back order creation |
1004 | data_back = self.create_data_back(cr, uid, move, context=context) |
1005 | # qty selected |
1006 | +======= |
1007 | + |
1008 | + if context is None: |
1009 | + context = {} |
1010 | + |
1011 | + if isinstance(wizard_ids, (int, long)): |
1012 | + wizard_ids = [wizard_ids] |
1013 | + |
1014 | + db_data_dict = self._get_db_data_dict(cr, uid) |
1015 | + |
1016 | + # UF-1617: Get the sync_message case |
1017 | + sync_in = context.get('sync_message_execution', False) |
1018 | + |
1019 | + internal_loc = loc_obj.search(cr, uid, [('usage', '=', 'internal'), ('cross_docking_location_ok', '=', False)]) |
1020 | + context['location'] = internal_loc |
1021 | + |
1022 | + product_availability = {} |
1023 | + |
1024 | + for wizard in inc_proc_obj.browse(cr, uid, wizard_ids, context=context): |
1025 | + picking = wizard.picking_id |
1026 | + backordered_moves = [] # Moves that need to be put in a backorder |
1027 | + done_moves = [] # Moves that are completed |
1028 | + out_picks = set() |
1029 | + processed_out_moves = [] |
1030 | + |
1031 | + for move in picking.move_lines: |
1032 | + # Get all processed lines that processed this stock move |
1033 | + proc_ids = move_proc_obj.search(cr, uid, [('wizard_id', '=', wizard.id), ('move_id', '=', move.id)], context=context) |
1034 | + # The processed quantity |
1035 | +>>>>>>> MERGE-SOURCE |
1036 | count = 0 |
1037 | - # flag to update the first move - if split was performed during the validation, new stock moves are created |
1038 | - first = True |
1039 | - # force complete flag = validate all partial for the same move have the same force complete value |
1040 | - force_complete = False |
1041 | - # initial qty |
1042 | - initial_qty = move.product_qty |
1043 | - # initial uom |
1044 | - initial_uom = move.product_uom.id |
1045 | - # corresponding out move |
1046 | + need_split = False |
1047 | + |
1048 | + data_back = move_obj.create_data_back(move) |
1049 | mirror_data = move_obj.get_mirror_move(cr, uid, [move.id], data_back, context=context)[move.id] |
1050 | - out_move_id = mirror_data['move_id'] |
1051 | out_moves = mirror_data['moves'] |
1052 | - processed_moves = [] |
1053 | - # update out flag |
1054 | - count_partial = len(partial_datas[pick.id][move.id]) |
1055 | - update_out = count_partial > 1 |
1056 | - # average price computation, new values - should be the same for every partial |
1057 | average_values = {} |
1058 | |
1059 | - orig_qty = move.product_qty |
1060 | - if move.original_qty_partial and move.original_qty_partial != -1: |
1061 | - orig_qty = move.original_qty_partial |
1062 | - |
1063 | - # partial list |
1064 | - for partial in partial_datas[pick.id][move.id]: |
1065 | - # original openERP logic - average price computation - To be validated by Matthias |
1066 | - # Average price computation |
1067 | - # selected product from wizard must be tested |
1068 | - product = product_obj.browse(cr, uid, partial['product_id'], context=ctx_avg) |
1069 | - values = {'name': partial['name'], |
1070 | - 'product_id': partial['product_id'], |
1071 | - 'product_qty': partial['product_qty'], |
1072 | - 'product_uos_qty': partial['product_qty'], |
1073 | - 'original_qty_partial': orig_qty, |
1074 | - 'prodlot_id': partial['prodlot_id'], |
1075 | - 'product_uom': partial['product_uom'], |
1076 | - 'product_uos': partial['product_uom'], |
1077 | - 'sync_dpo': move.sync_dpo, |
1078 | - 'asset_id': partial['asset_id'], |
1079 | - 'change_reason': partial['change_reason'], |
1080 | - 'direct_incoming': partial.get('direct_incoming'), |
1081 | - } |
1082 | - |
1083 | - if partial.get('dpo_line_id'): |
1084 | - values['dpo_line_id'] = partial['dpo_line_id'] |
1085 | - values['sync_dpo'] = partial['dpo_line_id'] and True or False |
1086 | - |
1087 | - # UTP-872: Don't change the quantity if the move is canceled |
1088 | - # If the quantity is changed to 0.00, a backorder is created |
1089 | - # for canceled moves |
1090 | - if move.state == 'cancel': |
1091 | - values.update({'product_qty': move.product_qty, |
1092 | - 'product_uos_qty': move.product_uos_qty}) |
1093 | - |
1094 | - if 'state' in partial: # UTP-872: Added also the state into the move line if the state comes from the sync |
1095 | - values.update({'state': partial['state']}) |
1096 | - if 'product_price' in partial: |
1097 | - values.update({'price_unit': partial['product_price']}) |
1098 | - elif 'product_uom' in partial and partial['product_uom'] != move.product_uom.id: |
1099 | - new_price = self.pool.get('product.uom')._compute_price(cr, uid, move.product_uom.id, move.price_unit, partial['product_uom']) |
1100 | - values.update({'price_unit': new_price}) |
1101 | - values = self._do_incoming_shipment_first_hook(cr, uid, ids, context, values=values) |
1102 | - compute_average = pick.type == 'in' and product.cost_method == 'average' and not move.location_dest_id.cross_docking_location_ok |
1103 | - if values.get('location_dest_id'): |
1104 | - val_loc = self.pool.get('stock.location').browse(cr, uid, values.get('location_dest_id'), context=context) |
1105 | - compute_average = pick.type == 'in' and product.cost_method == 'average' and not val_loc.cross_docking_location_ok |
1106 | - |
1107 | - # why do not used get_picking_type: original do_partial do not use it |
1108 | - # when an incoming shipment has a avg product to Service, the average price computation is of no use |
1109 | - |
1110 | + for line in move_proc_obj.browse(cr, uid, proc_ids, context=context): |
1111 | + values = self._get_values_from_line(cr, uid, move, line, db_data_dict, context=context) |
1112 | + |
1113 | + # Check if we must re-compute the price of the product |
1114 | + compute_average = False |
1115 | + if values.get('location_dest_id', False): |
1116 | + dest_loc = loc_obj.browse(cr, uid, values['location_dest_id'], context=context) |
1117 | + compute_average = picking.type == 'in' and line.product_id.cost_method == 'average' and dest_loc.cross_docking_location_ok |
1118 | + |
1119 | if compute_average: |
1120 | - move_currency_id = move.company_id.currency_id.id |
1121 | - context['currency_id'] = move_currency_id |
1122 | - # datas from partial |
1123 | - product_uom = partial['product_uom'] |
1124 | - product_qty = partial['product_qty'] |
1125 | - product_currency = partial.get('product_currency', False) |
1126 | - product_price = partial.get('product_price', 0.0) |
1127 | - qty = uom_obj._compute_qty(cr, uid, product_uom, product_qty, product.uom_id.id) |
1128 | - |
1129 | - if product.id in product_avail: |
1130 | - product_avail[product.id] += qty |
1131 | - else: |
1132 | - product_avail[product.id] = product.qty_available |
1133 | - |
1134 | - if qty > 0: |
1135 | - new_price = currency_obj.compute(cr, uid, product_currency, |
1136 | - move_currency_id, product_price, round=False, context=context) |
1137 | - new_price = uom_obj._compute_price(cr, uid, product_uom, new_price, |
1138 | - product.uom_id.id) |
1139 | - if product.qty_available <= 0: |
1140 | - new_std_price = new_price |
1141 | - else: |
1142 | - # Get the standard price |
1143 | - amount_unit = product.price_get('standard_price', context)[product.id] |
1144 | - # check no division by zero |
1145 | - if product_avail[product.id] + qty: |
1146 | - new_std_price = ((amount_unit * product_avail[product.id])\ |
1147 | - + (new_price * qty))/(product_avail[product.id] + qty) |
1148 | - else: |
1149 | - new_std_price = 0.0 |
1150 | - |
1151 | - # Write the field according to price type field |
1152 | - product_obj.write(cr, uid, [product.id], {'standard_price': new_std_price}) |
1153 | - |
1154 | - # Record the values that were chosen in the wizard, so they can be |
1155 | - # used for inventory valuation if real-time valuation is enabled. |
1156 | - average_values = {'price_unit': product_price, |
1157 | - 'price_currency_id': product_currency} |
1158 | - |
1159 | - # the quantity |
1160 | - count = count + uom_obj._compute_qty(cr, uid, partial['product_uom'], partial['product_qty'], initial_uom) |
1161 | - count_partial -= 1 |
1162 | - if first: |
1163 | - first = False |
1164 | - # line number does not need to be updated |
1165 | - # average computation - empty if not average |
1166 | + average_values = self._compute_average_values(cr, uid, move, line, product_availability, context=context) |
1167 | values.update(average_values) |
1168 | - |
1169 | -# # if split happened, we update the corresponding OUT move |
1170 | -# if out_move_id: |
1171 | -# # UF-1690 : Remove the location_dest_id from values |
1172 | -# out_values = values.copy() |
1173 | -# if out_values.get('location_dest_id', False): |
1174 | -# out_values.pop('location_dest_id') |
1175 | -# second_assign_moves.append(out_move_id) |
1176 | -# if update_out: |
1177 | -# move_obj.write(cr, uid, [out_move_id], out_values, context=context) |
1178 | -# elif move.product_id.id != partial['product_id']: |
1179 | -# move_obj.write(cr, uid, [out_move_id], out_values, context=context) |
1180 | -# # we force update flag - out will be updated if qty is missing - possibly with the creation of a new move |
1181 | -# update_out = True |
1182 | - # we update the values with the _do_incoming_shipment_first_hook only if we are on an 'IN' |
1183 | - values = self._do_incoming_shipment_first_hook(cr, uid, ids, context, values=values) |
1184 | - # mark the done IN stock as processed |
1185 | - move_obj.write(cr, uid, [move.id], dict(values, processed_stock_move=True), context=context) |
1186 | + |
1187 | + # The quantity |
1188 | + if line.uom_id.id != move.product_uom.id: |
1189 | + count += uom_obj._compute_qty(cr, uid, line.uom_id.id, line.quantity, move.product_uom.id) |
1190 | + else: |
1191 | + count += line.quantity |
1192 | + |
1193 | + values['processed_stock_move'] = True |
1194 | + if not need_split: |
1195 | + need_split = True |
1196 | + # Mark the done IN stock move as processed |
1197 | + move_obj.write(cr, uid, [move.id], values, context=context) |
1198 | done_moves.append(move.id) |
1199 | - |
1200 | else: |
1201 | - # split happened during the validation |
1202 | - # copy the stock move and set the quantity |
1203 | - # we keep original line number |
1204 | - values.update({'state': 'assigned'}) |
1205 | - # average computation - empty if not average |
1206 | - values.update(average_values) |
1207 | - # mark the done IN stock as processed |
1208 | - new_move = move_obj.copy(cr, uid, move.id, dict(values, processed_stock_move=True), context=dict(context, keepLineNumber=True)) |
1209 | - done_moves.append(new_move) |
1210 | - |
1211 | - |
1212 | + values['state'] = 'assigned' |
1213 | + context['keepLineNumber'] = True |
1214 | + new_move_id = move_obj.copy(cr, uid, move.id, values, context=context) |
1215 | + context['keepLineNumber'] = False |
1216 | + done_moves.append(new_move_id) |
1217 | + |
1218 | + values['processed_stock_move'] = False |
1219 | + |
1220 | out_values = values.copy() |
1221 | - # Remove sync. dpo fields |
1222 | - out_values['dpo_line_id'] = 0 |
1223 | - out_values['sync_dpo'] = False |
1224 | - out_values.update({'state': 'confirmed'}) |
1225 | + # Remove sync. DPO fields |
1226 | + out_values.update({ |
1227 | + 'dpo_line_id': 0, |
1228 | + 'sync_dpo': False, |
1229 | + 'state': 'confirmed', |
1230 | + }) |
1231 | if out_values.get('location_dest_id', False): |
1232 | out_values.pop('location_dest_id') |
1233 | - |
1234 | - partial_qty = partial['product_qty'] |
1235 | - count_out = len(out_moves) |
1236 | - |
1237 | + |
1238 | + remaining_out_qty = line.quantity |
1239 | + out_move = None |
1240 | for out_move in out_moves: |
1241 | - if not partial_qty: |
1242 | - break |
1243 | - |
1244 | - out_pick = out_move.picking_id |
1245 | - if out_pick and out_pick.type == 'out' and out_pick.subtype == 'picking' and \ |
1246 | - out_pick.state == 'draft' and out_pick.id not in out_picks: |
1247 | - out_picks.append(out_move.picking_id.id) |
1248 | - |
1249 | out_move = move_obj.browse(cr, uid, out_move.id, context=context) |
1250 | - count_out -= 1 |
1251 | - |
1252 | - uom_partial_qty = self.pool.get('product.uom')._compute_qty(cr, uid, partial['product_uom'], partial_qty, out_move.product_uom.id) |
1253 | - if count_partial or uom_partial_qty < out_move.product_qty: |
1254 | - # Split the out move |
1255 | - vals = dict(out_values, product_qty=partial_qty, product_uom=partial['product_uom']) |
1256 | - if not sync_in: |
1257 | - vals.update({'in_out_updated': True}) |
1258 | - new_move = move_obj.copy(cr, uid, out_move.id, vals, context=dict(context, keepLineNumber=True)) |
1259 | - # Update the initial out move qty |
1260 | - move_obj.write(cr, uid, [out_move.id], {'product_qty': out_move.product_qty - uom_partial_qty, 'original_qty_partial': orig_qty}, context=context) |
1261 | - backlinks.append((move.id, new_move)) |
1262 | - partial_qty = 0.00 |
1263 | -# if not count_out: |
1264 | -# backlinks.append((move.id, out_move.id)) |
1265 | - elif not count_out or uom_partial_qty == out_move.product_qty: |
1266 | - # Update the initial out move qty with the processed qty |
1267 | - vals = dict(out_values, product_qty=partial_qty, product_uom=partial['product_uom']) |
1268 | - if not sync_in: |
1269 | - vals.update({'in_out_updated': True}) |
1270 | - move_obj.write(cr, uid, [out_move.id], vals, context=context) |
1271 | - backlinks.append((move.id, out_move.id)) |
1272 | - processed_moves.append(out_move.id) |
1273 | - partial_qty = 0.00 |
1274 | + |
1275 | + # List the Picking Tickets that need to be created from the Draft Picking Ticket |
1276 | + if out_move.picking_id.type == 'out' \ |
1277 | + and out_move.picking_id.subtype == 'picking' \ |
1278 | + and out_move.picking_id.state == 'draft': |
1279 | + out_picks.add(out_move.picking_id.id) |
1280 | + |
1281 | + if line.uom_id.id != out_move.product_uom.id: |
1282 | + uom_partial_qty = uom_obj._compute_qty(cr, uid, line.uom_id.id, remaining_out_qty, out_move.product_uom.id) |
1283 | + else: |
1284 | + uom_partial_qty = remaining_out_qty |
1285 | + |
1286 | + if uom_partial_qty < out_move.product_qty: |
1287 | + # Split the out move |
1288 | + out_values.update({ |
1289 | + 'product_qty': remaining_out_qty, |
1290 | + 'product_uom': line.uom_id.id, |
1291 | + 'in_out_updated': sync_in and False or True, |
1292 | + }) |
1293 | + context['keepLineNumber'] = True |
1294 | + new_out_move_id = move_obj.copy(cr, uid, out_move.id, out_values, context=context) |
1295 | + context['keepLineNumber'] = False |
1296 | + remaining_out_qty = 0.00 |
1297 | + move_obj.write(cr, uid, [out_move.id], { |
1298 | + 'product_qty': out_move.product_qty - uom_partial_qty, |
1299 | + 'product_uos_qty': out_move.product_qty - uom_partial_qty, |
1300 | + }, context=context) |
1301 | + processed_out_moves.append(new_out_move_id) |
1302 | + elif uom_partial_qty == out_move.product_qty: |
1303 | + out_values.update({ |
1304 | + 'product_qty': remaining_out_qty, |
1305 | + 'product_uom': line.uom_id.id, |
1306 | + 'in_out_updated': sync_in and False or True, |
1307 | + }) |
1308 | + move_obj.write(cr, uid, [out_move.id], out_values, context=context) |
1309 | + processed_out_moves.append(out_move.id) |
1310 | else: |
1311 | # Just update the data of the initial out move |
1312 | +<<<<<<< TREE |
1313 | vals = dict(out_values, product_qty=out_move.product_qty, product_uom=partial['product_uom']) |
1314 | if not sync_in: |
1315 | vals.update({'in_out_updated': True}) |
1316 | @@ -786,74 +851,117 @@ |
1317 | wf_service.trg_write(uid, 'stock.picking', pick.id, cr) |
1318 | else: |
1319 | # UF-1617: when it is from the sync, then just send the IN to shipped, then return the backorder_id |
1320 | +======= |
1321 | + out_values.update({ |
1322 | + 'product_qty': uom_partial_qty, |
1323 | + 'product_uom': line.uom_id.id, |
1324 | + 'in_out_updated': sync_in and False or True |
1325 | + }) |
1326 | + move_obj.write(cr, uid, [out_move.id], out_values, context=context) |
1327 | + processed_out_moves.append(out_move.id) |
1328 | + |
1329 | + # Decrement the initial move, cannot be less than zero |
1330 | + diff_qty = move.product_qty - count |
1331 | + # If there is remaining quantity for the move, put the ID of the move |
1332 | + # and the remaining quantity to list of moves to put in backorder |
1333 | + if diff_qty > 0.00 and move.state != 'cancel': |
1334 | + backordered_moves.append((move, diff_qty, average_values)) |
1335 | + |
1336 | + # Create the backorder if needed |
1337 | + if backordered_moves: |
1338 | + backorder_id = self.copy(cr, uid, picking.id, { |
1339 | + 'name': sequence_obj.get(cr, uid, 'stock.picking.%s' % (picking.type)), |
1340 | + 'move_lines': [], |
1341 | + 'state': 'draft', |
1342 | + }, context=context) |
1343 | + |
1344 | + for bo_move, bo_qty, av_values in backordered_moves: |
1345 | + # Create the corresponding move in the backorder - reset batch - reset asset_id |
1346 | + bo_values = { |
1347 | + 'asset_id': False, |
1348 | + 'product_qty': bo_qty, |
1349 | + 'product_uos_qty': bo_qty, |
1350 | + 'product_uom': data_back['product_uom'], |
1351 | + 'product_uos': data_back['product_uom'], |
1352 | + 'prodlot_id': False, |
1353 | + 'state': 'assigned', |
1354 | + 'move_dest_id': False, |
1355 | + 'change_reason': False, |
1356 | + 'processed_stock_move': True, |
1357 | + } |
1358 | + bo_values.update(av_values) |
1359 | + context['keepLineNumber'] = True |
1360 | + move_obj.copy(cr, uid, move.id, bo_values, context=context) |
1361 | + context['keepLineNumber'] = False |
1362 | + |
1363 | + # Put the done moves in this new picking |
1364 | + move_obj.write(cr, uid, done_moves, { |
1365 | + 'picking_id': backorder_id, |
1366 | + 'dpo_line_id': 0, |
1367 | + }, context=context) |
1368 | + |
1369 | + |
1370 | + if sync_in: |
1371 | + # UF-1617: When it is from the sync., then just send the IN to shipped, then return the backorder_id |
1372 | +>>>>>>> MERGE-SOURCE |
1373 | wf_service.trg_validate(uid, 'stock.picking', backorder_id, 'button_shipped', cr) |
1374 | return backorder_id |
1375 | + |
1376 | + |
1377 | + wf_service.trg_validate(uid, 'stock.picking', backorder_id, 'button_confirm', cr) |
1378 | + # Then we finish the good picking |
1379 | + self.write(cr, uid, [picking.id], { |
1380 | + 'backorder_id': backorder_id, |
1381 | + 'cd_from_bo': values.get('cd_from_bo', False), |
1382 | + }, context=context) |
1383 | + self.action_move(cr, uid, [backorder_id]) |
1384 | + wf_service.trg_validate(uid, 'stock.picking', backorder_id, 'button_done', cr) |
1385 | + wf_service.trg_write(uid, 'stock.picking', picking.id, cr) |
1386 | else: |
1387 | - if sync_in: # if it's from sync, then we just send the pick to become Available Shipped, not completely close! |
1388 | - self.write(cr, uid, [pick.id], {'state': 'shipped'}, context=context) |
1389 | - return pick.id |
1390 | + if sync_in: # If it's from sync, then we just send the pick to become Available Shipped, not completely close! |
1391 | + self.write(cr, uid, [picking.id], {'state': 'shipped'}, context=context) |
1392 | + return picking.id |
1393 | else: |
1394 | - self.action_move(cr, uid, [pick.id], context) |
1395 | - wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_done', cr) |
1396 | - |
1397 | - for move, out_move in backlinks: |
1398 | - if move in done_moves: |
1399 | - move_obj.write(cr, uid, [move], {'state': 'done'}, context=context) |
1400 | - if not sync_in: |
1401 | - move_obj.action_assign(cr, uid, [out_move]) |
1402 | - pick_moves.append(out_move) |
1403 | - |
1404 | - # update the out version |
1405 | - if update_pick_version: |
1406 | - self.write(cr, uid, [update_pick_version], {'update_version_from_in_stock_picking': mirror_data['picking_version']+1}, context=context) |
1407 | - |
1408 | - |
1409 | - # Create the first picking ticket if we are on a draft picking ticket |
1410 | - for picking in self.browse(cr, uid, out_picks, context=context): |
1411 | - if picking.type == 'out' and picking.subtype == 'picking' and picking.state == 'draft': |
1412 | - wiz = self.create_picking(cr, uid, [picking.id], context=context) |
1413 | - wiz_obj = self.pool.get(wiz['res_model']) |
1414 | - moves_picking = wiz_obj.browse(cr, uid, wiz['res_id'], context=wiz['context']).product_moves_picking |
1415 | - # We delete the lines which is not from the IN |
1416 | - for line in moves_picking: |
1417 | - if line.move_id.id not in pick_moves: |
1418 | - self.pool.get('stock.move.memory.picking').unlink(cr, uid, [line.id], context=context) |
1419 | - if wiz_obj.browse(cr, uid, wiz['res_id'], context=wiz['context']).product_moves_picking: |
1420 | - # We copy all data in lines |
1421 | - wiz_obj.copy_all(cr, uid, [wiz['res_id']], context=wiz['context']) |
1422 | - # We process the creation of the picking |
1423 | - wiz_obj.do_create_picking(cr, uid, [wiz['res_id']], context=wiz['context']) |
1424 | - |
1425 | - # Assign all updated out moves |
1426 | -# for move in move_obj.browse(cr, uid, to_assign_moves): |
1427 | -# if not move.product_qty and move.state not in ('done', 'cancel'): |
1428 | -# to_assign_moves.remove(move.id) |
1429 | -# move.unlink(context=dict(context, call_unlink=True)) |
1430 | -# for move in move_obj.browse(cr, uid, second_assign_moves): |
1431 | -# if not move.product_qty and move.state not in ('done', 'cancel'): |
1432 | -# second_assign_moves.remove(move.id) |
1433 | -# move.unlink(context=dict(context, call_unlink=True)) |
1434 | -# move_obj.action_assign(cr, uid, second_assign_moves) |
1435 | -# move_obj.action_assign(cr, uid, to_assign_moves) |
1436 | - |
1437 | - # If a line has been canceled on PICK/OUT |
1438 | - for so in self.pool.get('sale.order').browse(cr, uid, list(so_to_check), context=context): |
1439 | - if so.state == 'shipping_except': |
1440 | - wf_service.trg_validate(uid, 'sale.order', so.id, 'ship_corrected', cr) |
1441 | + self.action_move(cr, uid, [picking.id], context=context) |
1442 | + wf_service.trg_validate(uid, 'stock.picking', picking.id, 'button_done', cr) |
1443 | + |
1444 | + if not sync_in: |
1445 | + move_obj.action_assign(cr, uid, processed_out_moves) |
1446 | + |
1447 | + |
1448 | + # Create the first picking ticket if we are on a draft picking ticket |
1449 | + for picking in self.browse(cr, uid, list(out_picks), context=context): |
1450 | + wiz = self.create_picking(cr, uid, [picking.id], context=context) |
1451 | + wiz_obj = self.pool.get(wiz['res_model']) |
1452 | + wiz_context = wiz.get('context', {}) |
1453 | + moves_picking = wiz_obj.browse(cr, uid, wiz['res_id'], context=wiz_context).move_ids |
1454 | + nb_lines = len(moves_picking) |
1455 | + # We delete the lines which is not from the IN |
1456 | +# for line in moves_picking: |
1457 | +# if line.move_id.id not in pick_moves: |
1458 | +# self.pool.get('stock.move.memory.picking').unlink(cr, uid, [line.id], context=context) |
1459 | +# nb_lines -= 1 |
1460 | + |
1461 | + if nb_lines: |
1462 | + # We copy all data in lines |
1463 | + wiz_obj.copy_all(cr, uid, [wiz['res_id']], context=wiz_context) |
1464 | + # We process the creation of the picking |
1465 | + wiz_obj.do_create_picking(cr, uid, [wiz['res_id']], context=wiz_context) |
1466 | |
1467 | if context.get('from_simu_screen'): |
1468 | view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'view_picking_in_form')[1] |
1469 | - return {'type': 'ir.actions.act_window', |
1470 | - 'res_model': 'stock.picking', |
1471 | - 'res_id': ids[0], |
1472 | - 'view_id': [view_id], |
1473 | - 'view_mode': 'form, tree', |
1474 | - 'view_type': 'form', |
1475 | - 'target': 'crush', |
1476 | - 'context': context} |
1477 | + return { |
1478 | + 'type': 'ir.actions.act_window', |
1479 | + 'res_model': 'stock.picking', |
1480 | + 'res_id': picking.id, |
1481 | + 'view_id': [view_id], |
1482 | + 'view_mode': 'form, tree', |
1483 | + 'view_type': 'form', |
1484 | + 'target': 'crush', |
1485 | + 'context': context} |
1486 | |
1487 | return {'type': 'ir.actions.act_window_close'} |
1488 | - |
1489 | + |
1490 | def enter_reason(self, cr, uid, ids, context=None): |
1491 | ''' |
1492 | open reason wizard |
1493 | @@ -868,7 +976,7 @@ |
1494 | wiz_obj = self.pool.get('wizard') |
1495 | # open the selected wizard |
1496 | return wiz_obj.open_wizard(cr, uid, ids, name=name, model=model, step=step, context=dict(context, picking_id=ids[0])) |
1497 | - |
1498 | + |
1499 | def cancel_and_update_out(self, cr, uid, ids, context=None): |
1500 | ''' |
1501 | update corresponding out picking if exists and cancel the picking |
1502 | @@ -877,18 +985,18 @@ |
1503 | context = {} |
1504 | if isinstance(ids, (int, long)): |
1505 | ids = [ids] |
1506 | - |
1507 | + |
1508 | # objects |
1509 | move_obj = self.pool.get('stock.move') |
1510 | purchase_obj = self.pool.get('purchase.order') |
1511 | # workflow |
1512 | wf_service = netsvc.LocalService("workflow") |
1513 | - |
1514 | + |
1515 | for obj in self.browse(cr, uid, ids, context=context): |
1516 | # corresponding sale ids to be manually corrected after purchase workflow trigger |
1517 | sale_ids = [] |
1518 | for move in obj.move_lines: |
1519 | - data_back = self.create_data_back(cr, uid, move, context=context) |
1520 | + data_back = self.create_data_back(move) |
1521 | diff_qty = -data_back['product_qty'] |
1522 | # update corresponding out move - no move created, no need to handle line sequencing policy |
1523 | out_move_id = self._update_mirror_move(cr, uid, ids, data_back, diff_qty, out_move=False, context=context) |
1524 | @@ -906,8 +1014,8 @@ |
1525 | move_id = move_obj.search(cr, uid, [('picking_id.type', '=', 'out'), |
1526 | ('picking_id.subtype', 'in', ('standard', 'picking')), |
1527 | ('picking_id.sale_id', '=', sale_id), |
1528 | - ('state', 'not in', ('done', 'cancel')), |
1529 | - ('processed_stock_move', '=', True),], context=context) |
1530 | + ('state', 'not in', ('done', 'cancel')), |
1531 | + ('processed_stock_move', '=', True), ], context=context) |
1532 | if move_id: |
1533 | proc_id = self.pool.get('procurement.order').search(cr, uid, [('move_id', '=', out_move_id)], context=context) |
1534 | self.pool.get('procurement.order').write(cr, uid, proc_id, {'move_id': move_id[0]}, context=context) |
1535 | @@ -920,15 +1028,15 @@ |
1536 | if out_move.picking_id and out_move.picking_id.sale_id: |
1537 | if out_move.picking_id.sale_id.id not in sale_ids: |
1538 | sale_ids.append(out_move.picking_id.sale_id.id) |
1539 | - |
1540 | + |
1541 | # correct the corresponding po manually if exists - should be in shipping exception |
1542 | if obj.purchase_id: |
1543 | wf_service.trg_validate(uid, 'purchase.order', obj.purchase_id.id, 'picking_ok', cr) |
1544 | - purchase_obj.log(cr, uid, obj.purchase_id.id, _('The Purchase Order %s is %s%% received.')%(obj.purchase_id.name, round(obj.purchase_id.shipped_rate,2))) |
1545 | + purchase_obj.log(cr, uid, obj.purchase_id.id, _('The Purchase Order %s is %s%% received.') % (obj.purchase_id.name, round(obj.purchase_id.shipped_rate, 2))) |
1546 | # correct the corresponding so |
1547 | for sale_id in sale_ids: |
1548 | wf_service.trg_validate(uid, 'sale.order', sale_id, 'ship_corrected', cr) |
1549 | - |
1550 | + |
1551 | return True |
1552 | |
1553 | stock_picking() |
1554 | @@ -939,34 +1047,13 @@ |
1555 | add the link to procurement order |
1556 | ''' |
1557 | _inherit = 'purchase.order.line' |
1558 | - _columns= {'procurement_id': fields.many2one('procurement.order', string='Procurement Reference', readonly=True,), |
1559 | + _columns = {'procurement_id': fields.many2one('procurement.order', string='Procurement Reference', readonly=True,), |
1560 | } |
1561 | - _defaults = {'procurement_id': False,} |
1562 | - |
1563 | + _defaults = {'procurement_id': False, } |
1564 | + |
1565 | purchase_order_line() |
1566 | |
1567 | |
1568 | -class purchase_order(osv.osv): |
1569 | - ''' |
1570 | - hook to modify created In moves |
1571 | - ''' |
1572 | - _inherit = 'purchase.order' |
1573 | - |
1574 | - def _hook_action_picking_create_stock_picking(self, cr, uid, ids, context=None, *args, **kwargs): |
1575 | - ''' |
1576 | - modify data for stock move creation |
1577 | - - line number of stock move is taken from purchase order line |
1578 | - ''' |
1579 | - if context is None: |
1580 | - context = {} |
1581 | - move_values = super(purchase_order, self)._hook_action_picking_create_stock_picking(cr, uid, ids, context=context, *args, **kwargs) |
1582 | - order_line = kwargs['order_line'] |
1583 | - move_values.update({'line_number': order_line.line_number}) |
1584 | - return move_values |
1585 | - |
1586 | -purchase_order() |
1587 | - |
1588 | - |
1589 | class procurement_order(osv.osv): |
1590 | ''' |
1591 | inherit po_values_hook |
1592 | @@ -977,7 +1064,7 @@ |
1593 | ''' |
1594 | Please copy this to your module's method also. |
1595 | This hook belongs to the make_po method from purchase>purchase.py>procurement_order |
1596 | - |
1597 | + |
1598 | - allow to modify the data for purchase order line creation |
1599 | ''' |
1600 | if isinstance(ids, (int, long)): |
1601 | @@ -986,13 +1073,13 @@ |
1602 | line = super(procurement_order, self).po_line_values_hook(cr, uid, ids, context=context, *args, **kwargs) |
1603 | # give the purchase order line a link to corresponding procurement |
1604 | procurement = kwargs['procurement'] |
1605 | - line.update({'procurement_id': procurement.id,}) |
1606 | + line.update({'procurement_id': procurement.id, }) |
1607 | # for Internal Request (IR) on make_to_order we update PO line data according to the data of the IR (=sale_order) |
1608 | - sale_order_line_ids = sale_obj.search(cr, uid, [('procurement_id','=', procurement.id)], context=context) |
1609 | + sale_order_line_ids = sale_obj.search(cr, uid, [('procurement_id', '=', procurement.id)], context=context) |
1610 | for sol in sale_obj.browse(cr, uid, sale_order_line_ids, context=context): |
1611 | if sol.order_id.procurement_request and not sol.product_id and sol.comment: |
1612 | line.update({'product_id': False, |
1613 | - 'name': 'Description: %s' %sol.comment, |
1614 | + 'name': 'Description: %s' % sol.comment, |
1615 | 'comment': sol.comment, |
1616 | 'product_qty': sol.product_uom_qty, |
1617 | 'price_unit': sol.price_unit, |
1618 | @@ -1009,6 +1096,6 @@ |
1619 | 'nomen_sub_4': sol.nomen_sub_4.id or False, |
1620 | 'nomen_sub_5': sol.nomen_sub_5.id or False}) |
1621 | return line |
1622 | - |
1623 | + |
1624 | procurement_order() |
1625 | |
1626 | |
1627 | === modified file 'delivery_mechanism/wizard/change_product_memory_move_view.xml' |
1628 | --- delivery_mechanism/wizard/change_product_memory_move_view.xml 2013-08-30 14:16:23 +0000 |
1629 | +++ delivery_mechanism/wizard/change_product_memory_move_view.xml 2014-03-21 08:49:45 +0000 |
1630 | @@ -1,9 +1,9 @@ |
1631 | <?xml version="1.0" encoding="utf-8"?> |
1632 | <openerp> |
1633 | <data> |
1634 | - <record id="view_change_product_memory_move_form_wizard" model="ir.ui.view"> |
1635 | - <field name="name">Change Product move</field> |
1636 | - <field name="model">change.product.memory.move</field> |
1637 | + <record id="change_product_move_processor_form_view" model="ir.ui.view"> |
1638 | + <field name="name">change.product.move.processor.form.view</field> |
1639 | + <field name="model">change.product.move.processor</field> |
1640 | <field name="type">form</field> |
1641 | <field name="arch" type="xml"> |
1642 | <form string="Change Product of Memory Move"> |
1643 | |
1644 | === modified file 'import_data/import_data.py' |
1645 | --- import_data/import_data.py 2014-01-15 09:20:59 +0000 |
1646 | +++ import_data/import_data.py 2014-03-21 08:49:45 +0000 |
1647 | @@ -2,7 +2,7 @@ |
1648 | ############################################################################## |
1649 | # |
1650 | # OpenERP, Open Source Management Solution |
1651 | -# Copyright (C) 2011 TeMPO Consulting, MSF |
1652 | +# Copyright (C) 2011 TeMPO Consulting, MSF |
1653 | # |
1654 | # This program is free software: you can redistribute it and/or modify |
1655 | # it under the terms of the GNU Affero General Public License as |
1656 | @@ -49,7 +49,6 @@ |
1657 | |
1658 | def _set_full_path_nomen(self, cr, uid, headers, row, col): |
1659 | if not col: |
1660 | - self._cache = {} |
1661 | # modify headers if needed |
1662 | for n,h in enumerate(headers): |
1663 | m = re.match("^nomen_manda_([0123]).name$", h) |
1664 | @@ -63,9 +62,6 @@ |
1665 | row[col[manda]] = ' | '.join([row[col[manda-1]], row[col[manda]]]) |
1666 | return col |
1667 | |
1668 | - def _del_product_cache(self, cr, uid): |
1669 | - self._cache = {} |
1670 | - |
1671 | def _set_default_value(self, cr, uid, data, row, headers): |
1672 | # Create new list of headers with the name of each fields (without dots) |
1673 | new_headers = [] |
1674 | @@ -78,10 +74,11 @@ |
1675 | # Get the default value |
1676 | defaults = self.pool.get('product.product').default_get(cr, uid, new_headers) |
1677 | # If no value in file, set the default value |
1678 | - for n, h in enumerate(new_headers): |
1679 | + for h in new_headers: |
1680 | if h in defaults and (not h in data or not data[h]): |
1681 | data[h] = defaults[h] |
1682 | |
1683 | + |
1684 | post_hook = { |
1685 | 'account.budget.post': _set_code_name, |
1686 | 'product.nomenclature': _set_nomen_level, |
1687 | @@ -89,11 +86,10 @@ |
1688 | } |
1689 | |
1690 | pre_hook = { |
1691 | - 'product.product': _set_full_path_nomen, |
1692 | + 'product.product': _set_full_path_nomen, |
1693 | } |
1694 | |
1695 | post_load_hook = { |
1696 | - 'product.product': _del_product_cache, |
1697 | } |
1698 | |
1699 | def _get_image(self, cr, uid, context=None): |
1700 | @@ -105,7 +101,7 @@ |
1701 | 'debug': fields.boolean('Debug to server log'), |
1702 | 'object': fields.selection([ |
1703 | ('product.nomenclature','Product Nomenclature'), |
1704 | - ('product.category','Product Category'), |
1705 | + ('product.category','Product Category'), |
1706 | ('product.product', 'Product'), |
1707 | ('res.partner.category','Partner Category'), |
1708 | ('res.partner','Partner'), |
1709 | @@ -132,13 +128,13 @@ |
1710 | |
1711 | obj = self.read(cr, uid, ids[0]) |
1712 | import_mode = obj.get('import_mode') |
1713 | - |
1714 | + |
1715 | objname = "" |
1716 | for sel in self._columns['object'].selection: |
1717 | if sel[0] == obj['object']: |
1718 | objname = sel[1] |
1719 | break |
1720 | - |
1721 | + |
1722 | fileobj = TemporaryFile('w+') |
1723 | fileobj.write(base64.decodestring(obj['file'])) |
1724 | fileobj.seek(0) |
1725 | @@ -146,6 +142,71 @@ |
1726 | reader = csv.reader(fileobj, quotechar='"', delimiter=';') |
1727 | headers = [] |
1728 | |
1729 | + if impobj._name == 'product.product': |
1730 | + # Create the cache |
1731 | + if not hasattr(self, '_cache'): |
1732 | + self._cache = {} |
1733 | + self._cache.setdefault(dbname, {}) |
1734 | + |
1735 | + if not hasattr(self.pool.get('product.nomenclature'), '_cache'): |
1736 | + self.pool.get('product.nomenclature')._cache = {} |
1737 | + self.pool.get('product.nomenclature')._cache.setdefault(dbname, {}) |
1738 | + |
1739 | + # Clear the cache |
1740 | + self._cache[dbname] = {'product.nomenclature': {'name': {}, 'complete_name': {}}, |
1741 | + 'product.uom': {'name': {}}, |
1742 | + 'product.asset.type': {'name': {}}, |
1743 | + 'product.international.status': {'name': {}}, |
1744 | + } |
1745 | + # Product nomenclature |
1746 | + cr.execute('SELECT name, id FROM product_nomenclature;') |
1747 | + for nv in cr.dictfetchall(): |
1748 | + self._cache[dbname]['product.nomenclature']['name'].update({nv['name']: nv['id']}) |
1749 | + # Product category |
1750 | + cr.execute('SELECT id, family_id FROM product_category;') |
1751 | + for pc in cr.dictfetchall(): |
1752 | + self.pool.get('product.nomenclature')._cache[dbname].update({pc['family_id']: pc['id']}) |
1753 | + # Product nomenclature complete name |
1754 | + cr.execute('''SELECT id, name FROM |
1755 | +( |
1756 | +(SELECT |
1757 | + n0.id, n0.name AS name |
1758 | +FROM product_nomenclature n0 |
1759 | +WHERE n0.level = 0) |
1760 | +UNION |
1761 | +(SELECT n1.id, n0.name ||' | '|| n1.name AS name |
1762 | +FROM product_nomenclature n1 |
1763 | + LEFT JOIN product_nomenclature n0 ON n1.parent_id = n0.id |
1764 | +WHERE n1.level = 1) |
1765 | +UNION |
1766 | +(SELECT n2.id, n0.name ||' | '|| n1.name ||' | '|| n2.name AS name |
1767 | +FROM product_nomenclature n1 |
1768 | + LEFT JOIN product_nomenclature n0 ON n1.parent_id = n0.id |
1769 | + LEFT JOIN product_nomenclature n2 ON n2.parent_id = n1.id |
1770 | +WHERE n2.level = 2) |
1771 | +UNION |
1772 | +(SELECT n3.id, n0.name ||' | '|| n1.name ||' | '|| n2.name ||' | '|| n3.name AS name |
1773 | +FROM product_nomenclature n1 |
1774 | + LEFT JOIN product_nomenclature n0 ON n1.parent_id = n0.id |
1775 | + LEFT JOIN product_nomenclature n2 ON n2.parent_id = n1.id |
1776 | + LEFT JOIN product_nomenclature n3 ON n3.parent_id = n2.id |
1777 | +WHERE n3.level = 3) |
1778 | +) AS cn''') |
1779 | + for cnv in cr.dictfetchall(): |
1780 | + self._cache[dbname]['product.nomenclature']['complete_name'].update({cnv['name']: cnv['id']}) |
1781 | + # Product UoM |
1782 | + cr.execute('SELECT name, id FROM product_uom;') |
1783 | + for uv in cr.dictfetchall(): |
1784 | + self._cache[dbname]['product.uom']['name'].update({uv['name']: uv['id']}) |
1785 | + # Asset type |
1786 | + cr.execute('SELECT name, id FROM product_asset_type;') |
1787 | + for av in cr.dictfetchall(): |
1788 | + self._cache[dbname]['product.asset.type']['name'].update({av['name']: av['id']}) |
1789 | + # International status |
1790 | + cr.execute('SELECT name, id FROM product_international_status;') |
1791 | + for iv in cr.dictfetchall(): |
1792 | + self._cache[dbname]['product.international.status']['name'].update({iv['name']: iv['id']}) |
1793 | + |
1794 | errorfile = TemporaryFile('w+') |
1795 | writer = csv.writer(errorfile, quotechar='"', delimiter=';') |
1796 | |
1797 | @@ -160,11 +221,18 @@ |
1798 | def _get_obj(header, value, fields_def): |
1799 | list_obj = header.split('.') |
1800 | relation = fields_def[list_obj[0]]['relation'] |
1801 | + if impobj._name == 'product.product' and value in self._cache.get(dbname, {}).get(relation, {}).get(list_obj[1], {}): |
1802 | + return self._cache[dbname][relation][list_obj[1]][value] |
1803 | new_obj = self.pool.get(relation) |
1804 | newids = new_obj.search(cr, uid, [(list_obj[1], '=', value)], limit=1) |
1805 | if not newids: |
1806 | - # TODO: no obj |
1807 | + # no obj |
1808 | raise osv.except_osv(_('Warning !'), _('%s does not exist')%(value,)) |
1809 | + |
1810 | + if impobj._name == 'product.product': |
1811 | + self._cache[dbname].setdefault(relation, {}) |
1812 | + self._cache[dbname][relation].setdefault(list_obj[1], {}) |
1813 | + self._cache[dbname][relation][list_obj[1]][value] = newids[0] |
1814 | return newids[0] |
1815 | |
1816 | def process_data(field, value, fields_def): |
1817 | @@ -172,17 +240,23 @@ |
1818 | return |
1819 | if '.' not in field: |
1820 | # type datetime, date, bool, int, float |
1821 | - if value and fields_def[field]['type'] == 'boolean': |
1822 | + if fields_def[field]['type'] == 'boolean': |
1823 | value = value.lower() not in ('0', 'false', 'off','-', 'no', 'n') |
1824 | - elif value and fields_def[field]['type'] == 'selection': |
1825 | - for key, val in fields_def[field]['selection']: |
1826 | - if value.lower() in [tools.ustr(key).lower(), tools.ustr(val).lower()]: |
1827 | - value = key |
1828 | - break |
1829 | - elif value and fields_def[field]['type'] == 'date': |
1830 | + elif fields_def[field]['type'] == 'selection': |
1831 | + if impobj == 'product.product' and self._cache[dbname].get('product.product.%s.%s' % (field, value), False): |
1832 | + value = self._cache[dbname]['product.product.%s.%s' % (field, value)] |
1833 | + else: |
1834 | + for key, val in fields_def[field]['selection']: |
1835 | + if value.lower() in [tools.ustr(key).lower(), tools.ustr(val).lower()]: |
1836 | + value = key |
1837 | + if impobj == 'product.product': |
1838 | + self._cache[dbname].setdefault('product.product.%s' % field, {}) |
1839 | + self._cache[dbname]['product.product.%s.%s' % (field, value)] = key |
1840 | + break |
1841 | + elif fields_def[field]['type'] == 'date': |
1842 | dt = DateTime.strptime(value,"%d/%m/%Y") |
1843 | - value = dt.strftime("%Y-%m-%d") |
1844 | - elif value and fields_def[field]['type'] == 'float': |
1845 | + value = dt.strftime("%Y-%m-%d") |
1846 | + elif fields_def[field]['type'] == 'float': |
1847 | # remove space and unbreakable space |
1848 | value = re.sub('[Â ]+', '', value) |
1849 | value = float(value.replace(',', '.')) |
1850 | @@ -191,9 +265,9 @@ |
1851 | else: |
1852 | if fields_def[field.split('.')[0]]['type'] in 'many2one': |
1853 | return _get_obj(field, value, fields_def) |
1854 | - |
1855 | + |
1856 | raise osv.except_osv(_('Warning !'), _('%s does not exist')%(value,)) |
1857 | - |
1858 | + |
1859 | i = 1 |
1860 | nb_error = 0 |
1861 | nb_succes = 0 |
1862 | @@ -301,13 +375,13 @@ |
1863 | import_type = 'Import' |
1864 | if import_mode == 'update': |
1865 | import_type = 'Update' |
1866 | - summary = '''Datas Import Summary: |
1867 | + summary = '''Datas Import Summary: |
1868 | Object: %s |
1869 | Records updated: %s |
1870 | Records created: %s |
1871 | '''%(objname, nb_update_success, nb_succes) |
1872 | else: |
1873 | - summary = '''Datas Import Summary: |
1874 | + summary = '''Datas Import Summary: |
1875 | Object: %s |
1876 | Records created: %s |
1877 | '''%(objname, nb_succes) |
1878 | @@ -336,9 +410,14 @@ |
1879 | 'description': 'Rejected Lines', |
1880 | 'res_model': 'res.request', |
1881 | 'res_id': req_id, |
1882 | - 'datas': base64.encodestring(errorfile.read()), |
1883 | + 'datas': base64.encodestring(errorfile.read()), |
1884 | }) |
1885 | |
1886 | + if impobj == 'product.product': |
1887 | + # Clear the cache |
1888 | + self._cache[dbname] = {} |
1889 | + self.pool.get('product.nomenclature')._cache[dbname] = {} |
1890 | + |
1891 | errorfile.close() |
1892 | cr.commit() |
1893 | cr.close() |
1894 | |
1895 | === modified file 'kit/kit_creation.py' |
1896 | --- kit/kit_creation.py 2013-11-26 07:54:42 +0000 |
1897 | +++ kit/kit_creation.py 2014-03-21 08:49:45 +0000 |
1898 | @@ -1285,9 +1285,6 @@ |
1899 | 'original_from_process_stock_move': False, |
1900 | } |
1901 | |
1902 | - def write(self, cr, uid, ids, vals, context=None): |
1903 | - return super(stock_move, self).write(cr, uid, ids, vals, context=context) |
1904 | - |
1905 | def assign_to_kit(self, cr, uid, ids, context=None): |
1906 | ''' |
1907 | open the assign to kit wizard |
1908 | |
1909 | === modified file 'msf_audittrail/audittrail.py' |
1910 | --- msf_audittrail/audittrail.py 2014-03-12 08:56:42 +0000 |
1911 | +++ msf_audittrail/audittrail.py 2014-03-21 08:49:45 +0000 |
1912 | @@ -24,14 +24,14 @@ |
1913 | from osv.orm import orm_template |
1914 | from tools.translate import _ |
1915 | from lxml import etree |
1916 | -from datetime import * |
1917 | +from datetime import datetime |
1918 | import ir |
1919 | import pooler |
1920 | import time |
1921 | import tools |
1922 | import logging |
1923 | from tools.safe_eval import safe_eval as eval |
1924 | -import logging |
1925 | + |
1926 | |
1927 | class purchase_order(osv.osv): |
1928 | _name = 'purchase.order' |
1929 | @@ -40,6 +40,7 @@ |
1930 | |
1931 | purchase_order() |
1932 | |
1933 | + |
1934 | class purchase_order_line(osv.osv): |
1935 | _name = 'purchase.order.line' |
1936 | _inherit = 'purchase.order.line' |
1937 | @@ -47,6 +48,7 @@ |
1938 | |
1939 | purchase_order_line() |
1940 | |
1941 | + |
1942 | class sale_order(osv.osv): |
1943 | _name = 'sale.order' |
1944 | _inherit = 'sale.order' |
1945 | @@ -54,6 +56,7 @@ |
1946 | |
1947 | sale_order() |
1948 | |
1949 | + |
1950 | class sale_order_line(osv.osv): |
1951 | _name = 'sale.order.line' |
1952 | _inherit = 'sale.order.line' |
1953 | @@ -61,6 +64,7 @@ |
1954 | |
1955 | sale_order_line() |
1956 | |
1957 | + |
1958 | class stock_picking(osv.osv): |
1959 | _name = 'stock.picking' |
1960 | _inherit = 'stock.picking' |
1961 | @@ -68,6 +72,7 @@ |
1962 | |
1963 | stock_picking() |
1964 | |
1965 | + |
1966 | class stock_move(osv.osv): |
1967 | _name = 'stock.move' |
1968 | _inherit = 'stock.move' |
1969 | @@ -80,6 +85,7 @@ |
1970 | |
1971 | stock_move() |
1972 | |
1973 | + |
1974 | class account_invoice(osv.osv): |
1975 | _name = 'account.invoice' |
1976 | _inherit = 'account.invoice' |
1977 | @@ -87,6 +93,7 @@ |
1978 | |
1979 | account_invoice() |
1980 | |
1981 | + |
1982 | class account_invoice_line(osv.osv): |
1983 | _name = 'account.invoice.line' |
1984 | _inherit = 'account.invoice.line' |
1985 | @@ -94,6 +101,7 @@ |
1986 | |
1987 | account_invoice_line() |
1988 | |
1989 | + |
1990 | class account_bank_statement(osv.osv): |
1991 | _name = 'account.bank.statement' |
1992 | _inherit = 'account.bank.statement' |
1993 | @@ -101,6 +109,7 @@ |
1994 | |
1995 | account_bank_statement() |
1996 | |
1997 | + |
1998 | class account_bank_statement_line(osv.osv): |
1999 | _name = 'account.bank.statement.line' |
2000 | _inherit = 'account.bank.statement.line' |
2001 | @@ -149,6 +158,7 @@ |
2002 | |
2003 | account_analytic_account() |
2004 | |
2005 | + |
2006 | class account_period(osv.osv): |
2007 | _name = 'account.period' |
2008 | _inherit = 'account.period' |
2009 | @@ -167,13 +177,12 @@ |
2010 | res = super(ir_module, self).update_translations(cr, uid, ids, filter_lang=None, context=context) |
2011 | |
2012 | msf_profile_id = self.search(cr, uid, [('name', '=', 'msf_profile')], context=context) |
2013 | - |
2014 | + |
2015 | if not msf_profile_id or msf_profile_id[0] not in ids: |
2016 | return res |
2017 | |
2018 | tr_obj = self.pool.get('ir.translation') |
2019 | act_obj = self.pool.get('ir.actions.act_window') |
2020 | - language_obj = self.browse(cr, uid, ids)[0] |
2021 | src = 'Track changes' |
2022 | if not filter_lang: |
2023 | pool = pooler.get_pool(cr.dbname) |
2024 | @@ -194,11 +203,11 @@ |
2025 | # Search all actions to rename |
2026 | act_ids = act_obj.search(cr, uid, [('name', '=', src)], context=context) |
2027 | for act in act_ids: |
2028 | - exist = tr_obj.search(cr, uid, [('lang', '=', lang), |
2029 | - ('type', '=', 'model'), |
2030 | - ('src', '=', src), |
2031 | - ('name', '=', 'ir.actions.act_window,name'), |
2032 | - ('value', '=', trans), |
2033 | + exist = tr_obj.search(cr, uid, [('lang', '=', lang), |
2034 | + ('type', '=', 'model'), |
2035 | + ('src', '=', src), |
2036 | + ('name', '=', 'ir.actions.act_window,name'), |
2037 | + ('value', '=', trans), |
2038 | ('res_id', '=', act)], context=context) |
2039 | if not exist: |
2040 | tr_obj.create(cr, uid, {'lang': lang, |
2041 | @@ -224,6 +233,7 @@ |
2042 | |
2043 | audittrail_log_sequence() |
2044 | |
2045 | + |
2046 | class audittrail_rule(osv.osv): |
2047 | """ |
2048 | For Auddittrail Rule |
2049 | @@ -236,9 +246,9 @@ |
2050 | "log_read": fields.boolean("Log Reads", help="Select this if you want to keep track of read/open on any record of the object of this rule"), |
2051 | "log_write": fields.boolean("Log Writes", help="Select this if you want to keep track of modification on any record of the object of this rule"), |
2052 | "log_unlink": fields.boolean("Log Deletes", help="Select this if you want to keep track of deletion on any record of the object of this rule"), |
2053 | - "log_create": fields.boolean("Log Creates",help="Select this if you want to keep track of creation on any record of the object of this rule"), |
2054 | - "log_action": fields.boolean("Log Action",help="Select this if you want to keep track of actions on the object of this rule"), |
2055 | - "log_workflow": fields.boolean("Log Workflow",help="Select this if you want to keep track of workflow on any record of the object of this rule"), |
2056 | + "log_create": fields.boolean("Log Creates", help="Select this if you want to keep track of creation on any record of the object of this rule"), |
2057 | + "log_action": fields.boolean("Log Action", help="Select this if you want to keep track of actions on the object of this rule"), |
2058 | + "log_workflow": fields.boolean("Log Workflow", help="Select this if you want to keep track of workflow on any record of the object of this rule"), |
2059 | "domain_filter": fields.char(size=128, string="Domain", help="Python expression !"), |
2060 | "state": fields.selection((("draft", "Draft"), |
2061 | ("subscribed", "Subscribed")), |
2062 | @@ -257,15 +267,6 @@ |
2063 | 'domain_filter': [], |
2064 | } |
2065 | |
2066 | -# we replace the sql_constraint below by a Python constraint which checks that there is one type of rule per type. |
2067 | -# _sql_constraints = [ |
2068 | -# ('model_uniq', 'unique (object_id)', """There is a rule defined on this object\n You can not define other on the same!""") |
2069 | -# ] |
2070 | - |
2071 | - _sql_constraints = [ |
2072 | - ('rule_name_uniq', 'unique(name)', """The AuditTrail rule name must be unique!""") |
2073 | - ] |
2074 | - |
2075 | def _check_domain_filter(self, cr, uid, ids, context=None): |
2076 | """ |
2077 | Check that if you select cross docking, you do not have an other location than cross docking |
2078 | @@ -274,20 +275,39 @@ |
2079 | ids = [ids] |
2080 | if context is None: |
2081 | context = {} |
2082 | - |
2083 | + |
2084 | for rule in self.browse(cr, uid, ids, context=context): |
2085 | domain = eval(rule.domain_filter) |
2086 | for d in tuple(domain): |
2087 | if len(d[0].split('.')) > 2: |
2088 | return False |
2089 | - |
2090 | + |
2091 | return True |
2092 | - |
2093 | + |
2094 | + |
2095 | + _sql_constraints = [ |
2096 | + ('rule_name_uniq', 'unique(name)', """The AuditTrail rule name must be unique!""") |
2097 | + ] |
2098 | + |
2099 | _constraints = [ |
2100 | (_check_domain_filter, 'The domain shouldn\'t contain a right element in condition with more than 2 elements.', ['domain_filter']), |
2101 | ] |
2102 | + |
2103 | __functions = {} |
2104 | |
2105 | + |
2106 | + def write(self, cr, uid, ids, value, context=None): |
2107 | + if isinstance(ids, (int, long)): |
2108 | + ids = [ids] |
2109 | + for rule in self.browse(cr, uid, ids): |
2110 | + self.get_functionnal_fields.clear_cache(cr.dbname, objname=rule.object_id.model, ids=[rule.id]) |
2111 | + for method in ['read', 'create', 'write', 'unlink']: |
2112 | + field_name = 'log_' + method |
2113 | + if getattr(rule, field_name): |
2114 | + self.to_trace.clear_cache(cr.dbname, model=rule.object_id.model, method=method) |
2115 | + return super(audittrail_rule, self).write(cr, uid, ids, value, context=context) |
2116 | + |
2117 | + |
2118 | def subscribe(self, cr, uid, ids, *args): |
2119 | """ |
2120 | Subscribe Rule for auditing changes on object and apply shortcut for logs on that object. |
2121 | @@ -298,17 +318,17 @@ |
2122 | """ |
2123 | if isinstance(ids, (int, long)): |
2124 | ids = [ids] |
2125 | - |
2126 | + |
2127 | obj_action = self.pool.get('ir.actions.act_window') |
2128 | obj_model = self.pool.get('ir.model.data') |
2129 | - #start Loop |
2130 | + |
2131 | for thisrule in self.browse(cr, uid, ids): |
2132 | obj = self.pool.get(thisrule.object_id.model) |
2133 | if not obj: |
2134 | raise osv.except_osv( |
2135 | _('WARNING: audittrail is not part of the pool'), |
2136 | _('Change audittrail depends -- Setting rule as DRAFT')) |
2137 | - self.write(cr, uid, [thisrule.id], {"state": "draft"}) |
2138 | + |
2139 | search_view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'msf_audittrail', 'view_audittrail_log_line_search') |
2140 | val = { |
2141 | "name": _('Track changes'), |
2142 | @@ -326,8 +346,8 @@ |
2143 | keyword = 'client_action_relate' |
2144 | value = 'ir.actions.act_window,' + str(action_id) |
2145 | obj_model.ir_set(cr, uid, 'action', keyword, 'View_log_' + thisrule.object_id.model, [thisrule.object_id.model], value, replace=True, isobject=True, xml_id=False) |
2146 | - #End Loop |
2147 | - |
2148 | + # End Loop |
2149 | + |
2150 | # Check if an export model already exist for audittrail.rule |
2151 | export_ids = self.pool.get('ir.exports').search(cr, uid, [('name', '=', 'Log Lines'), ('resource', '=', 'audittrail.log.line')]) |
2152 | if not export_ids: |
2153 | @@ -335,7 +355,7 @@ |
2154 | 'resource': 'audittrail.log.line'}) |
2155 | fields = ['log', 'timestamp', 'sub_obj_name', 'method', 'field_description', 'old_value', 'new_value', 'user_id'] |
2156 | for f in fields: |
2157 | - self.pool.get('ir.exports.line').create(cr, uid, {'name': f, 'export_id': export_id}) |
2158 | + self.pool.get('ir.exports.line').create(cr, uid, {'name': f, 'export_id': export_id}) |
2159 | |
2160 | return True |
2161 | |
2162 | @@ -349,8 +369,8 @@ |
2163 | """ |
2164 | obj_action = self.pool.get('ir.actions.act_window') |
2165 | val_obj = self.pool.get('ir.values') |
2166 | - value='' |
2167 | - #start Loop |
2168 | + value = '' |
2169 | + # start Loop |
2170 | for thisrule in self.browse(cr, uid, ids): |
2171 | if thisrule.id in self.__functions: |
2172 | for function in self.__functions[thisrule.id]: |
2173 | @@ -363,10 +383,182 @@ |
2174 | if val_id: |
2175 | ir.ir_del(cr, uid, val_id[0]) |
2176 | self.write(cr, uid, [thisrule.id], {"state": "draft"}) |
2177 | - #End Loop |
2178 | - |
2179 | + # End Loop |
2180 | + |
2181 | return True |
2182 | |
2183 | + @tools.cache(skiparg=3) |
2184 | + def get_functionnal_fields(self, cr, uid, objname, ids): |
2185 | + # no context to not disturb caching |
2186 | + fields_obj = self.pool.get('ir.model.fields') |
2187 | + fields_ids = fields_obj.search(cr, uid, [('audittrail_rule_ids', 'in', ids)]) |
2188 | + if fields_ids: |
2189 | + ret = [] |
2190 | + obj = self.pool.get(objname) |
2191 | + for field in fields_obj.read(cr, uid, fields_ids, ['name']): |
2192 | + col = obj._all_columns[field['name']].column |
2193 | + if col._properties and not col._classic_write: |
2194 | + ret.append(field['name']) |
2195 | + return ret |
2196 | + return [] |
2197 | + |
2198 | + @tools.cache(skiparg=3) |
2199 | + def to_trace(self, cr, uid, model, method): |
2200 | + obj = self.pool.get(model) |
2201 | + if not obj or not obj._trace: |
2202 | + return False |
2203 | + |
2204 | + log_field = 'log_' + method |
2205 | + return self.search(cr, 1, [('object_id.model', '=', model), (log_field, '=', True), ('state', '=', 'subscribed')]) |
2206 | + |
2207 | + |
2208 | + def audit_log(self, cr, uid, ids, obj, objids, method, previous_value=None, current=None, context=None): |
2209 | + uid_orig = uid |
2210 | + uid = 1 |
2211 | + log_line_obj = self.pool.get('audittrail.log.line') |
2212 | + |
2213 | + if isinstance(objids, (int, long)): |
2214 | + obj_ids = [objids] |
2215 | + previous = [previous_value] |
2216 | + else: |
2217 | + obj_ids = objids[:] |
2218 | + previous = previous_value |
2219 | + |
2220 | + for rule in self.browse(cr, uid, ids, context=context): |
2221 | + if not obj_ids: |
2222 | + # if a previous rule has been applied, stop the log |
2223 | + # i.e: if multiple rules a set for a an object, only the 1st is applied |
2224 | + return True |
2225 | + domain = [] |
2226 | + if rule.domain_filter: |
2227 | + domain = eval(rule.domain_filter) |
2228 | + if domain: |
2229 | + new_dom = ['&', ('id', 'in', obj_ids)] + domain |
2230 | + res_ids = obj.search(cr, uid, new_dom) |
2231 | + if not res_ids: |
2232 | + continue |
2233 | + |
2234 | + # test next rule on res_ids exluded by the rule domain |
2235 | + obj_ids = [x for x in obj_ids if x not in res_ids] |
2236 | + else: |
2237 | + res_ids = obj_ids[:] |
2238 | + obj_ids = [] |
2239 | + |
2240 | + model_name_tolog = rule.object_id.model |
2241 | + parent_field = False |
2242 | + if rule.parent_field_id: |
2243 | + parent_field_display = rule.name_get_field_id.name |
2244 | + parent_field = rule.parent_field_id.name |
2245 | + model_name_tolog = rule.parent_field_id.relation |
2246 | + model_parent_id = self.pool.get('ir.model').search(cr, uid, [('model', '=', model_name_tolog)])[0] |
2247 | + |
2248 | + if method in ('write', 'create'): |
2249 | + original_fields = current.values()[0].keys() |
2250 | + fields_to_trace = {} |
2251 | + |
2252 | + for field in rule.field_ids: |
2253 | + if field.name in original_fields or field.is_function: |
2254 | + fields_to_trace[field.name] = field |
2255 | + |
2256 | + if method != 'create' and not fields_to_trace: |
2257 | + # no changes written in field to trace |
2258 | + continue |
2259 | + |
2260 | + new_values_computed = {} |
2261 | + if parent_field: |
2262 | + new_values_computed = dict((x['id'], x) for x in obj.read(cr, uid, res_ids, [parent_field, parent_field_display], context=context)) |
2263 | + |
2264 | + |
2265 | + for res_id in res_ids: |
2266 | + parent_field_id = False |
2267 | + if parent_field: |
2268 | + parent_field_id = new_values_computed[res_id][parent_field][0] |
2269 | + |
2270 | + vals = { |
2271 | + 'name': rule.object_id.name, |
2272 | + 'method': method, |
2273 | + 'object_id': rule.object_id.id, |
2274 | + 'user_id': uid_orig, |
2275 | + 'res_id': parent_field_id or res_id, |
2276 | + } |
2277 | + |
2278 | + # Add the name of the created sub-object |
2279 | + if parent_field_id: |
2280 | + # get the parent model_id |
2281 | + vals.update({ |
2282 | + 'sub_obj_name': new_values_computed[res_id][parent_field_display], |
2283 | + 'rule_id': rule.id, |
2284 | + 'fct_object_id': rule.object_id.id, |
2285 | + 'object_id': model_parent_id, |
2286 | + 'fct_res_id': res_id |
2287 | + }) |
2288 | + if method == 'unlink': |
2289 | + vals.update({ |
2290 | + 'field_description': get_field_description(rule.object_id), |
2291 | + 'log': self.get_sequence(cr, uid, model_name_tolog, vals['res_id'], context=context), |
2292 | + }) |
2293 | + log_line_obj.create(cr, uid, vals) |
2294 | + |
2295 | + elif method in ('write', 'create'): |
2296 | + if method == 'create': |
2297 | + vals.update({ |
2298 | + 'log': self.get_sequence(cr, uid, model_name_tolog, vals['res_id'], context=context), |
2299 | + 'field_description': get_field_description(rule.object_id), |
2300 | + }) |
2301 | + log_line_obj.create(cr, uid, vals) |
2302 | + if method == 'write': |
2303 | + previous_values = dict((x['id'], x) for x in previous) |
2304 | + record = previous_values[res_id] |
2305 | + else: |
2306 | + record = {} |
2307 | + |
2308 | + for field in fields_to_trace.keys(): |
2309 | + old_value = record.get(field, False) |
2310 | + new_value = current[res_id].get(field, False) |
2311 | + if old_value != new_value: |
2312 | + if fields_to_trace[field].ttype == 'datetime' and old_value and new_value and old_value[:10] == new_value[:10]: |
2313 | + continue |
2314 | + line = vals.copy() |
2315 | + description = fields_to_trace[field].field_description |
2316 | + # UTP-360 |
2317 | + if description == 'Pricelist': |
2318 | + description = 'Currency' |
2319 | + line.update({ |
2320 | + 'field_id': fields_to_trace[field].id, |
2321 | + 'field_description': description, |
2322 | + 'log': self.get_sequence(cr, uid, model_name_tolog, vals['res_id'], context=context), |
2323 | + 'name': field, |
2324 | + 'new_value': new_value, |
2325 | + 'old_value': old_value, |
2326 | + }) |
2327 | + log_line_obj.create(cr, uid, line) |
2328 | + |
2329 | + def get_sequence(self, cr, uid, obj_name, res_id, context=None): |
2330 | + log_seq_obj = self.pool.get('audittrail.log.sequence') |
2331 | + log_sequence = log_seq_obj.search(cr, uid, [('model', '=', obj_name), ('res_id', '=', res_id)]) |
2332 | + if log_sequence: |
2333 | + log_seq = log_seq_obj.browse(cr, uid, log_sequence[0]).sequence |
2334 | + log = log_seq.get_id(code_or_id='id') |
2335 | + else: |
2336 | + # Create a new sequence |
2337 | + seq_pool = self.pool.get('ir.sequence') |
2338 | + seq_typ_pool = self.pool.get('ir.sequence.type') |
2339 | + types = { |
2340 | + 'name': obj_name, |
2341 | + 'code': obj_name, |
2342 | + } |
2343 | + seq_typ_pool.create(cr, uid, types) |
2344 | + seq = { |
2345 | + 'name': obj_name, |
2346 | + 'code': obj_name, |
2347 | + 'prefix': '', |
2348 | + 'padding': 1, |
2349 | + } |
2350 | + seq_id = seq_pool.create(cr, uid, seq) |
2351 | + log_seq_obj.create(cr, uid, {'model': obj_name, 'res_id': res_id, 'sequence': seq_id}) |
2352 | + log = seq_pool.browse(cr, uid, seq_id).get_id(code_or_id='id') |
2353 | + return log |
2354 | + |
2355 | audittrail_rule() |
2356 | |
2357 | |
2358 | @@ -378,61 +570,12 @@ |
2359 | _description = "Log Line" |
2360 | _order = 'timestamp asc' |
2361 | |
2362 | - def _get_name_line(self, cr, uid, ids, field_name, args, context=None): |
2363 | - ''' |
2364 | - Return the value of the field set in the rule |
2365 | - ''' |
2366 | - res = {} |
2367 | - |
2368 | - for line in self.browse(cr, uid, ids, context=context): |
2369 | - if not line.rule_id or not line.fct_res_id or not line.fct_object_id: |
2370 | - res[line.id] = False |
2371 | - else: |
2372 | - field = line.rule_id.name_get_field_id.name |
2373 | - res_id = line.fct_res_id |
2374 | - object_id = self.pool.get(line.fct_object_id.model) |
2375 | - try: |
2376 | - res[line.id] = object_id.read(cr, uid, res_id, [field], context=context)[field] |
2377 | - except TypeError: |
2378 | - res[line.id] = False |
2379 | - |
2380 | - return res |
2381 | - |
2382 | - #### |
2383 | - # TODO : To validate |
2384 | - #### |
2385 | - def _search_name_line(self, cr, uid, obj, name, args, context=None): |
2386 | - ''' |
2387 | - Returns all lines corresponding to the args |
2388 | - ''' |
2389 | - ids = [] |
2390 | - |
2391 | - if not context: |
2392 | - return [] |
2393 | - |
2394 | - for arg in args: |
2395 | - if not arg[2]: |
2396 | - return [] |
2397 | - if arg[0] == 'sub_obj_name' and arg[1] == 'ilike' and arg[2]: |
2398 | - line_ids = self.browse(cr, uid, context.get('active_ids'), context=context) |
2399 | - for line in line_ids: |
2400 | - if line.rule_id and line.fct_res_id and line.fct_object_id: |
2401 | - field = line.rule_id.name_get_field_id.name |
2402 | - res_id = line.fct_res_id |
2403 | - object_id = self.pool.get(line.fct_object_id.model) |
2404 | - if str(object_id.read(cr, uid, res_id, [field], context=context)[field]) == arg[2]: |
2405 | - ids.append(line.id) |
2406 | - |
2407 | - return [('id', 'in', ids)] |
2408 | - |
2409 | - return [] |
2410 | - |
2411 | def _get_values(self, cr, uid, ids, field_name, arg, context=None): |
2412 | ''' |
2413 | Return the value of the field according to his type |
2414 | ''' |
2415 | res = {} |
2416 | - |
2417 | + |
2418 | for line in self.browse(cr, uid, ids, context=context): |
2419 | res[line.id] = {'old_value_fct': False, 'new_value_fct': False} |
2420 | if not line.old_value_text: |
2421 | @@ -443,14 +586,14 @@ |
2422 | res[line.id]['new_value_fct'] = get_value_text(self, cr, uid, line.field_id.id, False, line.new_value, line.fct_object_id or line.object_id, context=context) |
2423 | else: |
2424 | res[line.id]['new_value_fct'] = line.new_value_text |
2425 | - |
2426 | + |
2427 | if not line.old_value_text and not line.new_value_text: |
2428 | self.write(cr, uid, [line.id], {'old_value_text': res[line.id]['old_value_fct'], 'new_value_text': res[line.id]['new_value_fct']}) |
2429 | elif not line.old_value_text: |
2430 | - self.write(cr, uid, [line.id], {'old_value_text': res[line.id]['old_value_fct'],}) |
2431 | + self.write(cr, uid, [line.id], {'old_value_text': res[line.id]['old_value_fct'], }) |
2432 | elif not line.new_value_text: |
2433 | - self.write(cr, uid, [line.id], {'new_value_text': res[line.id]['new_value_fct'],}) |
2434 | - |
2435 | + self.write(cr, uid, [line.id], {'new_value_text': res[line.id]['new_value_fct'], }) |
2436 | + |
2437 | return res |
2438 | |
2439 | def _get_field_name(self, cr, uid, ids, field_name, arg, context=None): |
2440 | @@ -511,7 +654,7 @@ |
2441 | |
2442 | for arg in args: |
2443 | if arg[0] == 'trans_field_description': |
2444 | - tr_fields = tr_obj.search(cr, uid, [('lang', '=', lang), |
2445 | + tr_fields = tr_obj.search(cr, uid, [('lang', '=', lang), |
2446 | ('type', 'in', ['field', 'model']), |
2447 | ('value', arg[1], arg[2])], context=context) |
2448 | |
2449 | @@ -542,27 +685,15 @@ |
2450 | 'field_description': fields.char('Field Description', size=64), |
2451 | 'trans_field_description': fields.function(_get_field_name, fnct_search=_src_field_name, method=True, type='char', size=64, string='Field Description', store=False), |
2452 | 'sub_obj_name': fields.char(size=64, string='Order line'), |
2453 | -# 'sub_obj_name': fields.function(fnct=_get_name_line, fnct_search=_search_name_line, method=True, type='char', string='Order line', store=False), |
2454 | # These 3 fields allows the computation of the name of the subobject (sub_obj_name) |
2455 | 'rule_id': fields.many2one('audittrail.rule', string='Rule'), |
2456 | 'fct_res_id': fields.integer(string='Res. Id'), |
2457 | 'fct_object_id': fields.many2one('ir.model', string='Fct. Object'), |
2458 | } |
2459 | |
2460 | - def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): |
2461 | - ''' |
2462 | - Display the name of the resource on the tree view |
2463 | - ''' |
2464 | - res = super(osv.osv, self).fields_view_get(cr, uid, view_id, view_type, context=context, toolbar=toolbar, submenu=submenu) |
2465 | - # TODO: Waiting OEB-86 |
2466 | -# if view_type == 'tree' and context.get('active_ids') and context.get('active_model'): |
2467 | -# element_name = self.pool.get(context.get('active_model')).name_get(cr, uid, context.get('active_ids'), context=context)[0][1] |
2468 | -# xml_view = etree.fromstring(res['arch']) |
2469 | -# for element in xml_view.iter("tree"): |
2470 | -# element.set('string', element_name) |
2471 | -# res['arch'] = etree.tostring(xml_view) |
2472 | - return res |
2473 | - |
2474 | + _defaults = { |
2475 | + 'timestamp': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), |
2476 | + } |
2477 | def _get_report_name(self, cr, uid, ids, context=None): |
2478 | if isinstance(ids, (int, long)): |
2479 | ids = [ids] |
2480 | @@ -574,11 +705,18 @@ |
2481 | if obj: |
2482 | name_get = obj.name_get(cr, uid, [self_info.res_id]) |
2483 | if name_get and name_get[0]: |
2484 | - name = name_get[0][1].replace('/','_') |
2485 | + name = name_get[0][1].replace('/', '_') |
2486 | return "LL_%s_%s" % (name, time.strftime('%Y%m%d')) |
2487 | |
2488 | audittrail_log_line() |
2489 | |
2490 | +class ir_model_fields(osv.osv): |
2491 | + _name = 'ir.model.fields' |
2492 | + _inherit = 'ir.model.fields' |
2493 | + _columns = { |
2494 | + 'audittrail_rule_ids': fields.many2many('audittrail.rule', 'audit_rule_field_rel', 'field_id', 'rule_id', string='Audit rules'), |
2495 | + } |
2496 | +ir_model_fields() |
2497 | |
2498 | def get_value_text(self, cr, uid, field_id, field_name, values, model, context=None): |
2499 | """ |
2500 | @@ -593,7 +731,7 @@ |
2501 | """ |
2502 | if not context: |
2503 | context = {} |
2504 | - if field_name in('__last_update','id'): |
2505 | + if field_name in('__last_update', 'id'): |
2506 | return values |
2507 | pool = pooler.get_pool(cr.dbname) |
2508 | field_pool = pool.get('ir.model.fields') |
2509 | @@ -624,7 +762,7 @@ |
2510 | res = relation_model_object[relation_model_pool._rec_name] |
2511 | return res |
2512 | |
2513 | - elif field['ttype'] in ('many2many','one2many'): |
2514 | + elif field['ttype'] in ('many2many', 'one2many'): |
2515 | res = [] |
2516 | if values and values != '[]': |
2517 | values = values[1:-1].split(',') |
2518 | @@ -644,7 +782,6 @@ |
2519 | if values: |
2520 | # Display only the date on log line (Comment the next line and uncomment the next one if you want display the time) |
2521 | date_format = self.pool.get('date.tools').get_date_format(cr, uid, context=context) |
2522 | - #date_format = self.pool.get('date.tools').get_datetime_format(cr, uid, context=context) |
2523 | try: |
2524 | res = datetime.strptime(values, '%Y-%m-%d %H:%M:%S') |
2525 | except ValueError: |
2526 | @@ -660,532 +797,19 @@ |
2527 | res = dict(sel[field['name']]['selection']).get(values) |
2528 | name = '%s,%s' % (fct_object, field['name']) |
2529 | # Search translation |
2530 | - res_tr_ids = self.pool.get('ir.translation').search(cr, uid, [('type', '=', 'selection'), ('name', '=', name),('src', 'in', [values])]) |
2531 | + res_tr_ids = self.pool.get('ir.translation').search(cr, uid, [('type', '=', 'selection'), ('name', '=', name), ('src', 'in', [values])]) |
2532 | if res_tr_ids: |
2533 | res = self.pool.get('ir.translation').read(cr, uid, res_tr_ids, ['value'])[0]['value'] |
2534 | return res |
2535 | |
2536 | return values |
2537 | |
2538 | -def create_log_line(self, cr, uid, model, lines=[]): |
2539 | - """ |
2540 | - Creates lines for changed fields with its old and new values |
2541 | - |
2542 | - @param cr: the current row, from the database cursor, |
2543 | - @param uid: the current user’s ID for security checks, |
2544 | - @param model: Object who's values are being changed |
2545 | - @param lines: List of values for line is to be created |
2546 | - """ |
2547 | - pool = pooler.get_pool(cr.dbname) |
2548 | - obj_pool = pool.get(model.model) |
2549 | - model_pool = pool.get('ir.model') |
2550 | - field_pool = pool.get('ir.model.fields') |
2551 | - log_line_pool = pool.get('audittrail.log.line') |
2552 | - #start Loop |
2553 | - for line in lines: |
2554 | - dict_of_values = {} |
2555 | - if line['name'] in('__last_update','id'): |
2556 | - continue |
2557 | - if obj_pool._inherits: |
2558 | - inherits_ids = model_pool.search(cr, uid, [('model', '=', obj_pool._inherits.keys()[0])]) |
2559 | - field_ids = field_pool.search(cr, uid, [('name', '=', line['name']), ('model_id', 'in', (model.id, inherits_ids[0]))]) |
2560 | - else: |
2561 | - field_ids = field_pool.search(cr, uid, [('name', '=', line['name']), ('model_id', '=', model.id)]) |
2562 | - field_id = field_ids and field_ids[0] or False |
2563 | - |
2564 | - if field_id: |
2565 | - field = field_pool.read(cr, uid, field_id) |
2566 | - if field['ttype'] == 'selection': |
2567 | - # if we have a fields.selection, we want to evaluate the 2nd part of the tuple which is user readable |
2568 | - try: |
2569 | - dict_of_values = dict(self.pool.get(field['model'])._columns[line['name']].selection) |
2570 | - except TypeError as e: |
2571 | - logging.getLogger('Track changes').warning("""Can\'t track changes for the field %s of the model %s. Error is %s""" |
2572 | - % (line['name'], model.name, e)) |
2573 | - |
2574 | - # Get the values |
2575 | - old_value = line.get('old_value') |
2576 | - new_value = line.get('new_value') |
2577 | - method = line.get('method') |
2578 | - |
2579 | -# if old_value == new_value and method not in ('create', 'unlink'): |
2580 | -# continue |
2581 | - # the check below is for the case where we have empty fields but with different types (i.e. transport_type that was comparing a unicode and a boolean) |
2582 | - if not old_value: |
2583 | - old_value = False |
2584 | - if not new_value: |
2585 | - new_value = False |
2586 | - if new_value == old_value: |
2587 | - continue # nothing has changed, nothing to log |
2588 | - |
2589 | - # for the many2one field, we compare old_value and new_value with the name (uf_1624), so the 2nd part of the tupe (old_value[1] == new_value[1]) |
2590 | - if method not in ('create', 'unlink') and (old_value == new_value \ |
2591 | - or (field['ttype'] == 'datetime' and old_value and new_value and old_value[:10] == new_value[:10])\ |
2592 | - or (field['ttype'] == 'many2one' and old_value and new_value and old_value[1] == new_value[1])\ |
2593 | - or (field['ttype'] == 'selection' and old_value and new_value and dict_of_values.get(old_value) == dict_of_values.get(new_value))): |
2594 | - continue |
2595 | - |
2596 | - res_id = line.get('res_id') |
2597 | - name = line.get('name', '') |
2598 | - object_id = line.get('object_id') |
2599 | - user_id = line.get('user_id') |
2600 | - timestamp = line.get('timestamp', time.strftime('%Y-%m-%d %H:%M:%S')) |
2601 | - log = line.get('log') |
2602 | - field_description = line.get('field_description', '') |
2603 | - sub_obj_name = line.get('sub_obj_name', '') |
2604 | - rule_id = line.get('rule_id') |
2605 | - fct_res_id = line.get('fct_res_id') |
2606 | - fct_object_id = line.get('fct_object_id') |
2607 | - |
2608 | - if res_id: |
2609 | - # Get the log number |
2610 | - seq_object_id = object_id |
2611 | - seq_res_id = res_id |
2612 | - fct_object = self.pool.get('ir.model').browse(cr, uid, seq_object_id) |
2613 | - log_sequence = self.pool.get('audittrail.log.sequence').search(cr, uid, [('model', '=', fct_object.model), ('res_id', '=', seq_res_id)]) |
2614 | - if log_sequence: |
2615 | - log_seq = self.pool.get('audittrail.log.sequence').browse(cr, uid, log_sequence[0]).sequence |
2616 | - log = log_seq.get_id(code_or_id='id') |
2617 | - else: |
2618 | - # Create a new sequence |
2619 | - seq_pool = self.pool.get('ir.sequence') |
2620 | - seq_typ_pool = self.pool.get('ir.sequence.type') |
2621 | - types = { |
2622 | - 'name': fct_object.name, |
2623 | - 'code': fct_object.model, |
2624 | - } |
2625 | - seq_typ_pool.create(cr, uid, types) |
2626 | - seq = { |
2627 | - 'name': fct_object.name, |
2628 | - 'code': fct_object.model, |
2629 | - 'prefix': '', |
2630 | - 'padding': 1, |
2631 | - } |
2632 | - seq_id = seq_pool.create(cr, uid, seq) |
2633 | - self.pool.get('audittrail.log.sequence').create(cr, uid, {'model': fct_object.model, 'res_id': seq_res_id, 'sequence': seq_id}) |
2634 | - log = self.pool.get('ir.sequence').browse(cr, uid, seq_id).get_id(code_or_id='id') |
2635 | - |
2636 | - |
2637 | - if field_id: |
2638 | - field_description = field['field_description'] |
2639 | - if field_description == 'Pricelist': |
2640 | - field_description = 'Currency' |
2641 | - |
2642 | -# if field['ttype'] == 'many2one': |
2643 | -# if type(old_value) == tuple: |
2644 | -# old_value = old_value[0] |
2645 | -# if type(new_value) == tuple: |
2646 | -# new_value = new_value[0] |
2647 | - |
2648 | - |
2649 | - vals = { |
2650 | - "field_id": field_id, |
2651 | - "old_value": old_value or '', |
2652 | - "new_value": new_value or '', |
2653 | - "field_description": field_description, |
2654 | - "res_id": res_id, |
2655 | - "name": name, |
2656 | - "object_id": object_id, |
2657 | - "user_id": user_id, |
2658 | - "method": method, |
2659 | - "timestamp": timestamp, |
2660 | - "log": log, |
2661 | - "rule_id": rule_id, |
2662 | - "fct_res_id": fct_res_id, |
2663 | - "fct_object_id": fct_object_id, |
2664 | - "sub_obj_name": sub_obj_name, |
2665 | - } |
2666 | - log_line_pool.create(cr, uid, vals) |
2667 | - #End Loop |
2668 | - return True |
2669 | - |
2670 | -def _get_domain_fields(self, domain=[]): |
2671 | - ''' |
2672 | - Returns fields to read from the domain |
2673 | - ''' |
2674 | - ret_f = [] |
2675 | - for d in domain: |
2676 | - ret_f.append(d[0]) |
2677 | - |
2678 | - return ret_f |
2679 | - |
2680 | -def _check_domain(self, cr, uid, vals=[], domain=[], model=False, res_id=False): |
2681 | - ''' |
2682 | - Check if the values check with the domain |
2683 | - ''' |
2684 | - res = True |
2685 | - pool = pooler.get_pool(cr.dbname) |
2686 | - for d in tuple(domain): |
2687 | - assert d[1] in ('=', '!=', 'in', 'not in'), _("'%s' Not comprehensive operator... Please use only '=', '!=', 'in' and 'not in' operators") %(d[1]) |
2688 | - |
2689 | - if len(d[0].split('.')) == 2 and model: |
2690 | - p_rel, p_field = d[0].split('.') |
2691 | - parent_field_id = pool.get('ir.model.fields').search(cr, uid, [('model', '=', model.model), ('name', '=', p_rel)]) |
2692 | - parent_field = pool.get('ir.model.fields').browse(cr, uid, parent_field_id) |
2693 | - if not vals.get(p_rel) and res_id: |
2694 | - vals[d[0]] = self.pool.get(model.model).read(cr, uid, res_id, [p_rel])[p_rel] |
2695 | - if parent_field and parent_field[0].relation and vals.get(p_rel): |
2696 | - if isinstance(vals[p_rel], (int, long)): |
2697 | - p_rel_id = vals[p_rel] |
2698 | - else: |
2699 | - p_rel_id = vals[p_rel][0] |
2700 | - value = pool.get(parent_field[0].relation).read(cr, uid, p_rel_id, [p_field]) |
2701 | - if value: |
2702 | - d = (p_field, d[1], d[2]) |
2703 | - vals[p_field] = value[p_field] |
2704 | - |
2705 | - if d[0] not in vals and model and res_id: |
2706 | - obj = self.pool.get(model.model).read(cr, uid, res_id, [d[0]]) |
2707 | - vals[d[0]] = obj[d[0]] |
2708 | - |
2709 | - if d[1] == '=' and vals[d[0]] != d[2]: |
2710 | - res = False |
2711 | - elif d[1] == '!=' and vals[d[0]] == d[2]: |
2712 | - res = False |
2713 | - elif d[1] == 'in' and vals[d[0]] not in d[2]: |
2714 | - res = False |
2715 | - elif d[1] == 'not in' and vals[d[0]] in d[2]: |
2716 | - res = False |
2717 | - |
2718 | - return res |
2719 | - |
2720 | def get_field_description(model): |
2721 | """ |
2722 | Redefine the field_description for sale order and sale order line |
2723 | """ |
2724 | - if model.model == 'sale.order': |
2725 | - field_description = 'Field Order' |
2726 | - elif model.model == 'sale.order.line': |
2727 | - field_description = 'Field Order Line' |
2728 | - elif model.model== 'stock.picking': |
2729 | - field_description = 'Incoming Shipment' |
2730 | - else: |
2731 | - field_description = model.name |
2732 | - return field_description |
2733 | - |
2734 | -def log_fct(self, cr, uid, model, method, fct_src, fields_to_trace=None, rule_id=False, parent_field_id=False, name_get_field='name', domain='[]', *args, **kwargs): |
2735 | - """ |
2736 | - Logging function: This function is performs logging operations according to method |
2737 | - @param cr: the current database |
2738 | - @param uid: the current user’s ID for security checks, |
2739 | - @param model: Object who's values are being changed |
2740 | - @param method: method to log: create, read, write, unlink |
2741 | - @param fct_src: execute method of Object proxy |
2742 | - |
2743 | - @return: Returns result as per method of Object proxy |
2744 | - """ |
2745 | - if not fields_to_trace: |
2746 | - fields_to_trace = [] |
2747 | - uid_orig = uid |
2748 | - uid = 1 |
2749 | - pool = pooler.get_pool(cr.dbname) |
2750 | - resource_pool = pool.get(model) |
2751 | - model_pool = pool.get('ir.model') |
2752 | - |
2753 | - model_ids = model_pool.search(cr, uid, [('model', '=', model)]) |
2754 | - model_id = model_ids and model_ids[0] or False |
2755 | - assert model_id, _("'%s' Model does not exist...") %(model,) |
2756 | - model = model_pool.browse(cr, uid, model_id) |
2757 | - domain = eval(domain) |
2758 | - fields_to_read = ['id'] |
2759 | - |
2760 | - old_values = {} |
2761 | - if method in ('create'): |
2762 | - res_id = fct_src(self, *args, **kwargs) |
2763 | - |
2764 | - # If the object doesn't match with the domain |
2765 | - if domain and not _check_domain(self, cr, uid, args[2], domain, model, res_id): |
2766 | - return res_id |
2767 | - |
2768 | - model_id = model.id |
2769 | - model_name = model.name |
2770 | - # If we are on the children object, escalate to the parent log |
2771 | - if parent_field_id: |
2772 | - parent_field = pool.get('ir.model.fields').browse(cr, uid, parent_field_id) |
2773 | - model_id = model_pool.search(cr, uid, [('model', '=', parent_field.relation)]) |
2774 | - if not model_id or not args[2].get(parent_field.name, False): |
2775 | - return res_id |
2776 | - else: |
2777 | - model_id = model_id[0] |
2778 | - model_name = parent_field.model_id.name |
2779 | - resource = resource_pool.read(cr, uid, res_id, [parent_field.name, name_get_field or 'name']) |
2780 | - res_id2 = resource[parent_field.name][0] |
2781 | - else: |
2782 | - res_id2 = res_id |
2783 | - |
2784 | - vals = { |
2785 | - "name": '%s' %model.name, |
2786 | - "method": method, |
2787 | - "object_id": model_id, |
2788 | - "user_id": uid_orig, |
2789 | - "res_id": res_id2, |
2790 | - "field_description": get_field_description(model), |
2791 | - } |
2792 | - |
2793 | - # Add the name of the created sub-object |
2794 | - if parent_field_id: |
2795 | - vals.update({'sub_obj_name': resource[name_get_field or 'name'], |
2796 | - 'rule_id': rule_id, |
2797 | - 'fct_object_id': model.id, |
2798 | - 'fct_res_id': res_id}) |
2799 | - |
2800 | - # We create only one line on creation (not one line by field) |
2801 | - create_log_line(self, cr, uid, model, [vals]) |
2802 | - |
2803 | - # Get new values |
2804 | - if res_id and fields_to_trace: |
2805 | - resource = resource_pool.read(cr, uid, res_id, fields_to_trace) |
2806 | - if 'id' in resource: |
2807 | - del resource['id'] |
2808 | - |
2809 | - # now we create one line for each field tracked |
2810 | - lines = [] |
2811 | - for field in resource.keys(): |
2812 | - line = vals.copy() |
2813 | - line.update({ |
2814 | - 'name': field, |
2815 | - 'new_value': resource[field], |
2816 | - }) |
2817 | - lines.append(line) |
2818 | - |
2819 | - create_log_line(self, cr, uid, model, lines) |
2820 | - |
2821 | - return res_id |
2822 | - |
2823 | - elif method in ('unlink'): |
2824 | - res_ids = [] |
2825 | - if isinstance(args[2], (int, long)): |
2826 | - res_ids = [args[2]] |
2827 | - else: |
2828 | - res_ids = list(args[2]) |
2829 | - model_name = model.name |
2830 | - model_id = model.id |
2831 | - fields_to_read = [name_get_field, 'name'] |
2832 | - fields_to_read.extend(_get_domain_fields(self, domain)) |
2833 | - |
2834 | - if parent_field_id: |
2835 | - parent_field = pool.get('ir.model.fields').browse(cr, uid, parent_field_id) |
2836 | - model_id = model_pool.search(cr, uid, [('model', '=', parent_field.relation)]) |
2837 | - # If the parent object is not a valid object |
2838 | - if not model_id: |
2839 | - return fct_src(self, *args, **kwargs) |
2840 | - else: |
2841 | - model_id = model_id[0] |
2842 | - model_name = parent_field.model_id.name |
2843 | - |
2844 | - for res_id in res_ids: |
2845 | - old_values[res_id] = resource_pool.read(cr, uid, res_id, fields_to_read) |
2846 | - # If the object doesn't match with the domain |
2847 | - if domain and not _check_domain(self, cr, uid, old_values[res_id], domain, model, res_id): |
2848 | - res_ids.pop(res_ids.index(res_id)) |
2849 | - continue |
2850 | - if model_name == 'Sales Order': |
2851 | - model_name = 'Field Order' |
2852 | - elif model_name == 'Sales Order Line': |
2853 | - model_name = 'Field Order Line' |
2854 | - elif model_name == 'Picking List': |
2855 | - model_name = 'Incoming Shipment' |
2856 | - vals = { |
2857 | - "name": "%s" %model_name, |
2858 | - "method": method, |
2859 | - "object_id": model_id, |
2860 | - "user_id": uid_orig, |
2861 | - "field_description": model_name, |
2862 | - } |
2863 | - |
2864 | - if not parent_field_id: |
2865 | - vals.update({'res_id': res_id}) |
2866 | - else: |
2867 | - ressource = resource_pool.read(cr, uid, res_id, [parent_field.name, name_get_field or 'name']) |
2868 | - res_id = ressource[parent_field.name] |
2869 | - # Add the name of the created sub-object |
2870 | - if res_id: |
2871 | - res_id = res_id[0] |
2872 | - else: |
2873 | - continue |
2874 | - vals = { |
2875 | - "name": "%s" %model_name, |
2876 | - "sub_obj_name": "%s" %ressource.get(name_get_field or 'name', ''), |
2877 | - "method": method, |
2878 | - "object_id": model_id, |
2879 | - "user_id": uid_orig, |
2880 | - "res_id": res_id, |
2881 | - "field_description": model_name, |
2882 | - } |
2883 | - |
2884 | - # We create only one line when deleting a record |
2885 | - create_log_line(self, cr, uid, model, [vals]) |
2886 | - res = fct_src(self, *args, **kwargs) |
2887 | - return res |
2888 | - else: |
2889 | - res_ids = [] |
2890 | - res = True |
2891 | - fields = [] |
2892 | - if args: |
2893 | - if isinstance(args[2], (long, int)): |
2894 | - res_ids = [args[2]] |
2895 | - else: |
2896 | - res_ids = list(args[2]) |
2897 | - if len(args)>3 and type(args[3]) == dict: |
2898 | - fields.extend(list(set(args[3]) & set(fields_to_trace))) |
2899 | - # we take below the fields.function that were ignored |
2900 | - fields_obj = self.pool.get('ir.model.fields') |
2901 | - fields_to_trace_ids = fields_obj.search(cr, uid, [('name', 'in', fields_to_trace), ('model_id', '=', model_id)]) |
2902 | - for fields_value in fields_obj.read(cr, uid, fields_to_trace_ids, ['is_function', 'name']): |
2903 | - if fields_value['is_function']: |
2904 | - fields.append(fields_value['name']) |
2905 | - |
2906 | - model_id = model.id |
2907 | - |
2908 | - # if no change on traced fields, variable fields is empty: so nothing to do. |
2909 | - if fields: |
2910 | - if parent_field_id: |
2911 | - parent_field = pool.get('ir.model.fields').browse(cr, uid, parent_field_id) |
2912 | - model_id = model_pool.search(cr, uid, [('model', '=', parent_field.relation)]) |
2913 | - # If the parent object is not a valid object |
2914 | - if not model_id: |
2915 | - return res |
2916 | - else: |
2917 | - model_id = model_id[0] |
2918 | - |
2919 | - if parent_field.name not in fields: |
2920 | - fields.append(parent_field.name) |
2921 | - |
2922 | - fields.extend(_get_domain_fields(self, domain)) |
2923 | - # Remove double entries |
2924 | - fields = list(set(fields)) |
2925 | - if name_get_field not in fields: |
2926 | - fields.append(name_get_field) |
2927 | - |
2928 | - # Get old values |
2929 | - if res_ids: |
2930 | - for resource in resource_pool.read(cr, uid, res_ids, fields): |
2931 | - if parent_field_id and not args[3].get(parent_field.name, resource[parent_field.name]): |
2932 | - continue |
2933 | - if domain and not _check_domain(self, cr, uid, resource, domain, model): |
2934 | - res_ids.pop(res_ids.index(resource['id'])) |
2935 | - continue |
2936 | - |
2937 | - resource_id = resource['id'] |
2938 | - if 'id' in resource: |
2939 | - del resource['id'] |
2940 | - |
2941 | - old_value = resource.copy() |
2942 | -# for field in resource.keys(): |
2943 | -# old_value = resource.copy() |
2944 | - |
2945 | - old_values[resource_id] = {'value': old_value} |
2946 | - |
2947 | - # Run the method on object |
2948 | - res = fct_src(self, *args, **kwargs) |
2949 | - |
2950 | - # Get new values |
2951 | - if fields and res_ids: |
2952 | - for resource in resource_pool.read(cr, uid, res_ids, fields): |
2953 | - if parent_field_id and not args[3].get(parent_field.name, resource[parent_field.name]): |
2954 | - continue |
2955 | - res_id = resource['id'] |
2956 | - res_id2 = parent_field_id and resource[parent_field.name][0] or res_id |
2957 | - if 'id' in resource: |
2958 | - del resource['id'] |
2959 | - |
2960 | - vals = { |
2961 | - "method": method, |
2962 | - "object_id": model_id, |
2963 | - "user_id": uid_orig, |
2964 | - "res_id": res_id2, |
2965 | - } |
2966 | - if 'name' in resource: |
2967 | - vals.update({'name': resource['name']}) |
2968 | - |
2969 | - # Add the name of the created sub-object |
2970 | - if parent_field_id: |
2971 | - vals.update({'sub_obj_name': resource[name_get_field], |
2972 | - 'rule_id': rule_id, |
2973 | - 'fct_object_id': model.id, |
2974 | - 'fct_res_id': res_id}) |
2975 | - |
2976 | - lines = [] |
2977 | - for field in resource.keys(): |
2978 | - line = vals.copy() |
2979 | - line.update({ |
2980 | - 'name': field, |
2981 | - 'new_value': resource[field], |
2982 | - 'old_value': old_values[res_id]['value'][field], |
2983 | - }) |
2984 | - lines.append(line) |
2985 | - |
2986 | - create_log_line(self, cr, uid, model, lines) |
2987 | - return res |
2988 | - return True |
2989 | - |
2990 | - |
2991 | -######################################################################### |
2992 | -# # |
2993 | -# OVERRIDE OSV METHODS (only create, write and unlink for the moment) # |
2994 | -# # |
2995 | -######################################################################### |
2996 | - |
2997 | -_old_create = orm.orm.create |
2998 | -_old_write = orm.orm.write |
2999 | -_old_unlink = orm.orm.unlink |
3000 | - |
3001 | -def _audittrail_osv_method(self, old_method, method_name, cr, *args, **kwargs): |
3002 | - """ General wrapper for osv methods """ |
3003 | - # If the object is not marked as traced object, just return the normal method |
3004 | - if not self._trace: |
3005 | - return old_method(self, *args, **kwargs) |
3006 | - |
3007 | - # If the object is traceable |
3008 | - uid_orig = args[1] |
3009 | - model = self._name |
3010 | - pool = pooler.get_pool(cr.dbname) |
3011 | - model_pool = pool.get('ir.model') |
3012 | - rule_pool = pool.get('audittrail.rule') |
3013 | - |
3014 | - def my_fct(cr, uid, model, method, *args, **kwargs): |
3015 | - rule = False |
3016 | - model_ids = model_pool.search(cr, uid, [('model', '=', model)]) |
3017 | - model_id = model_ids and model_ids[0] or False |
3018 | - |
3019 | - if not model_id: |
3020 | - return old_method(self, *args, **kwargs) |
3021 | - |
3022 | - if 'audittrail.rule' in pool.obj_list(): |
3023 | - rule = True |
3024 | - |
3025 | - if not rule: |
3026 | - return old_method(self, *args, **kwargs) |
3027 | - |
3028 | - rule_ids = rule_pool.search(cr, uid, [('object_id', '=', model_id)]) |
3029 | - if not rule_ids: |
3030 | - return old_method(self, *args, **kwargs) |
3031 | - |
3032 | - for thisrule in rule_pool.browse(cr, uid, rule_ids): |
3033 | - # if the rule for the right method, then go inside and do the track change log |
3034 | - if getattr(thisrule, 'log_' + method_name): |
3035 | - fields_to_trace = [(field.name) for field in thisrule.field_ids] |
3036 | - return log_fct(self, cr, uid_orig, model, method, old_method, fields_to_trace, thisrule.id, thisrule.parent_field_id.id, thisrule.name_get_field_id.name, thisrule.domain_filter, *args, **kwargs) |
3037 | - |
3038 | - return old_method(self, *args, **kwargs) |
3039 | - res = my_fct(cr, uid_orig, model, method_name, *args, **kwargs) |
3040 | - return res |
3041 | - |
3042 | - |
3043 | -def _audittrail_create(self, *args, **kwargs): |
3044 | - """ Wrapper to trace the osv.create method """ |
3045 | - return _audittrail_osv_method(self, _old_create, 'create', args[0], *args, **kwargs) |
3046 | - |
3047 | -def _audittrail_write(self, *args, **kwargs): |
3048 | - """ Wrapper to trace the osv.write method """ |
3049 | - return _audittrail_osv_method(self, _old_write, 'write', args[0], *args, **kwargs) |
3050 | - |
3051 | -def _audittrail_unlink(self, *args, **kwargs): |
3052 | - """ Wrapper to trace the osv.unlink method """ |
3053 | - return _audittrail_osv_method(self, _old_unlink, 'unlink', args[0], *args, **kwargs) |
3054 | - |
3055 | -orm.orm.create = _audittrail_create |
3056 | -orm.orm.write = _audittrail_write |
3057 | -orm.orm.unlink = _audittrail_unlink |
3058 | + if model.model == 'stock.picking': |
3059 | + return 'Incoming Shipment' |
3060 | + return model.name |
3061 | |
3062 | # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3063 | |
3064 | === modified file 'msf_budget/__openerp__.py' |
3065 | --- msf_budget/__openerp__.py 2012-07-25 11:49:04 +0000 |
3066 | +++ msf_budget/__openerp__.py 2014-03-21 08:49:45 +0000 |
3067 | @@ -31,12 +31,14 @@ |
3068 | 'init_xml': [], |
3069 | 'update_xml': [ |
3070 | 'security/ir.model.access.csv', |
3071 | + 'msf_budget_sequence.xml', |
3072 | 'msf_budget_report.xml', |
3073 | 'msf_budget_view.xml', |
3074 | 'msf_budget_workflow.xml', |
3075 | 'data/msf_budget_decision_moment_data.xml', |
3076 | 'wizard/wizard_budget_criteria_export_view.xml', |
3077 | 'wizard/wizard_actual_export_view.xml', |
3078 | + 'wizard/wizard_budget_summary.xml', |
3079 | ], |
3080 | 'test': [ |
3081 | 'test/budget_test.yml' |
3082 | |
3083 | === modified file 'msf_budget/analytic.py' |
3084 | --- msf_budget/analytic.py 2013-05-02 14:38:30 +0000 |
3085 | +++ msf_budget/analytic.py 2014-03-21 08:49:45 +0000 |
3086 | @@ -22,7 +22,6 @@ |
3087 | ############################################################################## |
3088 | |
3089 | from osv import osv |
3090 | -from osv import fields |
3091 | from tools.translate import _ |
3092 | |
3093 | class account_analytic_line(osv.osv): |
3094 | @@ -41,7 +40,7 @@ |
3095 | else: |
3096 | # Line without domain (consumption, overhead) |
3097 | raise osv.except_osv(_('No Analytic Domain !'),_("This budget does not have an analytic domain!")) |
3098 | - |
3099 | + |
3100 | return super(account_analytic_line, self).search(cr, uid, args, offset, limit, order, context=context, count=count) |
3101 | account_analytic_line() |
3102 | # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3103 | |
3104 | === modified file 'msf_budget/msf_budget.py' |
3105 | --- msf_budget/msf_budget.py 2014-02-11 08:32:47 +0000 |
3106 | +++ msf_budget/msf_budget.py 2014-03-21 08:49:45 +0000 |
3107 | @@ -28,19 +28,40 @@ |
3108 | _name = "msf.budget" |
3109 | _description = 'MSF Budget' |
3110 | _trace = True |
3111 | - |
3112 | + |
3113 | def _get_total_budget_amounts(self, cr, uid, ids, field_names=None, arg=None, context=None): |
3114 | res = {} |
3115 | - |
3116 | - for budget in self.browse(cr, uid, ids, context=context): |
3117 | - total_amounts = self.pool.get('msf.budget.line')._get_total_amounts(cr, uid, [x.id for x in budget.budget_line_ids], context=context) |
3118 | - |
3119 | - budget_amount = 0.0 |
3120 | - for budget_line in budget.budget_line_ids: |
3121 | - if not budget_line.parent_id: |
3122 | - res[budget.id] = total_amounts[budget_line.id]['budget_amount'] |
3123 | - break |
3124 | - |
3125 | + sql = """ |
3126 | + SELECT expense.budget_id, COALESCE(expense.total, 0.0) - COALESCE(income.total, 0.0) AS diff |
3127 | + FROM ( |
3128 | + SELECT budget_id, SUM(COALESCE(month1 + month2 + month3 + month4 + month5 + month6 + month7 + month8 + month9 + month10 + month11 + month12, 0.0)) AS total |
3129 | + FROM msf_budget_line AS l, account_account AS a, account_account_type AS t |
3130 | + WHERE budget_id IN %s |
3131 | + AND l.account_id = a.id |
3132 | + AND a.user_type = t.id |
3133 | + AND t.code = 'expense' |
3134 | + AND a.type != 'view' |
3135 | + AND l.line_type = 'destination' |
3136 | + GROUP BY budget_id |
3137 | + ) AS expense |
3138 | + LEFT JOIN ( |
3139 | + SELECT budget_id, SUM(COALESCE(month1 + month2 + month3 + month4 + month5 + month6 + month7 + month8 + month9 + month10 + month11 + month12, 0.0)) AS total |
3140 | + FROM msf_budget_line AS l, account_account AS a, account_account_type AS t |
3141 | + WHERE budget_id IN %s |
3142 | + AND l.account_id = a.id |
3143 | + AND a.user_type = t.id |
3144 | + AND t.code = 'income' |
3145 | + AND a.type != 'view' |
3146 | + AND l.line_type = 'destination' |
3147 | + GROUP BY budget_id |
3148 | + ) AS income ON expense.budget_id = income.budget_id""" |
3149 | + cr.execute(sql, (tuple(ids),tuple(ids),)) |
3150 | + tmp_res = cr.fetchall() |
3151 | + if not tmp_res: |
3152 | + return res |
3153 | + for b_id in ids: |
3154 | + res.setdefault(b_id, 0.0) |
3155 | + res.update(dict(tmp_res)) |
3156 | return res |
3157 | |
3158 | def _get_instance_type(self, cr, uid, ids, field_names=None, arg=None, context=None): |
3159 | @@ -93,84 +114,217 @@ |
3160 | 'decision_moment_order': fields.related('decision_moment_id', 'order', string="Decision Moment Order", readonly=True, store=True, type="integer"), |
3161 | 'version': fields.integer('Version'), |
3162 | 'currency_id': fields.many2one('res.currency', 'Currency', required=True), |
3163 | - 'display_type': fields.selection([('all', 'Accounts and destinations'), |
3164 | - ('expense', 'Accounts only'), |
3165 | - ('view', 'Parent accounts only')], string="Display type"), |
3166 | 'type': fields.selection([('normal', 'Normal'), ('view', 'View')], string="Budget type"), |
3167 | 'total_budget_amount': fields.function(_get_total_budget_amounts, method=True, store=False, string="Total Budget Amount", type="float", readonly=True), |
3168 | 'instance_type': fields.function(_get_instance_type, fnct_search=_search_instance_type, method=True, store=False, string='Instance type', type='selection', selection=[('section', 'HQ'), ('coordo', 'Coordo'), ('project', 'Project')], readonly=True), |
3169 | } |
3170 | - |
3171 | + |
3172 | _defaults = { |
3173 | 'currency_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.currency_id.id, |
3174 | 'state': 'draft', |
3175 | - 'display_type': 'all', |
3176 | 'type': 'normal', |
3177 | } |
3178 | |
3179 | _order = 'decision_moment_order desc, version, code' |
3180 | - |
3181 | + |
3182 | + def _check_parent(self, cr, uid, vals, context=None): |
3183 | + """ |
3184 | + Check budget's parent to see if it exist. |
3185 | + Create it if we're on another instance that top cost center one. |
3186 | + Note: context can contains a list of budget lines. This permit to avoid problem of budget line template time consuming. |
3187 | + We hope the copy() will take less time than the creation of an entire budget template. |
3188 | + """ |
3189 | + # Some checks |
3190 | + if context is None: |
3191 | + context = {} |
3192 | + # Prepare some values |
3193 | + top_cost_center = self.pool.get('res.users').browse(cr, uid, uid).company_id.instance_id.top_cost_center_id |
3194 | + ana_obj = self.pool.get('account.analytic.account') |
3195 | + fy_obj = self.pool.get('account.fiscalyear') |
3196 | + tool_obj = self.pool.get('msf.budget.tools') |
3197 | + # Fetch cost center info (id and parent) |
3198 | + cc_id = vals.get('cost_center_id', False) |
3199 | + cc = ana_obj.read(cr, uid, cc_id, ['parent_id'], context=context) |
3200 | + parent_id = cc.get('parent_id', False) and cc.get('parent_id')[0] or False |
3201 | + # Fetch fiscalyear info |
3202 | + fy_id = vals.get('fiscalyear_id', False) |
3203 | + fy = fy_obj.read(cr, uid, fy_id, ['code']) |
3204 | + # Fetch decision moment id |
3205 | + decision_moment_id = vals.get('decision_moment_id', False) |
3206 | + |
3207 | + # Check that no parent cost center exists for the given values |
3208 | + if cc_id and cc_id != top_cost_center.id and parent_id: |
3209 | + parent_cost_center = ana_obj.read(cr, uid, parent_id, ['code', 'name'], context=context) |
3210 | + have_parent_budget = self.search(cr, uid, [('fiscalyear_id', '=', fy_id), ('cost_center_id', '=', parent_id), ('decision_moment_id', '=', decision_moment_id)], count=1, context=context) |
3211 | + if have_parent_budget == 0: |
3212 | + # Create budget's parent |
3213 | + budget_vals = { |
3214 | + 'name': "Budget " + fy.get('code', '')[4:6] + " - " + parent_cost_center.get('name', ''), |
3215 | + 'code': "BU" + fy.get('code')[4:6] + " - " + parent_cost_center.get('code', ''), |
3216 | + 'fiscalyear_id': fy_id, |
3217 | + 'cost_center_id': parent_id, |
3218 | + 'decision_moment_id': decision_moment_id, |
3219 | + 'type': 'view' |
3220 | + } |
3221 | + parent_budget_id = self.create(cr, uid, budget_vals, context=context) |
3222 | + # Create budget's line. |
3223 | + tool_obj.create_budget_lines(cr, uid, parent_budget_id, context=context) |
3224 | + # Validate this parent |
3225 | + self.write(cr, uid, [parent_budget_id], {'state': 'valid'}, context=context) |
3226 | + return True |
3227 | + |
3228 | def create(self, cr, uid, vals, context=None): |
3229 | + """ |
3230 | + Create a budget then check its parent. |
3231 | + """ |
3232 | res = super(msf_budget, self).create(cr, uid, vals, context=context) |
3233 | - # If the "parent" budget does not exist and we're not on the proprietary instance level already, create it. |
3234 | - budget = self.browse(cr, uid, res, context=context) |
3235 | - prop_instance = self.pool.get('res.users').browse(cr, uid, uid).company_id.instance_id |
3236 | - if prop_instance.top_cost_center_id and budget.cost_center_id and budget.cost_center_id.id != prop_instance.top_cost_center_id.id and budget.cost_center_id.parent_id: |
3237 | - parent_cost_center = budget.cost_center_id.parent_id |
3238 | - parent_budget_ids = self.search(cr, |
3239 | - uid, |
3240 | - [('fiscalyear_id','=',budget.fiscalyear_id.id), |
3241 | - ('cost_center_id','=',parent_cost_center.id), |
3242 | - ('decision_moment_id','=',budget.decision_moment_id.id)]) |
3243 | - if len(parent_budget_ids) == 0: |
3244 | - parent_budget_id = self.create(cr, |
3245 | - uid, |
3246 | - {'name': "Budget " + budget.fiscalyear_id.code[4:6] + " - " + parent_cost_center.name, |
3247 | - 'code': "BU" + budget.fiscalyear_id.code[4:6] + " - " + parent_cost_center.code, |
3248 | - 'fiscalyear_id': budget.fiscalyear_id.id, |
3249 | - 'cost_center_id': budget.cost_center_id.parent_id.id, |
3250 | - 'decision_moment_id': budget.decision_moment_id.id, |
3251 | - 'type': 'view'}, context=context) |
3252 | - # Create all lines for all accounts/destinations (no budget values, those are retrieved) |
3253 | - expense_account_ids = self.pool.get('account.account').search(cr, uid, [('is_analytic_addicted', '=', True), |
3254 | - ('user_type_report_type', '!=', 'none'), |
3255 | - ('type', '!=', 'view')], context=context) |
3256 | - destination_obj = self.pool.get('account.destination.link') |
3257 | - destination_link_ids = destination_obj.search(cr, uid, [('account_id', 'in', expense_account_ids)], context=context) |
3258 | - account_destination_ids = [(dest.account_id.id, dest.destination_id.id) |
3259 | - for dest |
3260 | - in destination_obj.browse(cr, uid, destination_link_ids, context=context)] |
3261 | - for account_id, destination_id in account_destination_ids: |
3262 | - budget_line_vals = {'budget_id': parent_budget_id, |
3263 | - 'account_id': account_id, |
3264 | - 'destination_id': destination_id, |
3265 | - 'line_type': 'destination'} |
3266 | - self.pool.get('msf.budget.line').create(cr, uid, budget_line_vals, context=context) |
3267 | - # validate this parent |
3268 | - self.write(cr, uid, [parent_budget_id], {'state': 'valid'}, context=context) |
3269 | - return res |
3270 | - |
3271 | - # Methods for display view lines (warning, dirty, but it works) |
3272 | + # Check parent budget |
3273 | + self._check_parent(cr, uid, vals, context=context) |
3274 | + return res |
3275 | + |
3276 | + def write(self, cr, uid, ids, vals, context=None): |
3277 | + """ |
3278 | + Goal is to update parent budget regarding these criteria: |
3279 | + - context is synchronization |
3280 | + - state is in vals |
3281 | + - state is different from draft (validated or done) |
3282 | + """ |
3283 | + if context is None: |
3284 | + context = {} |
3285 | + res = super(msf_budget, self).write(cr, uid, ids, vals, context=context) |
3286 | + if context.get('sync_update_execution', False) and vals.get('state', False) and vals.get('state') != 'draft': |
3287 | + # Update parent budget |
3288 | + self.update_parent_budgets(cr, uid, ids, context=context) |
3289 | + return res |
3290 | + |
3291 | + def update(self, cr, uid, ids, context=None): |
3292 | + """ |
3293 | + Update given budget. But only update view one. |
3294 | + """ |
3295 | + # Some checks |
3296 | + if context is None: |
3297 | + context = {} |
3298 | + if isinstance(ids, (int, long)): |
3299 | + ids = [ids] |
3300 | + # Prepare some values |
3301 | + ana_obj = self.pool.get('account.analytic.account') |
3302 | + line_obj = self.pool.get('msf.budget.line') |
3303 | + sql = """ |
3304 | + SELECT |
3305 | + SUM(COALESCE(month1, 0)), |
3306 | + SUM(COALESCE(month2, 0)), |
3307 | + SUM(COALESCE(month3, 0)), |
3308 | + SUM(COALESCE(month4, 0)), |
3309 | + SUM(COALESCE(month5, 0)), |
3310 | + SUM(COALESCE(month6, 0)), |
3311 | + SUM(COALESCE(month7, 0)), |
3312 | + SUM(COALESCE(month8, 0)), |
3313 | + SUM(COALESCE(month9, 0)), |
3314 | + SUM(COALESCE(month10, 0)), |
3315 | + SUM(COALESCE(month11, 0)), |
3316 | + SUM(COALESCE(month12, 0)) |
3317 | + FROM msf_budget_line |
3318 | + WHERE id IN %s""" |
3319 | + # Filter budget to only update those that are view one |
3320 | + to_update = self.search(cr, uid, [('id', 'in', ids), ('type', '=', 'view')]) |
3321 | + # Then update budget, one by one, line by line... |
3322 | + for budget in self.browse(cr, uid, to_update, context=context): |
3323 | + cost_center_id = budget.cost_center_id and budget.cost_center_id.id or False |
3324 | + if not cost_center_id: |
3325 | + raise osv.except_osv(_('Error'), _('Problem while reading Cost Center for the given budget: %s') % (budget.get('name', ''),)) |
3326 | + child_cc_ids = ana_obj.search(cr, uid, [('parent_id', 'child_of', cost_center_id)]) |
3327 | + budget_ids = [] |
3328 | + # For each CC, search the last budget |
3329 | + for cc_id in child_cc_ids: |
3330 | + cc_args = [ |
3331 | + ('cost_center_id', '=', cc_id), |
3332 | + ('type', '!=', 'view'), |
3333 | + ('state', '!=', 'draft'), |
3334 | + ('decision_moment_id', '=', budget.decision_moment_id.id) |
3335 | + ] |
3336 | + corresponding_budget_ids = self.search(cr, uid, cc_args, limit=1, order='version DESC') |
3337 | + if corresponding_budget_ids: |
3338 | + budget_ids.append(corresponding_budget_ids) |
3339 | + # Browse each budget line to update it |
3340 | + for budget_line in budget.budget_line_ids: |
3341 | + line_vals = { |
3342 | + 'month1': 0.0, |
3343 | + 'month2': 0.0, |
3344 | + 'month3': 0.0, |
3345 | + 'month4': 0.0, |
3346 | + 'month5': 0.0, |
3347 | + 'month6': 0.0, |
3348 | + 'month7': 0.0, |
3349 | + 'month8': 0.0, |
3350 | + 'month9': 0.0, |
3351 | + 'month10': 0.0, |
3352 | + 'month11': 0.0, |
3353 | + 'month12': 0.0 |
3354 | + } |
3355 | + # search all linked budget lines |
3356 | + args = [('budget_id', 'in', budget_ids), ('account_id', '=', budget_line.account_id.id)] |
3357 | + if budget_line.destination_id: |
3358 | + args.append(('destination_id', '=', budget_line.destination_id.id)) |
3359 | + child_line_ids = line_obj.search(cr, uid, args, context=context) |
3360 | + if child_line_ids: |
3361 | + cr.execute(sql, (tuple(child_line_ids),)) |
3362 | + if cr.rowcount: |
3363 | + tmp_res = cr.fetchall() |
3364 | + res = tmp_res and tmp_res[0] |
3365 | + if res: |
3366 | + for x in xrange(1, 13, 1): |
3367 | + try: |
3368 | + line_vals.update({'month'+str(x): res[x - 1]}) |
3369 | + except IndexError, error: |
3370 | + continue |
3371 | + line_obj.write(cr, uid, [budget_line.id], line_vals) |
3372 | + return True |
3373 | + |
3374 | + def update_parent_budgets(self, cr, uid, ids, context=None): |
3375 | + """ |
3376 | + Search all parent budget and update them. |
3377 | + """ |
3378 | + # Some checks |
3379 | + if context is None: |
3380 | + context = {} |
3381 | + if isinstance(ids, (int, long)): |
3382 | + ids = [ids] |
3383 | + # We only need to update parent budgets. |
3384 | + # So we search all parent cost center (but only them, so we don't care about cost center that are linked to given budgets) |
3385 | + # Then we use these parent cost center to find budget to update (only budget lines) |
3386 | + budgets = self.read(cr, uid, ids, ['cost_center_id']) |
3387 | + cost_center_ids = [x.get('cost_center_id', False) and x.get('cost_center_id')[0] or 0 for x in budgets] |
3388 | + cc_parent_ids = self.pool.get('account.analytic.account')._get_parent_of(cr, uid, cost_center_ids, context=context) |
3389 | + parent_ids = [x for x in cc_parent_ids if x not in cost_center_ids] |
3390 | + to_update = self.search(cr, uid, [('cost_center_id', 'in', parent_ids)]) |
3391 | + # Update budgets |
3392 | + self.update(cr, uid, to_update, context=context) |
3393 | + return True |
3394 | + |
3395 | def button_display_type(self, cr, uid, ids, context=None, *args, **kwargs): |
3396 | """ |
3397 | - Change display type |
3398 | + Just reset the budget view to give the context to the one2many_budget_lines object |
3399 | """ |
3400 | - display_types = {} |
3401 | - for budget in self.read(cr, uid, ids, ['display_type']): |
3402 | - display_types[budget['id']] = budget['display_type'] |
3403 | - |
3404 | - for budget_id in ids: |
3405 | - result = 'all' |
3406 | - if display_types[budget_id] == 'all': |
3407 | - result = 'expense' |
3408 | - elif display_types[budget_id] == 'expense': |
3409 | - result = 'view' |
3410 | - elif display_types[budget_id] == 'view': |
3411 | - result = 'all' |
3412 | - self.write(cr, uid, [budget_id], {'display_type': result}, context=context) |
3413 | - return True |
3414 | - |
3415 | - |
3416 | + if context is None: |
3417 | + context = {} |
3418 | + if isinstance(ids, (int, long)): |
3419 | + ids = [ids] |
3420 | + # do not erase the previous context! |
3421 | + context.update({ |
3422 | + 'active_id': ids[0], |
3423 | + 'active_ids': ids, |
3424 | + }) |
3425 | + return { |
3426 | + 'name': _('Budgets'), |
3427 | + 'type': 'ir.actions.act_window', |
3428 | + 'res_model': 'msf.budget', |
3429 | + 'target': 'crush', |
3430 | + 'view_mode': 'form,tree', |
3431 | + 'view_type': 'form', |
3432 | + 'res_id': ids[0], |
3433 | + 'context': context, |
3434 | + } |
3435 | + |
3436 | def budget_summary_open_window(self, cr, uid, ids, context=None): |
3437 | budget_id = False |
3438 | if not ids: |
3439 | @@ -190,7 +344,7 @@ |
3440 | if isinstance(ids, (int, long)): |
3441 | ids = [ids] |
3442 | budget_id = ids[0] |
3443 | - |
3444 | + |
3445 | if budget_id: |
3446 | parent_line_id = self.pool.get('msf.budget.summary').create(cr, |
3447 | uid, {'budget_id': budget_id}, context=context) |
3448 | @@ -206,6 +360,27 @@ |
3449 | 'context': context |
3450 | } |
3451 | return {} |
3452 | - |
3453 | + |
3454 | + def action_confirmed(self, cr, uid, ids, context=None): |
3455 | + """ |
3456 | + At budget validation we should update all parent budgets. |
3457 | + To do this, each parent need to take all its validated children budget at the last version. |
3458 | + """ |
3459 | + # Some checks |
3460 | + if context is None: |
3461 | + context = {} |
3462 | + if isinstance(ids, (int, long)): |
3463 | + ids = [ids] |
3464 | + # Only validate budget that are draft! |
3465 | + to_validate = [] |
3466 | + for budget in self.read(cr, uid, ids, ['state']): |
3467 | + if budget.get('state', '') and budget.get('state') == 'draft': |
3468 | + to_validate.append(budget.get('id', 0)) |
3469 | + # Change budget statuses. Important in order to include given budgets in their parents! |
3470 | + self.write(cr, uid, to_validate, {'state': 'valid'}, context=context) |
3471 | + # Update parent budget |
3472 | + self.update_parent_budgets(cr, uid, to_validate, context=context) |
3473 | + return True |
3474 | + |
3475 | msf_budget() |
3476 | # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3477 | |
3478 | === modified file 'msf_budget/msf_budget_line.py' |
3479 | --- msf_budget/msf_budget_line.py 2013-09-20 13:40:19 +0000 |
3480 | +++ msf_budget/msf_budget_line.py 2014-03-21 08:49:45 +0000 |
3481 | @@ -20,290 +20,54 @@ |
3482 | ############################################################################## |
3483 | |
3484 | from osv import fields, osv |
3485 | -import datetime |
3486 | -from dateutil.relativedelta import relativedelta |
3487 | |
3488 | -# Overloading the one2many.get for budget lines |
3489 | -# (used for filtering budget lines in the form view; |
3490 | -# dirty as f*ck, but hey, it works) |
3491 | +# Overloading the one2many.get for budget lines to filter regarding context. |
# Overloading the one2many.get for budget lines to filter regarding context.
class one2many_budget_lines(fields.one2many):

    def get(self, cr, obj, ids, name, uid=None, offset=0, context=None, values=None):
        """
        Use the 'granularity' value in context to filter budget lines:
        - 'view': only lines with line_type = 'view'
        - 'expense': lines with line_type 'view' or 'normal'
        - 'all' (or no/unknown granularity): 'view', 'normal' and 'destination' lines

        NB: this context also permits the "Budget vs. Actual" report to display
        the right lines for a given granularity.
        """
        if context is None:
            context = {}
        if values is None:
            values = {}
        res = {}
        # Line types to display, from narrowest to widest granularity.
        # (The old code initialized a dead `display_type` dict that was
        # immediately shadowed by a string — removed.)
        all_types = ['view', 'normal', 'destination']
        granularity_types = {
            'view': ['view'],
            'expense': ['view', 'normal'],
            'all': all_types,
        }
        # Default to showing everything when no (valid) granularity is given
        domain = granularity_types.get(context.get('granularity'), all_types)
        line_obj = obj.pool.get('msf.budget.line')
        for budget_id in ids:
            res[budget_id] = line_obj.search(cr, uid, [('budget_id', '=', budget_id), ('line_type', 'in', domain)])
        return res
3546 | |
3547 | class msf_budget_line(osv.osv): |
3548 | _name = "msf.budget.line" |
3549 | |
3550 | - def _get_comm_amounts(self, cr, uid, ids, context=None): |
3551 | - res = {} |
3552 | - if context is None: |
3553 | - context = {} |
3554 | - engagement_journal_ids = self.pool.get('account.analytic.journal').search(cr, uid, [('type', '=', 'engagement')], context=context) |
3555 | - budget_line_ids = [] |
3556 | - if len(ids) > 0: |
3557 | - budget = self.browse(cr, uid, ids[0], context=context).budget_id |
3558 | - output_currency_id = budget.currency_id.id |
3559 | - cost_center_ids = self.pool.get('msf.budget.tools')._get_cost_center_ids(budget.cost_center_id) |
3560 | - actual_domain = [('cost_center_id', 'in', cost_center_ids)] |
3561 | - actual_domain.append(('date', '>=', budget.fiscalyear_id.date_start)) |
3562 | - actual_domain.append(('date', '<=', budget.fiscalyear_id.date_stop)) |
3563 | - actual_domain.append(('journal_id', 'in', engagement_journal_ids)) |
3564 | - res = self.pool.get('msf.budget.tools')._get_actual_amounts(cr, uid, output_currency_id, actual_domain, context=context) |
3565 | - return res |
3566 | - |
3567 | - def _get_actual_amounts(self, cr, uid, ids, context=None): |
3568 | - # Input: list of budget lines |
3569 | - # Output: a dict of list {general_account_id: [jan_actual, feb_actual,...]} |
3570 | - res = {} |
3571 | - if context is None: |
3572 | - context = {} |
3573 | - # global values |
3574 | - engagement_journal_ids = self.pool.get('account.analytic.journal').search(cr, uid, [('type', '=', 'engagement')], context=context) |
3575 | - |
3576 | - # we discard the ids, but retrieve the budget from it |
3577 | - # Otherwise, view lines don't have values in "view lines only" display mode |
3578 | - budget_line_ids = [] |
3579 | - if len(ids) > 0: |
3580 | - budget = self.browse(cr, uid, ids[0], context=context).budget_id |
3581 | - output_currency_id = budget.currency_id.id |
3582 | - |
3583 | - cost_center_ids = self.pool.get('msf.budget.tools')._get_cost_center_ids(budget.cost_center_id) |
3584 | - |
3585 | - # Create search domain (one search for all analytic lines) |
3586 | - actual_domain = [('cost_center_id', 'in', cost_center_ids)] |
3587 | - actual_domain.append(('date', '>=', budget.fiscalyear_id.date_start)) |
3588 | - actual_domain.append(('date', '<=', budget.fiscalyear_id.date_stop)) |
3589 | - # 3. commitments |
3590 | - # if commitments are set to False in context, the engagement analytic journals are removed |
3591 | - # from the domain |
3592 | - actual_domain.append(('journal_id', 'not in', engagement_journal_ids)) |
3593 | - # Call budget_tools method |
3594 | - res = self.pool.get('msf.budget.tools')._get_actual_amounts(cr, uid, output_currency_id, actual_domain, context=context) |
3595 | - |
3596 | - return res |
3597 | - |
3598 | - def _get_budget_amounts(self, cr, uid, ids, context=None): |
3599 | - # Input: list of budget lines |
3600 | - # Output: a dict of list {general_account_id: [jan_budget, feb_budget,...]} |
3601 | - res = {} |
3602 | - if context is None: |
3603 | - context = {} |
3604 | - |
3605 | - if len(ids) > 0: |
3606 | - budget = self.browse(cr, uid, ids[0], context=context).budget_id |
3607 | - |
3608 | - if budget.type == 'normal': |
3609 | - # Budget values are stored in lines; just retrieve and add them |
3610 | - for budget_line in self.browse(cr, uid, ids, context=context): |
3611 | - budget_line_destination_id = budget_line.destination_id and budget_line.destination_id.id or False |
3612 | - if budget_line.budget_values: |
3613 | - res[budget_line.account_id.id, budget_line_destination_id] = eval(budget_line.budget_values) |
3614 | - else: |
3615 | - res[budget_line.account_id.id, budget_line_destination_id] = [0] * 12 |
3616 | - else: |
3617 | - # fill with 0s |
3618 | - for budget_line in self.browse(cr, uid, ids, context=context): |
3619 | - budget_line_destination_id = budget_line.destination_id and budget_line.destination_id.id or False |
3620 | - res[budget_line.account_id.id, budget_line_destination_id] = [0] * 12 |
3621 | - # Not stored in lines; retrieve child budgets, get their budget values and add |
3622 | - cost_center_list = self.pool.get('msf.budget.tools')._get_cost_center_ids(budget.cost_center_id) |
3623 | - # For each cost center, get the latest non-draft budget |
3624 | - for cost_center_id in cost_center_list: |
3625 | - cr.execute("SELECT id FROM msf_budget WHERE fiscalyear_id = %s \ |
3626 | - AND cost_center_id = %s \ |
3627 | - AND decision_moment_id = %s \ |
3628 | - AND state != 'draft' \ |
3629 | - AND type = 'normal' \ |
3630 | - ORDER BY version DESC LIMIT 1", |
3631 | - (budget.fiscalyear_id.id, |
3632 | - cost_center_id, |
3633 | - budget.decision_moment_id.id)) |
3634 | - if cr.rowcount: |
3635 | - # A budget was found; get its lines and their amounts |
3636 | - child_budget_id = cr.fetchall()[0][0] |
3637 | - child_line_ids = self.search(cr, |
3638 | - uid, |
3639 | - [('budget_id', '=', child_budget_id)], |
3640 | - context=context) |
3641 | - child_budget_amounts = self._get_budget_amounts(cr, uid, child_line_ids, context=context) |
3642 | - for child_line in self.browse(cr, uid, child_line_ids, context=context): |
3643 | - child_line_destination_id = child_line.destination_id and child_line.destination_id.id or False |
3644 | - if (child_line.account_id.id, child_line_destination_id) not in res: |
3645 | - res[child_line.account_id.id, child_line_destination_id] = child_budget_amounts[child_line.account_id.id, child_line_destination_id] |
3646 | - else: |
3647 | - res[child_line.account_id.id, child_line_destination_id] = [sum(pair) for pair in |
3648 | - zip(child_budget_amounts[child_line.account_id.id, child_line_destination_id], |
3649 | - res[child_line.account_id.id, child_line_destination_id])] |
3650 | - |
3651 | - return res |
3652 | - |
3653 | - def _compute_total_amounts(self, cr, uid, budget_amount_list, actual_amount_list, comm_amount_list, context=None): |
3654 | - # period_id |
3655 | - if context is None: |
3656 | - context = {} |
3657 | - budget_amount = 0 |
3658 | - actual_amount = 0 |
3659 | - comm_amount = 0 |
3660 | - month_stop = 0 |
3661 | - |
3662 | - if 'period_id' in context: |
3663 | - period = self.pool.get('account.period').browse(cr, uid, context['period_id'], context=context) |
3664 | - month_stop = datetime.datetime.strptime(period.date_stop, '%Y-%m-%d').month |
3665 | - else: |
3666 | - month_stop = 12 |
3667 | - |
3668 | - # actual amount |
3669 | - if actual_amount_list: |
3670 | - for i in range(month_stop): |
3671 | - actual_amount += actual_amount_list[i] |
3672 | - # budget amount |
3673 | - if budget_amount_list: |
3674 | - for i in range(month_stop): |
3675 | - budget_amount += budget_amount_list[i] |
3676 | - # comm amount |
3677 | - if comm_amount_list: |
3678 | - for i in range(month_stop): |
3679 | - comm_amount += comm_amount_list[i] |
3680 | - |
3681 | - return {'actual_amount': actual_amount, |
3682 | - 'comm_amount': comm_amount, |
3683 | - 'budget_amount': budget_amount} |
3684 | - |
3685 | - def _get_total_amounts(self, cr, uid, ids, field_names=None, arg=None, context=None): |
3686 | - res = {} |
3687 | - if context is None: |
3688 | - context = {} |
3689 | - |
3690 | - actual_amounts = self._get_actual_amounts(cr, uid, ids, context) |
3691 | - budget_amounts = self._get_budget_amounts(cr, uid, ids, context) |
3692 | - comm_amounts = self._get_comm_amounts(cr, uid, ids, context) |
3693 | - |
3694 | - # Browse each line |
3695 | - for budget_line in self.browse(cr, uid, ids, context=context): |
3696 | - budget_line_destination_id = budget_line.destination_id and budget_line.destination_id.id or False |
3697 | - line_amounts = self._compute_total_amounts(cr, |
3698 | - uid, |
3699 | - (budget_line.account_id.id, budget_line_destination_id) in budget_amounts \ |
3700 | - and budget_amounts[budget_line.account_id.id, budget_line_destination_id] \ |
3701 | - or [0] * 12, |
3702 | - (budget_line.account_id.id, budget_line_destination_id) in actual_amounts \ |
3703 | - and actual_amounts[budget_line.account_id.id, budget_line_destination_id] \ |
3704 | - or [0] * 12, |
3705 | - (budget_line.account_id.id, budget_line_destination_id) in comm_amounts \ |
3706 | - and comm_amounts[budget_line.account_id.id, budget_line_destination_id] \ |
3707 | - or [0] * 12, |
3708 | - context=context) |
3709 | - |
3710 | - actual_amount = line_amounts['actual_amount'] |
3711 | - budget_amount = line_amounts['budget_amount'] |
3712 | - comm_amount = line_amounts['comm_amount'] |
3713 | - |
3714 | - # We have budget amount and actual amount, compute the remaining ones |
3715 | - percentage = 0.0 |
3716 | - if budget_amount != 0.0: |
3717 | - percentage = round((actual_amount / budget_amount) * 100.0) |
3718 | - res[budget_line.id] = {'budget_amount': budget_amount, |
3719 | - 'actual_amount': actual_amount, |
3720 | - 'comm_amount': comm_amount, |
3721 | - 'balance': budget_amount - actual_amount, |
3722 | - 'percentage': percentage} |
3723 | - |
3724 | - return res |
3725 | - |
3726 | - def _get_monthly_amounts(self, cr, uid, ids, context=None): |
3727 | - res = [] |
3728 | - if context is None: |
3729 | - context = {} |
3730 | - |
3731 | - actual_amounts = self._get_actual_amounts(cr, uid, ids, context) |
3732 | - budget_amounts = self._get_budget_amounts(cr, uid, ids, context) |
3733 | - comm_amounts = self._get_comm_amounts(cr, uid, ids, context) |
3734 | - |
3735 | - # if period id, only retrieve a subset |
3736 | - month_stop = 0 |
3737 | - if 'period_id' in context: |
3738 | - period = self.pool.get('account.period').browse(cr, uid, context['period_id'], context=context) |
3739 | - month_stop = datetime.datetime.strptime(period.date_stop, '%Y-%m-%d').month |
3740 | - else: |
3741 | - month_stop = 12 |
3742 | - |
3743 | - # Browse each line |
3744 | - for budget_line in self.browse(cr, uid, ids, context=context): |
3745 | - budget_line_destination_id = budget_line.destination_id and budget_line.destination_id.id or False |
3746 | - |
3747 | - if budget_line.line_type == 'view' \ |
3748 | - or ('granularity' in context and context['granularity'] == 'all') \ |
3749 | - or ('granularity' in context and context['granularity'] == 'expense' and budget_line.line_type != 'destination'): |
3750 | - line_actual_amounts = [0] * 12 |
3751 | - line_budget_amounts = [0] * 12 |
3752 | - line_comm_amounts = [0] * 12 |
3753 | - if (budget_line.account_id.id, budget_line_destination_id) in actual_amounts: |
3754 | - line_actual_amounts = actual_amounts[budget_line.account_id.id, budget_line_destination_id] |
3755 | - if (budget_line.account_id.id, budget_line_destination_id) in budget_amounts: |
3756 | - line_budget_amounts = budget_amounts[budget_line.account_id.id, budget_line_destination_id] |
3757 | - if (budget_line.account_id.id, budget_line_destination_id) in comm_amounts: |
3758 | - line_comm_amounts = comm_amounts[budget_line.account_id.id, budget_line_destination_id] |
3759 | - |
3760 | - |
3761 | - line_code = budget_line.account_id.code |
3762 | - line_destination = '' |
3763 | - if budget_line.destination_id: |
3764 | - line_destination = budget_line.destination_id.code |
3765 | - line_name = budget_line.account_id.name |
3766 | - line_values = [(line_code,line_destination,line_name)] |
3767 | - |
3768 | - if 'breakdown' in context and context['breakdown'] == 'month': |
3769 | - # Need to add breakdown values |
3770 | - for i in range(month_stop): |
3771 | - line_values.append(line_budget_amounts[i]) |
3772 | - line_values.append(line_comm_amounts[i]) |
3773 | - line_values.append(line_actual_amounts[i]) |
3774 | - |
3775 | - total_amounts = self._compute_total_amounts(cr, |
3776 | - uid, |
3777 | - line_budget_amounts, |
3778 | - line_actual_amounts, |
3779 | - line_comm_amounts, |
3780 | - context=context) |
3781 | - |
3782 | - line_values.append(total_amounts['budget_amount']) |
3783 | - line_values.append(total_amounts['comm_amount']) |
3784 | - line_values.append(total_amounts['actual_amount']) |
3785 | - |
3786 | - # add to result |
3787 | - res.append(line_values) |
3788 | - |
3789 | - return res |
3790 | - |
3791 | def _get_name(self, cr, uid, ids, field_names=None, arg=None, context=None): |
3792 | result = self.browse(cr, uid, ids, context=context) |
3793 | res = {} |
3794 | for rs in result: |
3795 | account = rs.account_id |
3796 | - name = account.code |
3797 | + name = account.code |
3798 | if rs.destination_id: |
3799 | name += " " |
3800 | name += rs.destination_id.code |
3801 | @@ -311,18 +75,312 @@ |
3802 | name += account.name |
3803 | res[rs.id] = name |
3804 | return res |
3805 | - |
3806 | + |
3807 | + def _get_month_names(self, number=12): |
3808 | + """ |
3809 | + Return a list of all month field to be used from the first one to the given number (included). |
3810 | + """ |
3811 | + res = [] |
3812 | + # Do not permit to give a number superior to 12! |
3813 | + if number > 12: |
3814 | + number = 12 |
3815 | + for x in xrange(1, number+1, 1): |
3816 | + res.append('month' + str(x)) |
3817 | + return res |
3818 | + |
3819 | + def _get_domain(self, line_type, account_id, cost_center_ids, destination_id, date_start, date_stop): |
3820 | + """ |
3821 | + Create a domain regarding budget line elements (to be used in a search()). |
3822 | + Return a list. |
3823 | + """ |
3824 | + if isinstance(cost_center_ids, (int, long)): |
3825 | + cost_center_ids = [cost_center_ids] |
3826 | + res = [ |
3827 | + ('cost_center_id', 'in', cost_center_ids), |
3828 | + ('date', '>=', date_start), |
3829 | + ('date', '<=', date_stop), |
3830 | + ] |
3831 | + if line_type == 'destination': |
3832 | + res.append(('destination_id', '=', destination_id)) |
3833 | + if line_type in ['destination', 'normal']: |
3834 | + res.append(('general_account_id', '=', account_id)), |
3835 | + else: |
3836 | + res.append(('general_account_id', 'child_of', account_id)) |
3837 | + return res |
3838 | + |
3839 | + def _get_sql_domain(self, cr, uid, request, params, line_type, account_id, destination_id): |
3840 | + """ |
3841 | + Create a SQL domain regarding budget line elements (to be used in a SQL request). |
3842 | + Return a 2 params: |
3843 | + - SQL request |
3844 | + - SQL params (list) |
3845 | + """ |
3846 | + if not request: |
3847 | + request = "" |
3848 | + if not params: |
3849 | + params = [] |
3850 | + if line_type == 'destination': |
3851 | + request += """ AND destination_id = %s """ |
3852 | + params.append(destination_id) |
3853 | + if line_type in ['destination', 'normal']: |
3854 | + request += """ AND general_account_id = %s """ |
3855 | + params.append(account_id) |
3856 | + else: |
3857 | + request += """ AND general_account_id IN %s """ |
3858 | + account_ids = self.pool.get('account.account').search(cr, uid, [('parent_id', 'child_of', account_id)]) |
3859 | + params.append(tuple(account_ids)) |
3860 | + return request, params |
3861 | + |
    def _get_amounts(self, cr, uid, ids, field_names=None, arg=None, context=None):
        """
        Function-field getter for budget line amounts.

        Those field can be asked for:
        - actual_amount
        - comm_amount
        - balance
        - percentage
        With some depends:
        - percentage needs actual_amount, comm_amount, balance and budget_amount
        - balance needs actual_amount, comm_amount and budget_amount

        NB:
        - if 'period_id' in context, we change date_stop for SQL request to the date_stop of the given period to reduce computation
        - if 'currency_table_id' in context, we compute actual amounts (and commitment ones) currency by currency

        NOTE(review): ``field_names`` is dereferenced with ``in`` below, so a
        call without field_names would raise — presumably the ORM always
        passes it for function fields; confirm against callers.
        """
        # Some checks
        if context is None:
            context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        # Prepare some values
        res = {}
        budget_ok = False
        actual_ok = False
        commitment_ok = False
        percentage_ok = False
        balance_ok = False
        budget_amounts = {}
        actual_amounts = {}
        comm_amounts = {}
        cur_obj = self.pool.get('res.currency')
        # If period_id in context, use another date_stop element (limits the
        # actual/commitment search and the number of month columns summed).
        date_period_stop = False
        month_number = 12
        if 'period_id' in context:
            period = self.pool.get('account.period').read(cr, uid, context.get('period_id', False), ['date_stop', 'number'], context=context)
            if period and period.get('date_stop', False):
                date_period_stop = period.get('date_stop')
            if period and period.get('number', False):
                month_number = period.get('number')
        # Check if we need to use another currency_table_id
        other_currencies = False
        date_context = {}
        company_currency_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
        if context.get('currency_table_id', False):
            other_currencies = True
            date_context.update({'currency_table_id': context.get('currency_table_id')})
        # Check in which case we are regarding field names. Compute actual and commitment when we need balance and/or percentage.
        if 'budget_amount' in field_names:
            budget_ok = True
        if 'actual_amount' in field_names:
            actual_ok = True
        if 'comm_amount' in field_names:
            actual_ok = True
            commitment_ok = True
        if 'percentage' in field_names:
            budget_ok = True
            actual_ok = True
            percentage_ok = True
        if 'balance' in field_names:
            budget_ok = True
            actual_ok = True
            balance_ok = True
        # In some cases (reports) we don't want to display commitment values. But we have to include them into "balance" and percentage computation.
        # NOTE: an explicit context value overrides the field-name deduction above.
        if 'commitment' in context:
            commitment_ok = context.get('commitment', False)
        # Compute actual and/or commitments
        if actual_ok or commitment_ok or percentage_ok or balance_ok:
            # COMPUTE ACTUAL/COMMITMENT
            ana_obj = self.pool.get('account.analytic.line')
            ana_account_obj = self.pool.get('account.analytic.account')
            cur_obj = self.pool.get('res.currency')  # NOTE(review): redundant — already fetched above
            # Create default values so the `+=` accumulations below never KeyError
            for index in ids:
                if actual_ok:
                    actual_amounts.setdefault(index, 0.0)
                if commitment_ok:
                    comm_amounts.setdefault(index, 0.0)
            # Fetch, for each requested line, its type/account/destination plus
            # its budget's cost center and fiscalyear bounds in one query.
            sql = """
                SELECT l.id, l.line_type, l.account_id, l.destination_id, b.cost_center_id, f.date_start, f.date_stop
                FROM msf_budget_line AS l, msf_budget AS b, account_fiscalyear AS f
                WHERE l.budget_id = b.id
                AND b.fiscalyear_id = f.id
                AND l.id IN %s
                ORDER BY l.line_type, l.id"""
            cr.execute(sql, (tuple(ids),))
            # Prepare SQL2 request that contains sum of amount of given analytic lines (in functional currency)
            sql2 = """
                SELECT SUM(amount)
                FROM account_analytic_line
                WHERE id in %s"""
            # Prepare SQL3 request in case we have other currencies to compute
            sql3 = """
                SELECT l.currency_id, SUM(l.amount_currency)
                FROM account_analytic_line AS l, account_analytic_journal AS j
                WHERE l.journal_id = j.id
                AND l.cost_center_id IN %s
                AND l.date >= %s
                AND l.date <= %s"""
            sql3_end = """ GROUP BY l.currency_id"""
            # Process each budget line fetched above
            for line in cr.fetchall():
                # fetch some values
                line_id, line_type, account_id, destination_id, cost_center_id, date_start, date_stop = line
                cost_center_ids = ana_account_obj.search(cr, uid, [('parent_id', 'child_of', cost_center_id)])
                if date_period_stop:
                    date_stop = date_period_stop
                criteria = self._get_domain(line_type, account_id, cost_center_ids, destination_id, date_start, date_stop)
                # TWO METHODS to display actual/commitments
                # (1) Either we use functional amounts (no currency_table)
                # (2) Or we use a currency table to change amounts to functional amounts at fiscalyear date_stop
                if not other_currencies:
                    # (1) Use functional amounts: NO conversion
                    # fill in ACTUAL AMOUNTS (non-engagement journals only)
                    if actual_ok:
                        actual_criteria = list(criteria) + [('journal_id.type', '!=', 'engagement')]
                        ana_ids = ana_obj.search(cr, uid, actual_criteria)
                        if ana_ids:
                            cr.execute(sql2, (tuple(ana_ids),))
                            mnt_result = cr.fetchall()
                            if mnt_result:
                                # analytic amounts are negative for expenses: flip the sign
                                actual_amounts[line_id] += mnt_result[0][0] * -1
                    # fill in COMMITMENT AMOUNTS (engagement journals only)
                    if commitment_ok:
                        commitment_criteria = list(criteria) + [('journal_id.type', '=', 'engagement')]
                        ana_ids = ana_obj.search(cr, uid, commitment_criteria)
                        if ana_ids:
                            cr.execute(sql2, (tuple(ana_ids),))
                            mnt_result = cr.fetchall()
                            if mnt_result:
                                comm_amounts[line_id] += mnt_result[0][0] * -1
                else:
                    # (2) OTHER CURRENCIES to compute
                    # Note that to not compute each analytic lines we use the sum of each currency and convert it to the functional currency using the given currency_table_id in the context
                    tmp_sql_params = [tuple(cost_center_ids), date_start, date_stop]
                    tmp_sql, sql_params = self._get_sql_domain(cr, uid, sql3, tmp_sql_params, line_type, account_id, destination_id)
                    # Use fiscalyear end date as date on which we do conversion
                    date_context.update({'date': date_stop})

                    def get_amounts_and_compute_total(local_request, local_params, local_end_request):
                        """
                        Use request.
                        Finish it with local_end_request.
                        Execute it.
                        Fetch amounts.
                        Compute them by currency.
                        Return total result
                        """
                        total = 0.0
                        if local_end_request:
                            local_request += local_end_request
                        cr.execute(local_request, tuple(local_params))
                        if cr.rowcount:
                            analytic_amounts = cr.fetchall()
                            # Browse each currency amount and convert it to the functional currency (company one)
                            for currency_id, amount in analytic_amounts:
                                tmp_amount = cur_obj.compute(cr, uid, currency_id, company_currency_id, amount, round=False, context=date_context)
                                total += (tmp_amount * -1)  # As analytic amounts are negative, we should use the opposite to make budget with positive values
                        return total

                    if actual_ok:
                        actual_sql = tmp_sql + """ AND j.type != 'engagement' """
                        actual_amounts[line_id] += get_amounts_and_compute_total(actual_sql, sql_params, sql3_end)
                    if commitment_ok:
                        commitment_sql = tmp_sql + """ AND j.type = 'engagement' """
                        comm_amounts[line_id] += get_amounts_and_compute_total(commitment_sql, sql_params, sql3_end)
        # Budget line amounts: sum of the first `month_number` monthly columns.
        # NOTE(review): COALESCE wraps the whole sum, so one NULL month makes
        # the entire sum fall back to 0.0 — confirm months are never NULL here.
        if budget_ok:
            month_names = self._get_month_names(month_number)
            sql = """
                SELECT id, COALESCE(""" + '+'.join(month_names) + """, 0.0)
                FROM msf_budget_line
                WHERE id IN %s;
                """
            cr.execute(sql, (tuple(ids),))
            tmp_res = cr.fetchall()
            if tmp_res:
                budget_amounts = dict(tmp_res)
        # Prepare result
        for line_id in ids:
            actual_amount = line_id in actual_amounts and actual_amounts[line_id] or 0.0
            comm_amount = line_id in comm_amounts and comm_amounts[line_id] or 0.0
            res[line_id] = {'actual_amount': actual_amount, 'comm_amount': comm_amount, 'balance': 0.0, 'percentage': 0.0, 'budget_amount': 0.0,}
            # balance_ok/percentage_ok always imply budget_ok (see deduction above),
            # so budget_amount is always defined when used below.
            if budget_ok:
                budget_amount = line_id in budget_amounts and budget_amounts[line_id] or 0.0
                res[line_id].update({'budget_amount': budget_amount,})
            if balance_ok:
                balance = budget_amount - actual_amount
                if commitment_ok:
                    balance -= comm_amount
                res[line_id].update({'balance': balance,})
            if percentage_ok:
                if budget_amount != 0.0:
                    base = actual_amount
                    if commitment_ok:
                        base += comm_amount
                    percentage = round(base / budget_amount * 100.0)
                    res[line_id].update({'percentage': percentage,})
        return res
4062 | + |
4063 | + def _get_total(self, cr, uid, ids, field_names=None, arg=None, context=None): |
4064 | + """ |
4065 | + Give the sum of all month for the given budget lines. |
4066 | + If period_id in context, just display months from the first one to the given period month (included) |
4067 | + """ |
4068 | + # Some checks |
4069 | + if isinstance(ids,(int, long)): |
4070 | + ids = [ids] |
4071 | + month_number = 12 |
4072 | + if 'period_id' in context: |
4073 | + period = self.pool.get('account.period').read(cr, uid, context.get('period_id', False), ['number']) |
4074 | + if period and period.get('number', False): |
4075 | + month_number = period.get('number') |
4076 | + month_names = self._get_month_names(month_number) |
4077 | + # Prepare some values |
4078 | + res = {} |
4079 | + sql = """ |
4080 | + SELECT id, COALESCE(""" + '+'.join(month_names) + """, 0.0) |
4081 | + FROM msf_budget_line |
4082 | + WHERE id IN %s""" |
4083 | + cr.execute(sql, (tuple(ids),)) |
4084 | + tmp_res = cr.fetchall() |
4085 | + if tmp_res: |
4086 | + res = dict(tmp_res) |
4087 | + return res |
4088 | + |
4089 | _columns = { |
4090 | 'budget_id': fields.many2one('msf.budget', 'Budget', ondelete='cascade'), |
4091 | 'account_id': fields.many2one('account.account', 'Account', required=True, domain=[('type', '!=', 'view')]), |
4092 | 'destination_id': fields.many2one('account.analytic.account', 'Destination', domain=[('category', '=', 'DEST')]), |
4093 | 'name': fields.function(_get_name, method=True, store=False, string="Name", type="char", readonly="True", size=512), |
4094 | - 'budget_values': fields.char('Budget Values (list of float to evaluate)', size=256), |
4095 | - 'budget_amount': fields.function(_get_total_amounts, method=True, store=False, string="Budget amount", type="float", readonly="True", multi="all"), |
4096 | - 'actual_amount': fields.function(_get_total_amounts, method=True, store=False, string="Actual amount", type="float", readonly="True", multi="all"), |
4097 | - 'comm_amount': fields.function(_get_total_amounts, method=True, store=False, string="Commitments amount", type="float", readonly="True", multi="all"), |
4098 | - 'balance': fields.function(_get_total_amounts, method=True, store=False, string="Balance", type="float", readonly="True", multi="all"), |
4099 | - 'percentage': fields.function(_get_total_amounts, method=True, store=False, string="Percentage", type="float", readonly="True", multi="all"), |
4100 | + 'month1': fields.float("Month 01"), |
4101 | + 'month2': fields.float("Month 02"), |
4102 | + 'month3': fields.float("Month 03"), |
4103 | + 'month4': fields.float("Month 04"), |
4104 | + 'month5': fields.float("Month 05"), |
4105 | + 'month6': fields.float("Month 06"), |
4106 | + 'month7': fields.float("Month 07"), |
4107 | + 'month8': fields.float("Month 08"), |
4108 | + 'month9': fields.float("Month 09"), |
4109 | + 'month10': fields.float("Month 10"), |
4110 | + 'month11': fields.float("Month 11"), |
4111 | + 'month12': fields.float("Month 12"), |
4112 | + 'total': fields.function(_get_total, method=True, store=False, string="Total", type="float", readonly=True, help="Get all month total amount"), |
4113 | + 'budget_amount': fields.function(_get_amounts, method=True, store=False, string="Budget amount", type="float", readonly=True, multi="budget_amounts"), |
4114 | + 'actual_amount': fields.function(_get_amounts, method=True, store=False, string="Actual amount", type="float", readonly=True, multi="budget_amounts"), |
4115 | + 'comm_amount': fields.function(_get_amounts, method=True, store=False, string="Commitments amount", type="float", readonly=True, multi="budget_amounts"), |
4116 | + 'balance': fields.function(_get_amounts, method=True, store=False, string="Balance", type="float", readonly=True, multi="budget_amounts"), |
4117 | + 'percentage': fields.function(_get_amounts, method=True, store=False, string="Percentage", type="float", readonly=True, multi="budget_amounts"), |
4118 | 'parent_id': fields.many2one('msf.budget.line', 'Parent Line'), |
4119 | 'child_ids': fields.one2many('msf.budget.line', 'parent_id', 'Child Lines'), |
4120 | 'line_type': fields.selection([('view','View'), |
4121 | @@ -334,89 +392,30 @@ |
4122 | _order = 'account_code asc, line_type desc' |
4123 | |
4124 | _defaults = { |
4125 | - 'line_type': 'normal', |
4126 | + 'line_type': lambda *a: 'normal', |
4127 | + 'month1': lambda *a: 0.0, |
4128 | + 'month2': lambda *a: 0.0, |
4129 | + 'month3': lambda *a: 0.0, |
4130 | + 'month4': lambda *a: 0.0, |
4131 | + 'month5': lambda *a: 0.0, |
4132 | + 'month6': lambda *a: 0.0, |
4133 | + 'month7': lambda *a: 0.0, |
4134 | + 'month8': lambda *a: 0.0, |
4135 | + 'month9': lambda *a: 0.0, |
4136 | + 'month10': lambda *a: 0.0, |
4137 | + 'month11': lambda *a: 0.0, |
4138 | + 'month12': lambda *a: 0.0, |
4139 | } |
4140 | - |
4141 | - def get_parent_line(self, cr, uid, vals, context=None): |
4142 | - # Method to check if the used account has a parent, |
4143 | - # and retrieve or create the corresponding parent line. |
4144 | - # It also adds budget values to parent lines |
4145 | - parent_account_id = False |
4146 | - parent_line_ids = [] |
4147 | - if 'account_id' in vals and 'budget_id' in vals: |
4148 | - if 'destination_id' in vals: |
4149 | - # Special case: the line has a destination, so the parent is a line |
4150 | - # with the same account and no destination |
4151 | - parent_account_id = vals['account_id'] |
4152 | - parent_line_ids = self.search(cr, uid, [('account_id', '=', vals['account_id']), |
4153 | - ('budget_id', '=', vals['budget_id']), |
4154 | - ('line_type', '=', 'normal')], context=context) |
4155 | - else: |
4156 | - # search for budget line |
4157 | - account = self.pool.get('account.account').browse(cr, uid, vals['account_id'], context=context) |
4158 | - chart_of_account_ids = self.pool.get('account.account').search(cr, uid, [('code', '=', 'MSF')], context=context) |
4159 | - if account.parent_id and account.parent_id.id in chart_of_account_ids: |
4160 | - # no need to create the parent |
4161 | - return |
4162 | - else: |
4163 | - parent_account_id = account.parent_id.id |
4164 | - parent_line_ids = self.search(cr, uid, [('account_id', '=', parent_account_id), |
4165 | - ('budget_id', '=', vals['budget_id'])], context=context) |
4166 | - if len(parent_line_ids) > 0: |
4167 | - # Parent line exists |
4168 | - if 'budget_values' in vals: |
4169 | - # we add the budget values to the parent one |
4170 | - parent_line = self.browse(cr, uid, parent_line_ids[0], context=context) |
4171 | - parent_budget_values = [sum(pair) for pair in zip(eval(parent_line.budget_values), |
4172 | - eval(vals['budget_values']))] |
4173 | - # write parent |
4174 | - super(msf_budget_line, self).write(cr, |
4175 | - uid, |
4176 | - parent_line_ids, |
4177 | - {'budget_values': str(parent_budget_values)}, |
4178 | - context=context) |
4179 | - # use method on parent with original budget values |
4180 | - self.get_parent_line(cr, |
4181 | - uid, |
4182 | - {'account_id': parent_line.account_id.id, |
4183 | - 'budget_id': parent_line.budget_id.id, |
4184 | - 'budget_values': vals['budget_values']}, |
4185 | - context=context) |
4186 | - # add parent id to vals |
4187 | - vals.update({'parent_id': parent_line_ids[0]}) |
4188 | - else: |
4189 | - # Create parent line and add it to vals, except if it's the main parent |
4190 | - parent_vals = {'budget_id': vals['budget_id'], |
4191 | - 'account_id': parent_account_id} |
4192 | - if 'line_type' in vals and vals['line_type'] == 'destination': |
4193 | - parent_vals['line_type'] = 'normal' |
4194 | - else: |
4195 | - parent_vals['line_type'] = 'view' |
4196 | - # default parent budget values: the one from the (currently) only child |
4197 | - if 'budget_values' in vals: |
4198 | - parent_vals.update({'budget_values': vals['budget_values']}) |
4199 | - parent_budget_line_id = self.create(cr, uid, parent_vals, context=context) |
4200 | - vals.update({'parent_id': parent_budget_line_id}) |
4201 | - return |
4202 | - |
4203 | - |
4204 | - def create(self, cr, uid, vals, context=None): |
4205 | - self.get_parent_line(cr, uid, vals, context=context) |
4206 | - return super(msf_budget_line, self).create(cr, uid, vals, context=context) |
4207 | - |
4208 | - def write(self, cr, uid, ids, vals, context=None): |
4209 | - self.get_parent_line(cr, uid, vals, context=context) |
4210 | - return super(msf_budget_line, self).write(cr, uid, ids, vals, context=context) |
4211 | - |
4212 | + |
4213 | msf_budget_line() |
4214 | |
4215 | class msf_budget(osv.osv): |
4216 | _name = "msf.budget" |
4217 | _inherit = "msf.budget" |
4218 | - |
4219 | + |
4220 | _columns = { |
4221 | 'budget_line_ids': one2many_budget_lines('msf.budget.line', 'budget_id', 'Budget Lines'), |
4222 | } |
4223 | - |
4224 | + |
4225 | msf_budget() |
4226 | # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
4227 | |
4228 | === modified file 'msf_budget/msf_budget_report.xml' |
4229 | --- msf_budget/msf_budget_report.xml 2013-12-12 17:29:01 +0000 |
4230 | +++ msf_budget/msf_budget_report.xml 2014-03-21 08:49:45 +0000 |
4231 | @@ -23,7 +23,7 @@ |
4232 | header="False" |
4233 | auto="False" |
4234 | menu="True"/> |
4235 | - |
4236 | + |
4237 | <report id="pdf_budget_monthly" |
4238 | string="Monthly Budget (PDF)" |
4239 | model="msf.budget" |
4240 | @@ -32,7 +32,8 @@ |
4241 | rml="msf_budget/report/monthly_budget.rml" |
4242 | auto="False" |
4243 | menu="True"/> |
4244 | - |
4245 | + |
4246 | + <!-- see 'report/report_pdf_budget_summary.py' for real filename --> |
4247 | <report id="pdf_budget_summary" |
4248 | string="Budget Summary" |
4249 | model="msf.budget" |
4250 | @@ -40,8 +41,8 @@ |
4251 | target_filename="Budget_Summary_${code}_%(year)s%(month)s%(day)s" |
4252 | rml="msf_budget/report/budget_summary.rml" |
4253 | auto="False" |
4254 | - menu="True"/> |
4255 | - |
4256 | + menu="False"/> |
4257 | + |
4258 | <report id="report_local_expenses_xls" |
4259 | string="Local Expenses" |
4260 | model="account.analytic.line" |
4261 | @@ -52,7 +53,7 @@ |
4262 | header="False" |
4263 | auto="False" |
4264 | menu="False"/> |
4265 | - |
4266 | + |
4267 | <report id="pdf_engagement" |
4268 | string="Order impact vs. Budget" |
4269 | model="purchase.order" |
4270 | @@ -66,7 +67,7 @@ |
4271 | string="Actuals by CC" |
4272 | model="msf.budget" |
4273 | name="msf.budget.actual" |
4274 | - target_filename="Actuals by CC_${code}_Â %(year)s%(month)s%(day)s" |
4275 | + target_filename="Actuals by CC_${code}_%(year)s%(month)s%(day)s" |
4276 | report_type="txt" |
4277 | auto="False" |
4278 | menu="False"/> |
4279 | |
4280 | === added file 'msf_budget/msf_budget_sequence.xml' |
4281 | --- msf_budget/msf_budget_sequence.xml 1970-01-01 00:00:00 +0000 |
4282 | +++ msf_budget/msf_budget_sequence.xml 2014-03-21 08:49:45 +0000 |
4283 | @@ -0,0 +1,18 @@ |
4284 | +<?xml version="1.0" encoding="utf-8"?> |
4285 | +<openerp> |
4286 | + <data noupdate="1"> |
4287 | + |
4288 | + <record id="seq_type_budget_import" model="ir.sequence.type"> |
4289 | + <field name="name">Budget Import</field> |
4290 | + <field name="code">budget.import</field> |
4291 | + </record> |
4292 | + |
4293 | + |
4294 | + <record id="seq_budget_import" model="ir.sequence"> |
4295 | + <field name="name">Budget Import</field> |
4296 | + <field name="code">budget.import</field> |
4297 | + <field name="padding">10</field> |
4298 | + <field name="implementation">psql</field> |
4299 | + </record> |
4300 | + </data> |
4301 | +</openerp> |
4302 | |
4303 | === modified file 'msf_budget/msf_budget_tools.py' |
4304 | --- msf_budget/msf_budget_tools.py 2013-12-12 16:01:40 +0000 |
4305 | +++ msf_budget/msf_budget_tools.py 2014-03-21 08:49:45 +0000 |
4306 | @@ -19,23 +19,18 @@ |
4307 | # |
4308 | ############################################################################## |
4309 | |
4310 | -from osv import fields, osv |
4311 | +from osv import osv |
4312 | import datetime |
4313 | -from dateutil.relativedelta import relativedelta |
4314 | +from tools.translate import _ |
4315 | |
4316 | class msf_budget_tools(osv.osv): |
4317 | _name = "msf.budget.tools" |
4318 | - |
4319 | - def _get_account_parent(self, browse_account, account_list, chart_of_account_ids): |
4320 | - if browse_account.parent_id and \ |
4321 | - browse_account.parent_id.id and \ |
4322 | - browse_account.parent_id.id not in chart_of_account_ids and \ |
4323 | - browse_account.parent_id.id not in account_list: |
4324 | - account_list.append((browse_account.parent_id.id, False)) |
4325 | - self._get_account_parent(browse_account.parent_id, account_list, chart_of_account_ids) |
4326 | - return |
4327 | - |
4328 | - def _get_expense_accounts(self, cr, uid, context=None): |
4329 | + |
4330 | + def get_expense_accounts(self, cr, uid, context=None): |
4331 | + """ |
4332 | + Get all "is_analytic_addicted" accounts, except if the context notifies us to only use expense ones. |
4333 | + By using this method you also retrieve ALL parents EXCEPT the first one: MSF account. |
4334 | + """ |
4335 | # Checks |
4336 | if context is None: |
4337 | context = {} |
4338 | @@ -43,56 +38,215 @@ |
4339 | res = [] |
4340 | account_obj = self.pool.get('account.account') |
4341 | # get the last parent |
4342 | - chart_of_account_ids = account_obj.search(cr, uid, [('code', '=', 'MSF')], context=context) |
4343 | + top_ids = account_obj.search(cr, uid, [('code', '=', 'MSF')], context=context) |
4344 | # get normal analytic-a-holic accounts. UTP-944: only expenses ones if "only_expenses" in context. Do not include Extra-accounting accounts and incomes one. |
4345 | domain = [('type', '!=', 'view'), ('user_type_report_type', '!=', 'none')] |
4346 | if context.get('only_expenses', False) and context.get('only_expenses') is True: |
4347 | domain += [('user_type_code', '=', 'expense'), ('user_type_report_type', '=', 'expense')] |
4348 | else: |
4349 | domain += [('is_analytic_addicted', '=', True)] |
4350 | - general_account_ids = account_obj.search(cr, uid, domain, context=context) |
4351 | - expense_account_ids = [(account_id, False) for account_id in general_account_ids] |
4352 | - # go through parents |
4353 | - for account in account_obj.browse(cr, uid, general_account_ids, context=context): |
4354 | - self._get_account_parent(account, expense_account_ids, chart_of_account_ids) |
4355 | - return expense_account_ids |
4356 | - |
4357 | - def _create_expense_account_line_amounts(self, cr, uid, account_destination_tuple, actual_amounts, context=None): |
4358 | - if account_destination_tuple not in actual_amounts: |
4359 | - account = self.pool.get('account.account').browse(cr, uid, account_destination_tuple[0], context=context) |
4360 | - result = [0] * 12 |
4361 | - if account.type == 'view': |
4362 | - # children are accounts |
4363 | - for child_account in account.child_id: |
4364 | - if (child_account.id, False) not in actual_amounts: |
4365 | - self._create_expense_account_line_amounts(cr, uid, (child_account.id, False), actual_amounts, context=context) |
4366 | - result = [sum(pair) for pair in zip(result, actual_amounts[child_account.id, False])] |
4367 | - else: |
4368 | - # children are account, destination tuples (already in actual_amounts) |
4369 | - # get all tuples starting with (account_id) |
4370 | - for account_destination in [tuple for tuple in actual_amounts.keys() if tuple[0] == account_destination_tuple[0] and tuple[1] is not False]: |
4371 | - result = [sum(pair) for pair in zip(result, actual_amounts[account_destination])] |
4372 | - actual_amounts[account_destination_tuple[0], False] = result |
4373 | - return |
4374 | - |
4375 | - def _get_cc_children(self, browse_cost_center, cost_center_list): |
4376 | - for child in browse_cost_center.child_ids: |
4377 | - if child.type == 'view': |
4378 | - self._get_cc_children(child, cost_center_list) |
4379 | - else: |
4380 | - cost_center_list.append(child.id) |
4381 | - return |
4382 | - |
4383 | - def _get_cost_center_ids(self, browse_cost_center): |
4384 | - if browse_cost_center.type == 'normal': |
4385 | - # Normal budget, just return a 1-item list |
4386 | - return [browse_cost_center.id] |
4387 | - else: |
4388 | - # View budget: return all non-view cost centers below this one |
4389 | - cost_center_list = [] |
4390 | - self._get_cc_children(browse_cost_center, cost_center_list) |
4391 | - return cost_center_list |
4392 | - |
4393 | + account_ids = account_obj.search(cr, uid, domain, context=context) |
4394 | + if account_ids: |
4395 | + parent_ids = account_obj._get_parent_of(cr, uid, account_ids, context=context) |
4396 | + if parent_ids: |
4397 | + res = [x for x in parent_ids if x not in top_ids] |
4398 | + return res |
4399 | + |
4400 | + def get_budget_line_template(self, cr, uid, context=None): |
4401 | + """ |
4402 | + Create a template that contains all budget line main values for a new budget. |
4403 | + """ |
4404 | + # Some checks |
4405 | + if context is None: |
4406 | + context = {} |
4407 | + # Prepare some values |
4408 | + res = [] |
4409 | + # Search all income/expense accounts (except if context contains "only_expenses" set to True) |
4410 | + account_ids = self.get_expense_accounts(cr, uid, context=context) |
4411 | + if not account_ids: |
4412 | + return [] |
4413 | + # We use a SQL query to keep an order of accounts regarding their parents so that parents can be created before their children. |
4414 | + sql = """ |
4415 | + SELECT id, CASE WHEN type != 'view' THEN 'normal' ELSE 'view' END AS account_type, parent_id |
4416 | + FROM account_account |
4417 | + WHERE id IN %s |
4418 | + ORDER BY parent_id ASC""" |
4419 | + cr.execute(sql, (tuple(account_ids),)) |
4420 | + if not cr.rowcount: |
4421 | + raise osv.except_osv(_('Error'), _('Unable to find needed info.')) |
4422 | + tmp_res = cr.fetchall() |
4423 | + # We take destination ids for a given account (and we make a dictionary to be quickly used) |
4424 | + accounts = self.pool.get('account.account').read(cr, uid, account_ids, ['destination_ids'], context=context) |
4425 | + destinations = {} |
4426 | + for account in accounts: |
4427 | + destinations[account.get('id')] = account.get('destination_ids') |
4428 | + # We then create the final result with all needed elements |
4429 | + for line_id, line_type, parent_id in tmp_res: |
4430 | + line_vals = { |
4431 | + 'id': line_id, |
4432 | + 'type': line_type, |
4433 | + 'parent_id': parent_id, |
4434 | + 'destination_ids': [] |
4435 | + } |
4436 | + if line_id in destinations: |
4437 | + line_vals.update({'destination_ids': destinations[line_id]}) |
4438 | + res.append(line_vals) |
4439 | + return res |
4440 | + |
4441 | + def create_budget_lines(self, cr, uid, budget_id, sequence=False, context=None): |
4442 | + """ |
4443 | + Create budget lines for a given budget. |
4444 | + If no budget: do nothing. |
4445 | + If no sequence: only create budget lines without any specific amounts. |
4446 | + |
4447 | + Creation synthesis: |
4448 | + 1/ get the initial template |
4449 | + 2/ for each line create its budget line |
4450 | + 3/a) if sequence: fetch budget values and fill in destination lines, normal lines and parents |
4451 | + 3/b) if NO sequence, just create destination lines, normal lines and parents. |
4452 | + """ |
4453 | + # Some checks |
4454 | + if context is None: |
4455 | + context = {} |
4456 | + if not budget_id: |
4457 | + return False |
4458 | + # Prepare some values |
4459 | + a_obj = self.pool.get('account.account') |
4460 | + chart_of_account_ids = a_obj.search(cr, uid, [('code', '=', 'MSF')], context=context) |
4461 | + budget_line_obj = self.pool.get('msf.budget.line') |
4462 | + imported_obj = self.pool.get('imported.msf.budget.line') |
4463 | + sql = """ |
4464 | + SELECT SUM(COALESCE(month1, 0.0)), SUM(COALESCE(month2, 0.0)), SUM(COALESCE(month3, 0.0)), SUM(COALESCE(month4, 0.0)), SUM(COALESCE(month5, 0.0)), SUM(COALESCE(month6, 0.0)), SUM(COALESCE(month7, 0.0)), SUM(COALESCE(month8, 0.0)), SUM(COALESCE(month9, 0.0)), SUM(COALESCE(month10, 0.0)), SUM(COALESCE(month11, 0.0)), SUM(COALESCE(month12, 0.0)) |
4465 | + FROM msf_budget_line |
4466 | + WHERE id IN %s""" |
4467 | + # Get budget line template from budget tools |
4468 | + template = self.get_budget_line_template(cr, uid, context=context) |
4469 | + # Browse each budget line and create needed values |
4470 | + to_proceed = [] |
4471 | + mapping_accounts = {} |
4472 | + for budget_line in template: |
4473 | + # Create budget line |
4474 | + line_id = budget_line.get('id') |
4475 | + line_type = budget_line.get('type') |
4476 | + parent_id = budget_line.get('parent_id', False) |
4477 | + # Do not use top parent account (those in chart_of_account_ids) |
4478 | + if parent_id in chart_of_account_ids: |
4479 | + parent_id = False |
4480 | + budget_line_vals = { |
4481 | + 'budget_id': budget_id, |
4482 | + 'account_id': line_id, |
4483 | + 'line_type': line_type, |
4484 | + 'month1': 0.0, |
4485 | + 'month2': 0.0, |
4486 | + 'month3': 0.0, |
4487 | + 'month4': 0.0, |
4488 | + 'month5': 0.0, |
4489 | + 'month6': 0.0, |
4490 | + 'month7': 0.0, |
4491 | + 'month8': 0.0, |
4492 | + 'month9': 0.0, |
4493 | + 'month10': 0.0, |
4494 | + 'month11': 0.0, |
4495 | + 'month12': 0.0, |
4496 | + } |
4497 | + if parent_id: |
4498 | + if parent_id not in mapping_accounts: |
4499 | + raise osv.except_osv(_('Error'), _('You did not create budget line in the right order. A parent does not exist!')) |
4500 | + budget_line_vals.update({'parent_id': mapping_accounts[parent_id]}) |
4501 | + # Create line |
4502 | + budget_line_id = budget_line_obj.create(cr, uid, budget_line_vals, context=context) |
4503 | + to_proceed.append(budget_line_id) |
4504 | + mapping_accounts[line_id] = budget_line_id |
4505 | + if line_type == 'normal': |
4506 | + # Update vals with the new line type |
4507 | + budget_line_vals.update({ |
4508 | + 'line_type': 'destination', |
4509 | + }) |
4510 | + # Browse each destination to create its line |
4511 | + for destination_id in budget_line.get('destination_ids', []): |
4512 | + budget_line_vals.update({ |
4513 | + 'destination_id': destination_id, |
4514 | + 'month1': 0.0, |
4515 | + 'month2': 0.0, |
4516 | + 'month3': 0.0, |
4517 | + 'month4': 0.0, |
4518 | + 'month5': 0.0, |
4519 | + 'month6': 0.0, |
4520 | + 'month7': 0.0, |
4521 | + 'month8': 0.0, |
4522 | + 'month9': 0.0, |
4523 | + 'month10': 0.0, |
4524 | + 'month11': 0.0, |
4525 | + 'month12': 0.0, |
4526 | + 'parent_id': budget_line_id |
4527 | + }) |
4528 | + # Fetch values if a sequence is given (which permits finding some lines) |
4529 | + if sequence: |
4530 | + # Search if the CSV file have this kind of tuple account/destination and fetch values |
4531 | + csv_line_ids = imported_obj.search(cr, uid, [('account_id', '=', line_id), ('destination_id', '=', destination_id)], context=context) |
4532 | + # If yes, complete budget values from month1 to month12 |
4533 | + if csv_line_ids: |
4534 | + csv_line = imported_obj.read(cr, uid, csv_line_ids[0], ['month1', 'month2', 'month3', 'month4', 'month5', 'month6', 'month7', 'month8', 'month9', 'month10', 'month11', 'month12']) |
4535 | + budget_line_vals.update({ |
4536 | + 'month1': csv_line.get('month1', 0.0), |
4537 | + 'month2': csv_line.get('month2', 0.0), |
4538 | + 'month3': csv_line.get('month3', 0.0), |
4539 | + 'month4': csv_line.get('month4', 0.0), |
4540 | + 'month5': csv_line.get('month5', 0.0), |
4541 | + 'month6': csv_line.get('month6', 0.0), |
4542 | + 'month7': csv_line.get('month7', 0.0), |
4543 | + 'month8': csv_line.get('month8', 0.0), |
4544 | + 'month9': csv_line.get('month9', 0.0), |
4545 | + 'month10': csv_line.get('month10', 0.0), |
4546 | + 'month11': csv_line.get('month11', 0.0), |
4547 | + 'month12': csv_line.get('month12', 0.0), |
4548 | + }) |
4549 | + # Create destination line |
4550 | + budget_line_obj.create(cr, uid, budget_line_vals, context=context) |
4551 | + # Fill in parent lines (only if sequence is given which means that we have probably some values in destination lines) |
4552 | + if sequence: |
4553 | + vals_headers = ['month1', 'month2', 'month3', 'month4', 'month5', 'month6', 'month7', 'month8', 'month9', 'month10', 'month11', 'month12'] |
4554 | + for budget_line_id in to_proceed: |
4555 | + # Search child_ids |
4556 | + child_ids = budget_line_obj.search(cr, uid, [('parent_id', 'child_of', budget_line_id)]) |
4557 | + # Do the sum of them |
4558 | + cr.execute(sql, (tuple(child_ids),)) |
4559 | + tmp_res = cr.fetchall() |
4560 | + # If result, write on the given budget line the result |
4561 | + if tmp_res: |
4562 | + budget_line_vals = dict(zip(vals_headers, tmp_res[0])) |
4563 | + budget_line_obj.write(cr, uid, budget_line_id, budget_line_vals, context=context) |
4564 | + return True |
4565 | + |
4566 | + def _create_expense_account_line_amounts(self, cr, uid, account_ids, actual_amounts, context=None): |
4567 | + # Some checks |
4568 | + if context is None: |
4569 | + context = {} |
4570 | + if isinstance(account_ids, (int, long)): |
4571 | + account_ids = [account_ids] |
4572 | + a_obj = self.pool.get('account.account') |
4573 | + # Browse accounts |
4574 | + for account_id in account_ids: |
4575 | + if (account_id, False) not in actual_amounts: |
4576 | + account = a_obj.browse(cr, uid, account_id, context=context) |
4577 | + result = [0] * 12 |
4578 | + if account.type == 'view': |
4579 | + # children are accounts |
4580 | + for child_account in account.child_id: |
4581 | + if (child_account.id, False) not in actual_amounts: |
4582 | + self._create_expense_account_line_amounts(cr, uid, child_account.id, actual_amounts, context=context) |
4583 | + result = [sum(pair) for pair in zip(result, actual_amounts[child_account.id, False])] |
4584 | + else: |
4585 | + # children are account, destination tuples (already in actual_amounts) |
4586 | + # get all tuples starting with (account_id) |
4587 | + for account_destination in [tuple_acc_dest for tuple_acc_dest in actual_amounts.keys() if tuple_acc_dest[0] == account_id and tuple_acc_dest[1] is not False]: |
4588 | + result = [sum(pair) for pair in zip(result, actual_amounts[account_destination])] |
4589 | + actual_amounts[account_id, False] = result |
4590 | + return |
4591 | + |
4592 | + def _get_cost_center_ids(self, cr, uid, browse_cost_center): |
4593 | + return self.pool.get('account.analytic.account').search(cr, uid, [('parent_id', 'child_of', browse_cost_center.id)]) |
4594 | + |
4595 | def _create_account_destination_domain(self, account_destination_list): |
4596 | if len(account_destination_list) == 0: |
4597 | return ['&', |
4598 | @@ -104,7 +258,7 @@ |
4599 | ('destination_id', '=', account_destination_list[0][1])] |
4600 | else: |
4601 | return ['|'] + self._create_account_destination_domain([account_destination_list[0]]) + self._create_account_destination_domain(account_destination_list[1:]) |
4602 | - |
4603 | + |
4604 | def _get_actual_amounts(self, cr, uid, output_currency_id, domain=[], context=None): |
4605 | # Input: domain for the selection of analytic lines (cost center, date, etc...) |
4606 | # Output: a dict of list {(general_account_id, destination_id): [jan_actual, feb_actual,...]} |
4607 | @@ -113,20 +267,20 @@ |
4608 | context = {} |
4609 | destination_obj = self.pool.get('account.destination.link') |
4610 | # list to store every existing destination link in the system |
4611 | - account_ids = self._get_expense_accounts(cr, uid, context=context) |
4612 | - |
4613 | - destination_link_ids = destination_obj.search(cr, uid, [('account_id', 'in', [x[0] for x in account_ids])], context=context) |
4614 | + account_ids = self.get_expense_accounts(cr, uid, context=context) |
4615 | + |
4616 | + destination_link_ids = destination_obj.search(cr, uid, [('account_id', 'in', account_ids)], context=context) |
4617 | account_destination_ids = [(dest.account_id.id, dest.destination_id.id) |
4618 | for dest |
4619 | in destination_obj.browse(cr, uid, destination_link_ids, context=context)] |
4620 | - |
4621 | + |
4622 | # Fill all general accounts |
4623 | for account_id, destination_id in account_destination_ids: |
4624 | res[account_id, destination_id] = [0] * 12 |
4625 | - |
4626 | + |
4627 | # fill search domain (one search for all analytic lines) |
4628 | domain += self._create_account_destination_domain(account_destination_ids) |
4629 | - |
4630 | + |
4631 | # Analytic domain is now done; lines are retrieved and added |
4632 | analytic_line_obj = self.pool.get('account.analytic.line') |
4633 | analytic_lines = analytic_line_obj.search(cr, uid, domain, context=context) |
4634 | @@ -134,7 +288,7 @@ |
4635 | currency_table = None |
4636 | if 'currency_table_id' in context: |
4637 | currency_table = context['currency_table_id'] |
4638 | - |
4639 | + |
4640 | # parse each line and add it to the right array |
4641 | for analytic_line in analytic_line_obj.browse(cr, uid, analytic_lines, context=context): |
4642 | date_context = {'date': analytic_line.source_date or analytic_line.date, |
4643 | @@ -142,23 +296,22 @@ |
4644 | actual_amount = self.pool.get('res.currency').compute(cr, |
4645 | uid, |
4646 | analytic_line.currency_id.id, |
4647 | - output_currency_id, |
4648 | + output_currency_id, |
4649 | analytic_line.amount_currency or 0.0, |
4650 | round=True, |
4651 | context=date_context) |
4652 | # add the amount to correct month |
4653 | month = datetime.datetime.strptime(analytic_line.date, '%Y-%m-%d').month |
4654 | res[analytic_line.general_account_id.id, analytic_line.destination_id.id][month - 1] += round(actual_amount, 2) |
4655 | - |
4656 | + |
4657 | # after all lines are parsed, absolute of every column |
4658 | for line in res.keys(): |
4659 | res[line] = [-x for x in res[line]] |
4660 | - |
4661 | + |
4662 | # do the view lines |
4663 | - for account_destination_tuple in account_ids: |
4664 | - self._create_expense_account_line_amounts(cr, uid, account_destination_tuple, res, context=context) |
4665 | - |
4666 | + self._create_expense_account_line_amounts(cr, uid, account_ids, res, context=context) |
4667 | + |
4668 | return res |
4669 | - |
4670 | + |
4671 | msf_budget_tools() |
4672 | # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
4673 | |
4674 | === modified file 'msf_budget/msf_budget_view.xml' |
4675 | --- msf_budget/msf_budget_view.xml 2014-01-13 13:44:46 +0000 |
4676 | +++ msf_budget/msf_budget_view.xml 2014-03-21 08:49:45 +0000 |
4677 | @@ -62,11 +62,13 @@ |
4678 | <field name="decision_moment_id" readonly="1" attrs="{'invisible': [('type', '=', 'view')]}"/> |
4679 | <field name="version" readonly="1" attrs="{'invisible': [('type', '=', 'view')]}"/> |
4680 | <field name="type" readonly="1" invisible="1"/> |
4681 | - <group col="4" colspan="2"> |
4682 | - <button name="button_display_type" string="Change display type" type="object" icon="terp-stock_zoom" context="context" colspan="1"/> |
4683 | - <field name="display_type" readonly="1" nolabel="1"/> |
4684 | - </group> |
4685 | + <label string="" colspan="2"/> |
4686 | <field name="currency_id" readonly="1"/> |
4687 | + <group col="3" colspan="4"> |
4688 | + <button name="button_display_type" string="Parent accounts only" type="object" icon="terp-stock_zoom" context="{'granularity': 'view'}" colspan="1"/> |
4689 | + <button name="button_display_type" string="Accounts only" type="object" icon="terp-stock_zoom" context="{'granularity': 'expense'}" colspan="1"/> |
4690 | + <button name="button_display_type" string="Accounts and destinations" type="object" icon="terp-stock_zoom" context="{'granularity': 'all'}" colspan="1"/> |
4691 | + </group> |
4692 | <field name="budget_line_ids" colspan="4" nolabel="1" readonly="1"> |
4693 | <tree string="Budget Lines" editable="top" colors="black:line_type in ('normal');blue:line_type in ('view')"> |
4694 | <field name="name"/> |
4695 | @@ -90,7 +92,7 @@ |
4696 | </field> |
4697 | <field name="state" select="1" readonly="1" attrs="{'invisible': [('type', '=', 'view')]}"/> |
4698 | <group col="4" colspan="2"> |
4699 | - <button string="Approve" name="validate" type="workflow" icon="gtk-ok" attrs="{'invisible': [('|'), ('type', '=', 'view'), ('state', '!=', 'draft')]}"/> |
4700 | + <button string="Approve" name="validate" type="workflow" icon="gtk-ok" attrs="{'invisible': [('|'), ('type', '=', 'view'), ('state', '!=', 'draft')]}" help="By using this button you will not only validate the budget but also update all parent budgets. This will take a while..."/> |
4701 | <button string="Done" name="done" type="workflow" icon="gtk-jump-to" attrs="{'invisible': [('|'), ('type', '=', 'view'), ('state', '!=', 'valid')]}"/> |
4702 | </group> |
4703 | </form> |
4704 | @@ -107,32 +109,12 @@ |
4705 | <field name="context">{'search_default_validated': 1}</field> |
4706 | <field name="help">A budget is a forecast of your company's income and expenses expected for a period in the future. With a budget, a company is able to carefully look at how much money they are taking in during a given period, and figure out the best way to divide it among various categories. By keeping track of where your money goes, you may be less likely to overspend, and more likely to meet your financial goals. Forecast a budget by detailing the expected revenue per analytic account and monitor its evolution based on the actuals realised during that period.</field> |
4707 | </record> |
4708 | - |
4709 | + |
4710 | <menuitem id="next_id_31" name="Budgets" parent="account.menu_finance" sequence="11"/> |
4711 | <menuitem parent="next_id_31" |
4712 | id="menu_act_crossovered_budget_view" |
4713 | action="act_crossovered_budget_view" sequence="1" /> |
4714 | |
4715 | - <record id="wizard_budget_criteria_export_view" model="ir.ui.view"> |
4716 | - <field name="name">Budget vs. Actuals Export</field> |
4717 | - <field name="model">wizard.budget.criteria.export</field> |
4718 | - <field name="type">form</field> |
4719 | - <field name="arch" type="xml"> |
4720 | - <form string="Budget vs. Actuals Export"> |
4721 | - <field name="currency_table_id" domain="[('state','=','valid')]" /> |
4722 | - <field name="period_id"/> |
4723 | - <field name="commitment"/> |
4724 | - <field name="breakdown"/> |
4725 | - <field name="granularity"/> |
4726 | - <separator colspan="4"/> |
4727 | - <group colspan="4" col="6"> |
4728 | - <button special="cancel" string="No" icon="gtk-cancel"/> |
4729 | - <button name="button_create_budget_2" string="Create report" type="object" icon="gtk-ok"/> |
4730 | - </group> |
4731 | - </form> |
4732 | - </field> |
4733 | - </record> |
4734 | - |
4735 | <record id="wizard_budget_actual_export_view" model="ir.ui.view"> |
4736 | <field name="name">Actuals by CC Export</field> |
4737 | <field name="model">wizard.actual.export</field> |
4738 | @@ -162,7 +144,7 @@ |
4739 | <separator colspan="4"/> |
4740 | <group colspan="4" col="6"> |
4741 | <button special="cancel" string="Cancel" icon="gtk-cancel"/> |
4742 | - <button name="import_csv_budget" string="Import Budget" type="object" icon="gtk-ok"/> |
4743 | + <button name="button_import" string="Import Budget" type="object" icon="gtk-ok"/> |
4744 | </group> |
4745 | </form> |
4746 | </field> |
4747 | @@ -207,7 +189,7 @@ |
4748 | <field name="view_id" ref="wizard_budget_import_view"/> |
4749 | <field name="target">new</field> |
4750 | </record> |
4751 | - |
4752 | + |
4753 | <record id="wizard_local_expenses_view" model="ir.ui.view"> |
4754 | <field name="name">Local Expenses</field> |
4755 | <field name="model">wizard.local.expenses</field> |
4756 | @@ -245,15 +227,15 @@ |
4757 | <!-- Menu Item: Analytic Accounts which displays only Cost Center, and not allow: delete, new, edit |
4758 | hide_new_button="1" hide_delete_button="1" |
4759 | --> |
4760 | - |
4761 | + |
4762 | <menuitem parent="account.menu_finance_generic_reporting" |
4763 | action="action_wizard_local_expenses" |
4764 | id="menu_action_wizard_local_expenses"/> |
4765 | - |
4766 | + |
4767 | <menuitem parent="next_id_31" |
4768 | action="action_wizard_budget_import" |
4769 | id="menu_action_wizard_budget_import" sequence="2"/> |
4770 | - |
4771 | + |
4772 | <record id="view_msf_budget_summary_tree" model="ir.ui.view"> |
4773 | <field name="name">msf.budget.summary.tree</field> |
4774 | <field name="model">msf.budget.summary</field> |
4775 | @@ -270,7 +252,7 @@ |
4776 | </tree> |
4777 | </field> |
4778 | </record> |
4779 | - |
4780 | + |
4781 | <record id="action_wizard_budget_summary_tree" model="ir.actions.server"> |
4782 | <field name="name">Budget Status</field> |
4783 | <field name="model_id" ref="model_msf_budget"/> |
4784 | @@ -294,7 +276,7 @@ |
4785 | <field name="domain">[('category','=','OC')]</field> |
4786 | <field name="field_parent">child_complete_ids</field> |
4787 | <field name="arch" type="xml"> |
4788 | - <tree colors="red:(date<current_date);black:(date>=current_date);black:(date==False)" string="Analytic Accounts" |
4789 | + <tree colors="red:(date<current_date);black:(date>=current_date);black:(date==False)" string="Analytic Accounts" |
4790 | toolbar="1" hide_new_button="1" hide_delete_button="1" editable="top" noteditable="1"> |
4791 | <field name="name"/> |
4792 | <field name="code"/> |
4793 | @@ -371,10 +353,10 @@ |
4794 | <field name="view_type">form</field> |
4795 | <field name="view_id" ref="view_msf_budget_decision_moment_tree"/> |
4796 | </record> |
4797 | - |
4798 | + |
4799 | <menuitem id="menu_action_msf_budget_decision_moment_tree" |
4800 | parent="next_id_31" |
4801 | action="action_msf_budget_decision_moment_tree"/> |
4802 | - |
4803 | + |
4804 | </data> |
4805 | </openerp> |
4806 | |
4807 | === modified file 'msf_budget/msf_budget_workflow.xml' |
4808 | --- msf_budget/msf_budget_workflow.xml 2012-03-20 16:56:09 +0000 |
4809 | +++ msf_budget/msf_budget_workflow.xml 2014-03-21 08:49:45 +0000 |
4810 | @@ -20,7 +20,7 @@ |
4811 | <field name="wkf_id" ref="wkf_msf_budget" /> |
4812 | <field name="name">validate</field> |
4813 | <field name="kind">function</field> |
4814 | - <field name="action">write({'state':'valid'})</field> |
4815 | + <field name="action">action_confirmed()</field> |
4816 | </record> |
4817 | |
4818 | <record model="workflow.activity" id="act_done"> |
4819 | @@ -45,4 +45,4 @@ |
4820 | </record> |
4821 | |
4822 | </data> |
4823 | -</openerp> |
4824 | \ No newline at end of file |
4825 | +</openerp> |
4826 | |
4827 | === modified file 'msf_budget/report/__init__.py' |
4828 | --- msf_budget/report/__init__.py 2012-07-25 11:49:04 +0000 |
4829 | +++ msf_budget/report/__init__.py 2014-03-21 08:49:45 +0000 |
4830 | @@ -21,7 +21,6 @@ |
4831 | |
4832 | import report_budget_criteria |
4833 | import report_budget_actual |
4834 | -import report_csv_budget_monthly |
4835 | import report_pdf_budget_monthly |
4836 | import report_pdf_budget_summary |
4837 | import report_pdf_engagement |
4838 | |
4839 | === modified file 'msf_budget/report/budget_criteria_xls.mako' |
4840 | --- msf_budget/report/budget_criteria_xls.mako 2013-09-20 13:40:19 +0000 |
4841 | +++ msf_budget/report/budget_criteria_xls.mako 2014-03-21 08:49:45 +0000 |
4842 | @@ -20,426 +20,423 @@ |
4843 | <ProtectStructure>False</ProtectStructure> |
4844 | <ProtectWindows>False</ProtectWindows> |
4845 | </ExcelWorkbook> |
4846 | - |
4847 | - |
4848 | - |
4849 | - |
4850 | - |
4851 | - |
4852 | - |
4853 | - |
4854 | -<Styles> |
4855 | -<Style ss:ID="Default" ss:Name="Normal"> |
4856 | -<Alignment ss:Vertical="Bottom"/> |
4857 | -<Borders/> |
4858 | -<Font/> |
4859 | -<Interior/> |
4860 | -<NumberFormat/> |
4861 | -<Protection/> |
4862 | -</Style> |
4863 | -<Style ss:ID="s20" ss:Name="Percent"> |
4864 | -<NumberFormat ss:Format="0%"/> |
4865 | -</Style> |
4866 | -<Style ss:ID="s21"> |
4867 | -<Interior ss:Pattern="Solid"/> |
4868 | -</Style> |
4869 | -<Style ss:ID="s22"> |
4870 | -<Borders> |
4871 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4872 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="1"/> |
4873 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="1"/> |
4874 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="1"/> |
4875 | -</Borders> |
4876 | -<Font x:Family="Swiss" ss:Bold="1"/> |
4877 | -</Style> |
4878 | -<Style ss:ID="s23"> |
4879 | -<Borders> |
4880 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4881 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="1"/> |
4882 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="1"/> |
4883 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="1"/> |
4884 | -</Borders> |
4885 | -</Style> |
4886 | -<Style ss:ID="s24"> |
4887 | -<Alignment ss:Horizontal="Left" ss:Vertical="Bottom"/> |
4888 | -<Borders> |
4889 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4890 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="1"/> |
4891 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="1"/> |
4892 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="1"/> |
4893 | -</Borders> |
4894 | -<Interior ss:Pattern="Solid"/> |
4895 | -</Style> |
4896 | -<Style ss:ID="s25"> |
4897 | -<Borders> |
4898 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4899 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="1"/> |
4900 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="1"/> |
4901 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="1"/> |
4902 | -</Borders> |
4903 | -<Font x:Family="Swiss" ss:Bold="1"/> |
4904 | -<Interior ss:Pattern="Solid"/> |
4905 | -</Style> |
4906 | -<Style ss:ID="s26"> |
4907 | -<Alignment ss:Vertical="Center" ss:WrapText="1"/> |
4908 | -</Style> |
4909 | -<Style ss:ID="s27"> |
4910 | -<Alignment ss:Vertical="Center" ss:WrapText="1"/> |
4911 | -<Borders> |
4912 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="2"/> |
4913 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="2"/> |
4914 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="2"/> |
4915 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="2"/> |
4916 | -</Borders> |
4917 | -<Font x:Family="Swiss" ss:Bold="1"/> |
4918 | -</Style> |
4919 | - |
4920 | -<Style ss:ID="s27a"> |
4921 | -<Alignment ss:Vertical="Center" ss:WrapText="1"/> |
4922 | -<Borders> |
4923 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4924 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="1"/> |
4925 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="1"/> |
4926 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="1"/> |
4927 | -</Borders> |
4928 | -<Font x:Family="Swiss" ss:Bold="1"/> |
4929 | -</Style> |
4930 | - |
4931 | - |
4932 | -<Style ss:ID="s32"> |
4933 | -<Alignment ss:Vertical="Center" ss:WrapText="1"/> |
4934 | -<Borders> |
4935 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="2"/> |
4936 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="2"/> |
4937 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="1"/> |
4938 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="2"/> |
4939 | -</Borders> |
4940 | -<Font x:Family="Swiss" ss:Bold="1"/> |
4941 | -<Interior ss:Pattern="Solid"/> |
4942 | -</Style> |
4943 | -<Style ss:ID="s33"> |
4944 | -<Alignment ss:Vertical="Center" ss:WrapText="1"/> |
4945 | -<Borders> |
4946 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="2"/> |
4947 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="1"/> |
4948 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="2"/> |
4949 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="2"/> |
4950 | -</Borders> |
4951 | -<Font x:Family="Swiss" ss:Bold="1"/> |
4952 | -<Interior ss:Pattern="Solid"/> |
4953 | -</Style> |
4954 | -<Style ss:ID="s34"> |
4955 | -<Alignment ss:Horizontal="Left" ss:Vertical="Bottom"/> |
4956 | -<Interior ss:Pattern="Solid"/> |
4957 | -</Style> |
4958 | -<Style ss:ID="s39"> |
4959 | -<Borders> |
4960 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4961 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="2"/> |
4962 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="1"/> |
4963 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="2"/> |
4964 | -</Borders> |
4965 | -<Interior ss:Pattern="Solid"/> |
4966 | -<NumberFormat ss:Format="#,##0"/> |
4967 | -</Style> |
4968 | -<Style ss:ID="s41" ss:Parent="s20"> |
4969 | -<Borders> |
4970 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4971 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="1"/> |
4972 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="2"/> |
4973 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="2"/> |
4974 | -</Borders> |
4975 | -<Interior ss:Pattern="Solid"/> |
4976 | -<NumberFormat ss:Format="0.0%"/> |
4977 | -</Style> |
4978 | -<Style ss:ID="s46"> |
4979 | -<Borders> |
4980 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4981 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="2"/> |
4982 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="1"/> |
4983 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="1"/> |
4984 | -</Borders> |
4985 | -<Interior ss:Pattern="Solid"/> |
4986 | -<NumberFormat ss:Format="#,##0"/> |
4987 | -</Style> |
4988 | -<Style ss:ID="s47" ss:Parent="s20"> |
4989 | -<Borders> |
4990 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="1"/> |
4991 | -<Border ss:Position="Left" ss:LineStyle="Continuous" ss:Weight="1"/> |
4992 | -<Border ss:Position="Right" ss:LineStyle="Continuous" ss:Weight="2"/> |
4993 | -<Border ss:Position="Top" ss:LineStyle="Continuous" ss:Weight="1"/> |
4994 | -</Borders> |
4995 | -<Interior ss:Pattern="Solid"/> |
4996 | -<NumberFormat ss:Format="0.0%"/> |
4997 | -</Style> |
4998 | -<Style ss:ID="s53"> |
4999 | -<Borders> |
5000 | -<Border ss:Position="Bottom" ss:LineStyle="Continuous" ss:Weight="2"/> |
The diff has been truncated for viewing.