Merge lp:~julie-w/unifield-server/US-4058 into lp:unifield-server/uf7
- US-4058
- Merge into uf7
Status: | Needs review |
---|---|
Proposed branch: | lp:~julie-w/unifield-server/US-4058 |
Merge into: | lp:unifield-server/uf7 |
Diff against target: |
2496 lines (+524/-392) 21 files modified
bin/addons/account/account_cash_statement.py (+19/-16) bin/addons/account_corrections/account_move_line.py (+10/-2) bin/addons/base/module/module.py (+0/-7) bin/addons/delivery_mechanism/wizard/enter_reason.py (+3/-1) bin/addons/msf_accrual/wizard/wizard_accrual_reversal.py (+1/-1) bin/addons/msf_accrual/wizard/wizard_accrual_validation.py (+2/-2) bin/addons/msf_homere_interface/wizard/hr_payroll_import.py (+3/-0) bin/addons/msf_instance/add_instance.py (+40/-3) bin/addons/msf_outgoing/msf_outgoing.py (+97/-97) bin/addons/msf_outgoing/report/labels.rml (+1/-1) bin/addons/msf_profile/i18n/fr_MF.po (+19/-0) bin/addons/msf_supply_doc_export/wizard/po_follow_up.py (+49/-49) bin/addons/purchase/purchase_workflow.py (+39/-27) bin/addons/register_accounting/report/fully_report_xls.mako (+10/-2) bin/addons/register_accounting/wizard/import_invoice_on_registers.py (+2/-1) bin/addons/register_accounting/wizard/wizard_register_import.py (+93/-70) bin/addons/sale/sale_workflow.py (+42/-27) bin/addons/sync_so/purchase.py (+1/-1) bin/addons/tender_flow/tender_flow.py (+89/-84) bin/release.py (+1/-1) bin/sql_db.py (+3/-0) |
To merge this branch: | bzr merge lp:~julie-w/unifield-server/US-4058 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
UniField Dev Team | Pending | ||
Review via email: mp+337538@code.launchpad.net |
Commit message
Description of the change
Unmerged revisions
- 4719. By Julie Nuguet
-
US-4058 [FIX] Accruals: duplications were created when posting accrual & reversal lines
- 4718. By jftempo
-
US-3893 [FIX] C&R IN at header level should set state of stock moves to 'Canceled-r' and FO should remain Confirmed
- 4717. By jftempo
-
US-3944 [FIX] PO/FO: prevent from validating an order with taxes if linked to an intermission partner
- 4716. By jftempo
-
US-3885 [FIX] PO created from Tender are not auto linked to initial IR/FO and AD is not populated from FO
- 4715. By jftempo
-
US-3853 [IMP] Register Lines import: checks on "third party required"
- 4714. By jftempo
-
US-3612 [FIX] Register Full Report: remove duplicates for the use case 'advance return lines linked to Free1/2'
- 4713. By jftempo
-
US-2924 [FIX] Pick pdf labels: print weight
- 4712. By jftempo
-
US-3259 [FIX] PO Follow Up per Supplier: escape html entities
- 4711. By jftempo
-
US-3798 [FIX] Funding Pool: when a FP is created use the current instance_id by default
- 4710. By jftempo
-
US-3493 [IMP] Payroll import: strip spaces in all entries
Preview Diff
1 | === modified file 'bin/addons/account/account_cash_statement.py' |
2 | --- bin/addons/account/account_cash_statement.py 2016-08-18 08:32:03 +0000 |
3 | +++ bin/addons/account/account_cash_statement.py 2018-02-12 10:19:50 +0000 |
4 | @@ -61,7 +61,7 @@ |
5 | 'subtotal': fields.function(_sub_total, method=True, string='Sub Total', type='float', digits_compute=dp.get_precision('Account')), |
6 | 'starting_id': fields.many2one('account.bank.statement', ondelete='cascade'), |
7 | 'ending_id': fields.many2one('account.bank.statement', ondelete='cascade'), |
8 | - } |
9 | + } |
10 | |
11 | account_cashbox_line() |
12 | |
13 | @@ -115,7 +115,7 @@ |
14 | for statement in self.browse(cr, uid, ids, context=context): |
15 | encoding_total=0.0 |
16 | for line in statement.line_ids: |
17 | - encoding_total += line.amount |
18 | + encoding_total += line.amount |
19 | res2[statement.id] = encoding_total |
20 | return res2 |
21 | |
22 | @@ -125,7 +125,7 @@ |
23 | res = {} |
24 | |
25 | company_currency_id = res_users_obj.browse(cursor, user, user, |
26 | - context=context).company_id.currency_id.id |
27 | + context=context).company_id.currency_id.id |
28 | |
29 | statements = self.browse(cursor, user, ids, context=context) |
30 | for statement in statements: |
31 | @@ -136,14 +136,14 @@ |
32 | if line.account_id.id == \ |
33 | statement.journal_id.default_debit_account_id.id: |
34 | res[statement.id] += res_currency_obj.compute(cursor, |
35 | - user, company_currency_id, currency_id, |
36 | - line.debit, context=context) |
37 | + user, company_currency_id, currency_id, |
38 | + line.debit, context=context) |
39 | else: |
40 | if line.account_id.id == \ |
41 | statement.journal_id.default_credit_account_id.id: |
42 | res[statement.id] -= res_currency_obj.compute(cursor, |
43 | - user, company_currency_id, currency_id, |
44 | - line.credit, context=context) |
45 | + user, company_currency_id, currency_id, |
46 | + line.credit, context=context) |
47 | |
48 | if statement.state in ('draft', 'open'): |
49 | for line in statement.line_ids: |
50 | @@ -221,8 +221,8 @@ |
51 | 'balance_end_real': fields.float('Closing Balance', digits_compute=dp.get_precision('Account'), states={'confirm': [('readonly', True)]}, help="closing balance entered by the cashbox verifier"), |
52 | 'state': fields.selection( |
53 | [('draft', 'Draft'), |
54 | - ('confirm', 'Closed'), |
55 | - ('open','Open')], 'State', required=True, states={'confirm': [('readonly', True)]}, readonly="1"), |
56 | + ('confirm', 'Closed'), |
57 | + ('open','Open')], 'State', required=True, states={'confirm': [('readonly', True)]}, readonly="1"), |
58 | 'total_entry_encoding': fields.function(_get_sum_entry_encoding, method=True, store=True, string="Cash Transaction", help="Total cash transactions"), |
59 | 'closing_date': fields.datetime("Closed On"), |
60 | 'balance_end': fields.function(_end_balance, method=True, store=True, string='Balance', help="Closing balance based on Starting Balance and Cash Transactions"), |
61 | @@ -238,12 +238,12 @@ |
62 | 'user_id': lambda self, cr, uid, context=None: uid, |
63 | 'starting_details_ids': _get_cash_open_box_lines, |
64 | 'ending_details_ids': _get_default_cash_close_box_lines |
65 | - } |
66 | + } |
67 | |
68 | def create(self, cr, uid, vals, context=None): |
69 | sql = [ |
70 | - ('journal_id', '=', vals.get('journal_id', False)), |
71 | - ('state', '=', 'open') |
72 | + ('journal_id', '=', vals.get('journal_id', False)), |
73 | + ('state', '=', 'open') |
74 | ] |
75 | open_jrnl = self.search(cr, uid, sql) |
76 | if open_jrnl: |
77 | @@ -255,10 +255,10 @@ |
78 | for start in vals.get('starting_details_ids'): |
79 | dict_val = start[2] |
80 | for end in open_close['end']: |
81 | - if end[2]['pieces'] == dict_val['pieces']: |
82 | - end[2]['number'] += dict_val['number'] |
83 | + if end[2]['pieces'] == dict_val['pieces']: |
84 | + end[2]['number'] += dict_val['number'] |
85 | vals.update({ |
86 | -# 'ending_details_ids': open_close['start'], |
87 | + # 'ending_details_ids': open_close['start'], |
88 | 'starting_details_ids': open_close['end'] |
89 | }) |
90 | else: |
91 | @@ -308,8 +308,11 @@ |
92 | return super(account_cash_statement, self).onchange_journal_id(cr, uid, statement_id, journal_id, context=context) |
93 | |
94 | def _equal_balance(self, cr, uid, cash_id, context=None): |
95 | + if context is None: |
96 | + context = {} |
97 | statement = self.browse(cr, uid, cash_id, context=context) |
98 | - self.write(cr, uid, [cash_id], {'balance_end_real': statement.balance_end}) |
99 | + context.update({'from_cash_statement_equal_balance': True}) |
100 | + self.write(cr, uid, [cash_id], {'balance_end_real': statement.balance_end}, context=context) |
101 | statement.balance_end_real = statement.balance_end |
102 | if abs(statement.balance_end - statement.balance_end_cash) > 10**-4: |
103 | return False |
104 | |
105 | === modified file 'bin/addons/account_corrections/account_move_line.py' |
106 | --- bin/addons/account_corrections/account_move_line.py 2017-10-27 12:10:07 +0000 |
107 | +++ bin/addons/account_corrections/account_move_line.py 2018-02-12 10:19:50 +0000 |
108 | @@ -527,6 +527,9 @@ |
109 | j_extra_ids = j_obj.search(cr, uid, [('type', '=', 'extra'), |
110 | ('is_current_instance', '=', True)]) |
111 | j_extra_id = j_extra_ids and j_extra_ids[0] or False |
112 | + j_ana_extra_ids = ana_j_obj.search(cr, uid, [('type', '=', 'extra'), ('is_current_instance', '=', True)], context=context) |
113 | + j_ana_extra_id = j_ana_extra_ids and j_ana_extra_ids[0] or False |
114 | + |
115 | # Search attached period |
116 | period_ids = self.pool.get('account.period').search(cr, uid, [('date_start', '<=', date), ('date_stop', '>=', date)], context=context, |
117 | limit=1, order='date_start, name') |
118 | @@ -555,8 +558,13 @@ |
119 | journal_id = j_corr_id |
120 | if is_inkind: |
121 | journal_id = j_extra_id |
122 | - if not journal_id and is_inkind: |
123 | - raise osv.except_osv(_('Error'), _('No OD-Extra Accounting Journal found!')) |
124 | + j_ana_corr_id = j_ana_extra_id |
125 | + |
126 | + if is_inkind: |
127 | + if not journal_id: |
128 | + raise osv.except_osv(_('Error'), _('No OD-Extra Accounting Journal found!')) |
129 | + elif not j_ana_extra_id: |
130 | + raise osv.except_osv(_('Error'), _('No OD-Extra Accounting Analytic Journal found!')) |
131 | elif not journal_id: |
132 | raise osv.except_osv(_('Error'), _('No correction journal found!')) |
133 | |
134 | |
135 | === modified file 'bin/addons/base/module/module.py' |
136 | --- bin/addons/base/module/module.py 2017-10-04 05:23:42 +0000 |
137 | +++ bin/addons/base/module/module.py 2018-02-12 10:19:50 +0000 |
138 | @@ -538,13 +538,6 @@ |
139 | if not mod.description: |
140 | logger.warn('module %s: description is empty !', mod.name) |
141 | |
142 | - if not mod.certificate or not mod.certificate.isdigit(): |
143 | - logger.info('module %s: no quality certificate', mod.name) |
144 | - else: |
145 | - val = long(mod.certificate[2:]) % 97 == 29 |
146 | - if not val: |
147 | - logger.critical('module %s: invalid quality certificate: %s', mod.name, mod.certificate) |
148 | - raise osv.except_osv(_('Error'), _('Module %s: Invalid Quality Certificate') % (mod.name,)) |
149 | |
150 | def list_web(self, cr, uid, context=None): |
151 | """ list_web(cr, uid, context) -> [(module_name, module_version)] |
152 | |
153 | === modified file 'bin/addons/delivery_mechanism/wizard/enter_reason.py' |
154 | --- bin/addons/delivery_mechanism/wizard/enter_reason.py 2017-11-03 10:59:21 +0000 |
155 | +++ bin/addons/delivery_mechanism/wizard/enter_reason.py 2018-02-12 10:19:50 +0000 |
156 | @@ -66,6 +66,8 @@ |
157 | # set the reason |
158 | obj.write({'change_reason': change_reason}, context=context) |
159 | |
160 | + if context.get('do_resource', False): |
161 | + self.pool.get('stock.move').write(cr, uid, [move.id for move in obj.move_lines], {'has_to_be_resourced': True}, context=context) |
162 | self.pool.get('stock.move').action_cancel(cr, uid, [move.id for move in obj.move_lines], context=context) |
163 | |
164 | # cancel the IN |
165 | @@ -74,7 +76,7 @@ |
166 | # correct the corresponding po manually if exists - should be in shipping exception |
167 | if obj.purchase_id: |
168 | wf_service.trg_validate(uid, 'purchase.order', obj.purchase_id.id, 'picking_ok', cr) |
169 | - purchase_obj.log(cr, uid, obj.purchase_id.id, _('The Purchase Order %s is %s%% received')%(obj.purchase_id.name, round(obj.purchase_id.shipped_rate,2))) |
170 | + purchase_obj.log(cr, uid, obj.purchase_id.id, _('The Purchase Order %s is %s%% received') % (obj.purchase_id.name, round(obj.purchase_id.shipped_rate, 2))) |
171 | |
172 | self.infolog(cr, uid, "The Incoming shipment id:%s (%s) has been canceled%s." % ( |
173 | obj.id, obj.name, cancel_type != 'update_out' and ' and resourced' or '', |
174 | |
175 | === modified file 'bin/addons/msf_accrual/wizard/wizard_accrual_reversal.py' |
176 | --- bin/addons/msf_accrual/wizard/wizard_accrual_reversal.py 2016-02-19 13:23:15 +0000 |
177 | +++ bin/addons/msf_accrual/wizard/wizard_accrual_reversal.py 2018-02-12 10:19:50 +0000 |
178 | @@ -70,7 +70,7 @@ |
179 | raise osv.except_osv(_('Warning !'), _("The reversal period '%s' is not open!" % reversal_period.name)) |
180 | |
181 | # post the accrual reversal |
182 | - accrual_line_obj.accrual_reversal_post(cr, uid, context['active_ids'], document_date, |
183 | + accrual_line_obj.accrual_reversal_post(cr, uid, [accrual_line.id], document_date, |
184 | posting_date, context=context) |
185 | |
186 | # close the wizard |
187 | |
188 | === modified file 'bin/addons/msf_accrual/wizard/wizard_accrual_validation.py' |
189 | --- bin/addons/msf_accrual/wizard/wizard_accrual_validation.py 2016-05-24 08:44:23 +0000 |
190 | +++ bin/addons/msf_accrual/wizard/wizard_accrual_validation.py 2018-02-12 10:19:50 +0000 |
191 | @@ -62,11 +62,11 @@ |
192 | raise osv.except_osv(_('Warning !'), _("The reversal period '%s' is not open!" % reversal_period.name)) |
193 | |
194 | # post the accrual |
195 | - accrual_line_obj.accrual_post(cr, uid, context['active_ids'], context=context) |
196 | + accrual_line_obj.accrual_post(cr, uid, [accrual_line.id], context=context) |
197 | # post its reversal only if it is a reversing accrual |
198 | if accrual_line.accrual_type == 'reversing_accrual': |
199 | reversal_date = (datetime.datetime.strptime(accrual_line.date, '%Y-%m-%d') + relativedelta(days=1)).strftime('%Y-%m-%d') |
200 | - accrual_line_obj.accrual_reversal_post(cr, uid, context['active_ids'], reversal_date, |
201 | + accrual_line_obj.accrual_reversal_post(cr, uid, [accrual_line.id], reversal_date, |
202 | reversal_date, context=context) |
203 | |
204 | # close the wizard |
205 | |
206 | === modified file 'bin/addons/msf_homere_interface/wizard/hr_payroll_import.py' |
207 | --- bin/addons/msf_homere_interface/wizard/hr_payroll_import.py 2017-10-06 14:55:12 +0000 |
208 | +++ bin/addons/msf_homere_interface/wizard/hr_payroll_import.py 2018-02-12 10:19:50 +0000 |
209 | @@ -136,6 +136,9 @@ |
210 | error_message = "" |
211 | partner_obj = self.pool.get('res.partner') |
212 | |
213 | + # strip spaces in all columns |
214 | + for i in range(len(data)): |
215 | + data[i] = data[i].strip() |
216 | if len(data) == 13: |
217 | accounting_code, description, second_description, third, expense, receipt, project, financing_line, \ |
218 | financing_contract, date, currency, project, analytic_line = zip(data) |
219 | |
220 | === modified file 'bin/addons/msf_instance/add_instance.py' |
221 | --- bin/addons/msf_instance/add_instance.py 2016-10-28 09:07:17 +0000 |
222 | +++ bin/addons/msf_instance/add_instance.py 2018-02-12 10:19:50 +0000 |
223 | @@ -425,10 +425,27 @@ |
224 | def write(self, cr, uid, ids, vals, context=None): |
225 | if not ids: |
226 | return True |
227 | + if context is None: |
228 | + context = {} |
229 | if 'journal_id' in vals: |
230 | journal = self.pool.get('account.journal').read(cr, uid, vals['journal_id'], ['instance_id'], context=context) |
231 | vals['instance_id'] = journal.get('instance_id')[0] |
232 | - return super(account_bank_statement, self).write(cr, uid, ids, vals, context=context) |
233 | + res = True |
234 | + for reg in self.browse(cr, uid, ids, fields_to_fetch=['closing_balance_frozen'], context=context): |
235 | + # if the End-of-the-Month Balance has already been confirmed for a register, ignore changes on fields that |
236 | + # should be read-only in that case (cover the use case of concurrent changes by 2 users) |
237 | + newvals = vals.copy() |
238 | + if reg.closing_balance_frozen: |
239 | + # remove the values for each register with a confirmed balance |
240 | + # Note: at Cashbox closing the balance_end_real is set to the reg.balance_end value: keep this change |
241 | + if 'balance_end_real' in newvals and not context.get('from_cash_statement_equal_balance', False): |
242 | + del newvals['balance_end_real'] |
243 | + if 'balance_start' in newvals: |
244 | + del newvals['balance_start'] |
245 | + if 'ending_details_ids' in newvals: |
246 | + del newvals['ending_details_ids'] |
247 | + res = res and super(account_bank_statement, self).write(cr, uid, [reg.id], newvals, context=context) |
248 | + return res |
249 | |
250 | account_bank_statement() |
251 | |
252 | @@ -499,8 +516,25 @@ |
253 | vals['instance_id'] = register.get('instance_id')[0] |
254 | return super(account_cashbox_line, self).write(cr, uid, ids, vals, context=context) |
255 | |
256 | + def unlink(self, cr, uid, ids, context=None): |
257 | + """ |
258 | + CashBox Line Deletion method |
259 | + The deletion isn't triggered for Closing Balance Lines linked to a reg. with a Confirmed month-end cash count |
260 | + (covers the use case of concurrent changes by 2 users) |
261 | + """ |
262 | + if context is None: |
263 | + context = {} |
264 | + if isinstance(ids, (int, long)): |
265 | + ids = [ids] |
266 | + res = True |
267 | + for line in self.browse(cr, uid, ids, fields_to_fetch=['ending_id'], context=context): |
268 | + if not line.ending_id or not line.ending_id.closing_balance_frozen: |
269 | + res = res and super(account_cashbox_line, self).unlink(cr, uid, [line.id], context=context) |
270 | + return res |
271 | + |
272 | account_cashbox_line() |
273 | |
274 | + |
275 | class account_analytic_account(osv.osv): |
276 | _name = 'account.analytic.account' |
277 | _inherit = 'account.analytic.account' |
278 | @@ -534,10 +568,11 @@ |
279 | 'current_instance_type': lambda self, cr, uid, c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.instance_id.level, |
280 | } |
281 | |
282 | - def check_fp(self, cr, uid, vals, context=None): |
283 | + def check_fp(self, cr, uid, vals, to_update=False, context=None): |
284 | """ |
285 | Check that FP have an instance_id |
286 | Check that the given instance is not section level! |
287 | + If to_update is True and no instance_id is in vals: update vals with the id of the current instance |
288 | """ |
289 | if context is None: |
290 | context = {} |
291 | @@ -554,6 +589,8 @@ |
292 | if not current_instance or current_instance.level == 'section': |
293 | raise osv.except_osv(_('Error'), _('Proprietary Instance is mandatory for FP accounts!')) |
294 | instance_id = current_instance.id |
295 | + if to_update: |
296 | + vals.update({'instance_id': instance_id}) |
297 | instance_level = self.pool.get('msf.instance').browse(cr, uid, instance_id).level |
298 | if instance_level == 'section': |
299 | raise osv.except_osv(_('Warning'), _('Proprietary Instance for FP accounts should be only COORDO and/or MISSION')) |
300 | @@ -567,7 +604,7 @@ |
301 | context = {} |
302 | # Check that instance_id is filled in for FP |
303 | if context.get('from_web', False) is True: |
304 | - self.check_fp(cr, uid, vals, context=context) |
305 | + self.check_fp(cr, uid, vals, to_update=True, context=context) |
306 | return super(account_analytic_account, self).create(cr, uid, vals, context=context) |
307 | |
308 | def write(self, cr, uid, ids, vals, context=None): |
309 | |
310 | === modified file 'bin/addons/msf_outgoing/msf_outgoing.py' |
311 | --- bin/addons/msf_outgoing/msf_outgoing.py 2017-12-14 17:30:55 +0000 |
312 | +++ bin/addons/msf_outgoing/msf_outgoing.py 2018-02-12 10:19:50 +0000 |
313 | @@ -613,7 +613,7 @@ |
314 | shipment_name = '%s-%s' % (shipment.name, shipment_number) |
315 | source_shipment_address_id = shipment.address_id.id if shipment.address_id else False |
316 | |
317 | - if context.get('rw_shipment_name', False) and context.get('sync_message_execution', False): # RW Sync - update the shipment name same as on RW instance |
318 | + if context.get('rw_shipment_name', False) and context.get('sync_message_execution', False): # RW Sync - update the shipment name same as on RW instance |
319 | shipment_name = context.get('rw_shipment_name') |
320 | del context['rw_shipment_name'] |
321 | |
322 | @@ -659,7 +659,7 @@ |
323 | del context['description_ppl'] |
324 | |
325 | for family in wizard.family_ids: |
326 | - if not family.selected_number: # UTP-1015 fix from Quentin |
327 | + if not family.selected_number: # UTP-1015 fix from Quentin |
328 | continue |
329 | |
330 | picking = family.draft_packing_id |
331 | @@ -674,7 +674,7 @@ |
332 | 'backorder_id': picking.id, |
333 | 'shipment_id': False, |
334 | 'move_lines': [], |
335 | - 'description_ppl': description_ppl or picking.description_ppl, # US-803: added the description |
336 | + 'description_ppl': description_ppl or picking.description_ppl, # US-803: added the description |
337 | } |
338 | # Update context for copy |
339 | context.update({ |
340 | @@ -709,8 +709,8 @@ |
341 | |
342 | # Log creation message |
343 | message = _('The new Shipment id:%s (%s) has been created.') |
344 | - self.log(cr, uid, shipment.id, message%(shipment.id, shipment_name,)) |
345 | - self.infolog(cr, uid, message%(shipment.id, shipment.name)) |
346 | + self.log(cr, uid, shipment.id, message % (shipment.id, shipment_name,)) |
347 | + self.infolog(cr, uid, message % (shipment.id, shipment.name)) |
348 | # The shipment is automatically shipped, no more pack states in between. |
349 | self.ship(cr, uid, [shipment_id], context=context) |
350 | |
351 | @@ -729,7 +729,7 @@ |
352 | }) |
353 | |
354 | return { |
355 | - 'name':_("Shipment"), |
356 | + 'name': _("Shipment"), |
357 | 'type': 'ir.actions.act_window', |
358 | 'res_model': 'shipment', |
359 | 'view_mode': 'form,tree', |
360 | @@ -881,7 +881,7 @@ |
361 | 'from_pack': family.to_pack - family.selected_number + 1, |
362 | 'to_pack': family.to_pack, |
363 | 'state': 'done', |
364 | - 'not_shipped': True, #BKLG-13: set the pack returned to stock also as not_shipped, for showing to view ship draft |
365 | + 'not_shipped': True, # BKLG-13: set the pack returned to stock also as not_shipped, for showing to view ship draft |
366 | } |
367 | context['non_stock_noupdate'] = True |
368 | |
369 | @@ -902,12 +902,12 @@ |
370 | ('wizard_id', '=', picking_processor_wiz.id), |
371 | ('move_id', '=', draft_move.id), |
372 | ], context=context) |
373 | - if not save_as_draft_move: # then create the SaD line: |
374 | + if not save_as_draft_move: # then create the SaD line: |
375 | move_data = self.pool.get('create.picking.move.processor')._get_line_data(cr, uid, wizard=picking_processor_wiz, move=draft_move, context=context) |
376 | move_data.update({'quantity': return_qty}) |
377 | save_as_draft_move = self.pool.get('create.picking.move.processor').create(cr, uid, move_data, context=context) |
378 | save_as_draft_move = [save_as_draft_move] |
379 | - else: # update Sad line |
380 | + else: # update Sad line |
381 | for sad_move in self.pool.get('create.picking.move.processor').browse(cr, uid, save_as_draft_move, context=context): |
382 | self.pool.get('create.picking.move.processor').write(cr, uid, sad_move.id, { |
383 | 'ordered_quantity': sad_move.ordered_quantity + return_qty, |
384 | @@ -935,13 +935,13 @@ |
385 | # If everything is allright (all draft packing are finished) the shipment is done also |
386 | self.complete_finished(cr, uid, shipment_ids, context=context) |
387 | |
388 | - #UF-2531: Create manually the message for the return pack of the ship |
389 | + # UF-2531: Create manually the message for the return pack of the ship |
390 | if shipment and shipment.id: |
391 | self._manual_create_rw_shipment_message(cr, uid, shipment.id, return_info, 'usb_shipment_return_packs_shipment_draft', context=context) |
392 | |
393 | view_id = data_obj.get_object_reference(cr, uid, 'msf_outgoing', 'view_picking_ticket_form') |
394 | return { |
395 | - 'name':_("Picking Ticket"), |
396 | + 'name': _("Picking Ticket"), |
397 | 'view_mode': 'form,tree', |
398 | 'view_id': [view_id and view_id[1] or False], |
399 | 'view_type': 'form', |
400 | @@ -1207,13 +1207,13 @@ |
401 | # if everything is allright (all draft packing are finished) the shipment is done also |
402 | self.complete_finished(cr, uid, shipment_ids, context=context) |
403 | |
404 | - #UF-2531: Create manually the message for the return pack of the ship |
405 | + # UF-2531: Create manually the message for the return pack of the ship |
406 | if shipment and shipment.id: |
407 | self._manual_create_rw_shipment_message(cr, uid, shipment.id, return_info, 'usb_shipment_return_packs', context=context) |
408 | |
409 | view_id = data_obj.get_object_reference(cr, uid, 'msf_outgoing', 'view_shipment_form') |
410 | return { |
411 | - 'name':_("Shipment"), |
412 | + 'name': _("Shipment"), |
413 | 'view_mode': 'form,tree', |
414 | 'view_id': [view_id and view_id[1] or False], |
415 | 'view_type': 'form', |
416 | @@ -1279,7 +1279,7 @@ |
417 | # the state does not need to be updated - function |
418 | # update actual ship date (shipment_actual_date) to today + time |
419 | today = time.strftime(db_datetime_format) |
420 | - vals = {'shipment_actual_date': today,} |
421 | + vals = {'shipment_actual_date': today, } |
422 | if context.get('source_shipment_address_id', False): |
423 | vals['address_id'] = context['source_shipment_address_id'] |
424 | |
425 | @@ -1308,7 +1308,7 @@ |
426 | pick_obj.write(cr, uid, [new_packing_id], ({'claim': True}), context=context) |
427 | new_packing = pick_obj.browse(cr, uid, new_packing_id, context=context) |
428 | |
429 | - if new_packing.move_lines and pick_obj._get_usb_entity_type(cr, uid) == pick_obj.REMOTE_WAREHOUSE and not context.get('sync_message_execution', False): # RW Sync - set the replicated to True for not syncing it again |
430 | + if new_packing.move_lines and pick_obj._get_usb_entity_type(cr, uid) == pick_obj.REMOTE_WAREHOUSE and not context.get('sync_message_execution', False): # RW Sync - set the replicated to True for not syncing it again |
431 | pick_obj.write(cr, uid, [new_packing_id], {'for_shipment_replicate': True}, context=context) |
432 | |
433 | # update the shipment_date of the corresponding sale order if the date is not set yet - with current date |
434 | @@ -1490,7 +1490,7 @@ |
435 | 'payment_term': payment_term_id, |
436 | 'fiscal_position': partner.property_account_position.id, |
437 | 'date_invoice': context.get('date_inv', False) or today, |
438 | - 'user_id':uid, |
439 | + 'user_id': uid, |
440 | } |
441 | |
442 | cur_id = shipment.pack_family_memory_ids[0].currency_id.id |
443 | @@ -1826,8 +1826,8 @@ |
444 | _name = 'stock.picking' |
445 | |
446 | # For use only in Remote Warehouse |
447 | - CENTRAL_PLATFORM="central_platform" |
448 | - REMOTE_WAREHOUSE="remote_warehouse" |
449 | + CENTRAL_PLATFORM = "central_platform" |
450 | + REMOTE_WAREHOUSE = "remote_warehouse" |
451 | |
452 | def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): |
453 | ''' |
454 | @@ -1855,8 +1855,8 @@ |
455 | pass |
456 | |
457 | res = super(stock_picking, self).fields_view_get(cr, uid, view_id, view_type, context=context, toolbar=toolbar, submenu=submenu) |
458 | - #US-688 Do not show the button new, duplicate in the tree and form view of picking |
459 | - if view_type in ['tree','form'] and res['name'] in ['picking.ticket.form', 'picking.ticket.tree']: |
460 | + # US-688 Do not show the button new, duplicate in the tree and form view of picking |
461 | + if view_type in ['tree', 'form'] and res['name'] in ['picking.ticket.form', 'picking.ticket.tree']: |
462 | root = etree.fromstring(res['arch']) |
463 | root.set('hide_new_button', 'True') |
464 | root.set('hide_delete_button', 'True') |
465 | @@ -1906,7 +1906,7 @@ |
466 | context.update({'picking_type': 'incoming_shipment'}) |
467 | else: |
468 | context.update({'picking_type': 'internal_move'}) |
469 | - context.update({'_terp_view_name': 'Internal Moves'}) # REF-92: Update also the Form view name, otherwise Products to Process |
470 | + context.update({'_terp_view_name': 'Internal Moves'}) # REF-92: Update also the Form view name, otherwise Products to Process |
471 | |
472 | return super(stock_picking, self)._hook_picking_get_view(cr, uid, ids, context=context, *args, **kwargs) |
473 | |
474 | @@ -2702,7 +2702,7 @@ |
475 | res = cr.dictfetchone() |
476 | if res and res['id']: |
477 | seq = 'ir_sequence_%03d' % res['id'] |
478 | - cr.execute("ALTER SEQUENCE " + seq +" RESTART WITH " + str(value_to_force)) |
479 | + cr.execute("ALTER SEQUENCE " + seq + " RESTART WITH " + str(value_to_force)) |
480 | return |
481 | |
482 | def create(self, cr, uid, vals, context=None): |
483 | @@ -2734,12 +2734,12 @@ |
484 | # The following lines are to re-enter explicitly the values, even if they are already set to False |
485 | vals['backorder_id'] = vals.get('backorder_id', False) |
486 | vals['shipment_id'] = vals.get('shipment_id', False) |
487 | - else: # if it is a CONSO-OUT --_> set the state for replicating back to CP |
488 | + else: # if it is a CONSO-OUT --_> set the state for replicating back to CP |
489 | if 'name' in vals and 'OUT-CONSO' in vals['name']: |
490 | vals.update(already_replicated=False,) |
491 | - #UF-2531: When the INT from scratch created in RW, just set it for sync to CP |
492 | - if usb_entity == self.REMOTE_WAREHOUSE and (('type' in vals and vals['type']=='internal') or |
493 | - ('origin' not in vals or vals['origin']==False)): #US-702 Sync also the OUT from scratch in RW |
494 | + # UF-2531: When the INT from scratch created in RW, just set it for sync to CP |
495 | + if usb_entity == self.REMOTE_WAREHOUSE and (('type' in vals and vals['type'] == 'internal') or |
496 | + ('origin' not in vals or vals['origin'] == False)): # US-702 Sync also the OUT from scratch in RW |
497 | vals.update(already_replicated=False,) |
498 | |
499 | # the action adds subtype in the context depending from which screen it is created |
500 | @@ -2771,10 +2771,10 @@ |
501 | |
502 | if not vals['backorder_id']: |
503 | # creation of *draft* picking ticket |
504 | - vals.update(sequence_id=self.create_sequence(cr, uid, {'name':vals['name'], |
505 | - 'code':vals['name'], |
506 | - 'prefix':'', |
507 | - 'padding':2}, context=context)) |
508 | + vals.update(sequence_id=self.create_sequence(cr, uid, {'name': vals['name'], |
509 | + 'code': vals['name'], |
510 | + 'prefix': '', |
511 | + 'padding': 2}, context=context)) |
512 | |
513 | if 'subtype' in vals and vals['subtype'] == 'packing': |
514 | # creation of a new packing |
515 | @@ -2783,10 +2783,10 @@ |
516 | |
517 | if not vals['backorder_id']: |
518 | # creation of *draft* picking ticket |
519 | - vals.update(sequence_id=self.create_sequence(cr, uid, {'name':vals['name'], |
520 | - 'code':vals['name'], |
521 | - 'prefix':'', |
522 | - 'padding':2, |
523 | + vals.update(sequence_id=self.create_sequence(cr, uid, {'name': vals['name'], |
524 | + 'code': vals['name'], |
525 | + 'prefix': '', |
526 | + 'padding': 2, |
527 | }, context=context)) |
528 | |
529 | # create packing object |
530 | @@ -2797,7 +2797,7 @@ |
531 | if new_packing and ((new_packing.type == 'out' and new_packing.subtype == 'picking' and new_packing.name.find('-') == -1) or |
532 | (new_packing.type == 'in' and new_packing.subtype == 'standard') or |
533 | (new_packing.type == 'internal' and new_packing.subtype == 'standard' and new_packing.sale_id)): |
534 | - for_update = {'already_replicated':False} |
535 | + for_update = {'already_replicated': False} |
536 | |
537 | ''' |
538 | Only get the current sequence for the IN object at the moment, as we still have problem with the naming of documents functionally |
539 | @@ -2911,7 +2911,7 @@ |
540 | shipment_ids = [n['id']] |
541 | found = True |
542 | break |
543 | - if not found: # If the name is new, then create a new Shipment |
544 | + if not found: # If the name is new, then create a new Shipment |
545 | shipment_ids = [] |
546 | |
547 | # only one 'draft' shipment should be available |
548 | @@ -2946,10 +2946,10 @@ |
549 | 'shipment_actual_date': rts, |
550 | 'sale_id': vals.get('sale_id', False), |
551 | 'transport_type': sale_id and sale_order_obj.read(cr, uid, sale_id, ['transport_type'], context=context)['transport_type'] or False, |
552 | - 'sequence_id': self.create_sequence(cr, uid, {'name':name, |
553 | - 'code':name, |
554 | - 'prefix':'', |
555 | - 'padding':2}, context=context)} |
556 | + 'sequence_id': self.create_sequence(cr, uid, {'name': name, |
557 | + 'code': name, |
558 | + 'prefix': '', |
559 | + 'padding': 2}, context=context)} |
560 | |
561 | shipment_id = shipment_obj.create(cr, uid, values, context=context) |
562 | shipment_obj.log(cr, uid, shipment_id, _('The new Draft Shipment %s has been created.') % (name,)) |
563 | @@ -3225,7 +3225,7 @@ |
564 | search_view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'view_picking_out_search') |
565 | search_view_id = search_view_id and search_view_id[1] or False |
566 | context.update({'picking_type': 'delivery_order', 'view_id': view_id, 'search_view_id': search_view_id}) |
567 | - return {'name':_("Delivery Orders"), |
568 | + return {'name': _("Delivery Orders"), |
569 | 'view_mode': 'form,tree', |
570 | 'view_id': [view_id, tree_view_id], |
571 | 'search_view_id': search_view_id, |
572 | @@ -3287,10 +3287,10 @@ |
573 | 'subtype': 'picking', |
574 | 'converted_to_standard': False, |
575 | 'state': 'draft', |
576 | - 'sequence_id': self.create_sequence(cr, uid, {'name':new_name, |
577 | - 'code':new_name, |
578 | - 'prefix':'', |
579 | - 'padding':2}, context=context) |
580 | + 'sequence_id': self.create_sequence(cr, uid, {'name': new_name, |
581 | + 'code': new_name, |
582 | + 'prefix': '', |
583 | + 'padding': 2}, context=context) |
584 | } |
585 | |
586 | self.write(cr, uid, [out.id], default_vals, context=context) |
587 | @@ -3465,13 +3465,13 @@ |
588 | if need_new_picking and not rw_full_process: |
589 | cp_vals = { |
590 | 'name': sequence_obj.get(cr, uid, 'stock.picking.%s' % (picking.type)), |
591 | - 'move_lines' : [], |
592 | - 'state':'draft', |
593 | + 'move_lines': [], |
594 | + 'state': 'draft', |
595 | } |
596 | context['allow_copy'] = True |
597 | |
598 | new_picking_id = picking_obj.copy(cr, uid, picking.id, cp_vals, context=context) |
599 | - # US-327: if it's an internal picking and in partial process, then set the already_replicated to True, so no replicate needed |
600 | + # US-327: if it's an internal picking and in partial process, then set the already_replicated to True, so no replicate needed |
601 | if picking.type == 'internal' and (usb_entity == self.REMOTE_WAREHOUSE or context.get('sync_message_execution', False)): |
602 | to_be_replicated = False |
603 | self.write(cr, uid, [new_picking_id], {'already_replicated': True}, context=context) |
604 | @@ -3646,14 +3646,14 @@ |
605 | # Create the new picking object |
606 | # A sequence for each draft picking ticket is used for the picking ticket |
607 | |
608 | - #UF-2531: Use the name of the PICK sent from the RW sync if it's the case |
609 | + # UF-2531: Use the name of the PICK sent from the RW sync if it's the case |
610 | pick_name = False |
611 | already_replicated = False |
612 | if 'associate_pick_name' in context: |
613 | pick_name = context.get('associate_pick_name', False) |
614 | del context['associate_pick_name'] |
615 | already_replicated = True |
616 | - #US-803: Set the pick name that given from sync |
617 | + # US-803: Set the pick name that given from sync |
618 | elif 'rw_backorder_name' in context: |
619 | pick_name = context.get('rw_backorder_name', False) |
620 | del context['rw_backorder_name'] |
621 | @@ -3767,7 +3767,7 @@ |
622 | if usb_entity == self.REMOTE_WAREHOUSE and not context.get('sync_message_execution', False): |
623 | self._manual_create_rw_messages(cr, uid, context=context) |
624 | |
625 | - return {'name':_("Picking Ticket"), |
626 | + return {'name': _("Picking Ticket"), |
627 | 'view_mode': 'form,tree', |
628 | 'view_id': [view_id], |
629 | 'view_type': 'form', |
630 | @@ -3864,7 +3864,7 @@ |
631 | }) |
632 | |
633 | # Create the new ppl object |
634 | - #US-702: If the PPL is from RW, then add the suffix RW, if it is created via RW Sync in CP, then use the one from the context (name sent by RW instance) |
635 | + # US-702: If the PPL is from RW, then add the suffix RW, if it is created via RW Sync in CP, then use the one from the context (name sent by RW instance) |
636 | ppl_number = 'PPL/%s' % picking.name.split("/")[1] |
637 | if context.get('rw_backorder_name', False): |
638 | ppl_number = context.get('rw_backorder_name') |
639 | @@ -3979,7 +3979,7 @@ |
640 | diff_qty = move_vals['initial_qty'] - move_vals['processed_qty'] |
641 | if diff_qty != 0.00: |
642 | # Original move from the draft picking ticket which will be updated |
643 | - if move_vals['move'].backmove_id: #2531: Added a check to make sure the following code can be run correctly |
644 | + if move_vals['move'].backmove_id: # 2531: Added a check to make sure the following code can be run correctly |
645 | original_move_id = move_vals['move'].backmove_id.id |
646 | original_vals = move_obj.browse(cr, uid, original_move_id, context=context) |
647 | if original_vals.product_uom.id != move_vals['move'].product_uom.id: |
648 | @@ -4020,7 +4020,7 @@ |
649 | if usb_entity == self.REMOTE_WAREHOUSE and not context.get('sync_message_execution', False): |
650 | self._manual_create_rw_messages(cr, uid, context=context) |
651 | |
652 | - return {'name':_("Pre-Packing List"), |
653 | + return {'name': _("Pre-Packing List"), |
654 | 'view_mode': 'form,tree', |
655 | 'view_id': [view_id], |
656 | 'view_type': 'form', |
657 | @@ -4225,7 +4225,7 @@ |
658 | 'shipment_id': False, |
659 | 'origin': picking.origin, |
660 | 'move_lines': [], |
661 | - 'date': today, # Set date as today for the new PACK object |
662 | + 'date': today, # Set date as today for the new PACK object |
663 | } |
664 | |
665 | # Change the context for copy |
666 | @@ -4239,7 +4239,7 @@ |
667 | new_packing_id = self.copy(cr, uid, picking.id, pack_values, context=context) |
668 | if picking.claim: |
669 | self.write(cr, uid, new_packing_id, ({'claim': True}), context=context) |
670 | - if usb_entity == self.REMOTE_WAREHOUSE and not context.get('sync_message_execution', False): # RW Sync - set the replicated to True for not syncing it again |
671 | + if usb_entity == self.REMOTE_WAREHOUSE and not context.get('sync_message_execution', False): # RW Sync - set the replicated to True for not syncing it again |
672 | self.write(cr, uid, new_packing_id, {'already_replicated': False}, context=context) |
673 | |
674 | # Reset context values |
675 | @@ -4336,7 +4336,7 @@ |
676 | shipment_id = obj.shipment_id.id |
677 | shipment_name = obj.shipment_id.name |
678 | |
679 | - if context.get('rw_shipment_name', False) and context.get('sync_message_execution', False): # RW Sync - update the shipment name same as on RW instance |
680 | + if context.get('rw_shipment_name', False) and context.get('sync_message_execution', False): # RW Sync - update the shipment name same as on RW instance |
681 | new_name = context.get('rw_shipment_name') |
682 | if new_name != obj.shipment_id.name: |
683 | del context['rw_shipment_name'] |
684 | @@ -4356,7 +4356,7 @@ |
685 | |
686 | view_id = data_obj.get_object_reference(cr, uid, 'msf_outgoing', 'view_shipment_form') |
687 | view_id = view_id and view_id[1] or False |
688 | - return {'name':_("Shipment"), |
689 | + return {'name': _("Shipment"), |
690 | 'view_mode': 'form,tree', |
691 | 'view_id': [view_id], |
692 | 'view_type': 'form', |
693 | @@ -4490,12 +4490,12 @@ |
694 | ('wizard_id', '=', picking_processor_wiz.id), |
695 | ('move_id', '=', draft_move_id), |
696 | ], context=context) |
697 | - if not save_as_draft_move: # then create the SaD line: |
698 | + if not save_as_draft_move: # then create the SaD line: |
699 | move_data = self.pool.get('create.picking.move.processor')._get_line_data(cr, uid, wizard=picking_processor_wiz, move=line.move_id.backmove_id, context=context) |
700 | move_data.update({'quantity': return_qty}) |
701 | save_as_draft_move = self.pool.get('create.picking.move.processor').create(cr, uid, move_data, context=context) |
702 | save_as_draft_move = [save_as_draft_move] |
703 | - else: # update Sad line |
704 | + else: # update Sad line |
705 | for sad_move in self.pool.get('create.picking.move.processor').browse(cr, uid, save_as_draft_move, context=context): |
706 | self.pool.get('create.picking.move.processor').write(cr, uid, sad_move.id, { |
707 | 'ordered_quantity': sad_move.ordered_quantity + return_qty, |
708 | @@ -4537,17 +4537,17 @@ |
709 | view_id = view_id and view_id[1] or False |
710 | context['picking_type'] = 'picking_ticket' |
711 | |
712 | - #UF-2531: Create manually the message for the return pack of the ship |
713 | + # UF-2531: Create manually the message for the return pack of the ship |
714 | self._manual_create_rw_picking_message(cr, uid, picking.id, return_info, 'usb_picking_return_products', context=context) |
715 | |
716 | context.update({'picking_type': 'picking_ticket'}) |
717 | return { |
718 | - 'name':_("Picking Ticket"), |
719 | + 'name': _("Picking Ticket"), |
720 | 'view_mode': 'form,tree', |
721 | 'view_id': [view_id], |
722 | 'view_type': 'form', |
723 | 'res_model': 'stock.picking', |
724 | - 'res_id': draft_picking_id , |
725 | + 'res_id': draft_picking_id, |
726 | 'type': 'ir.actions.act_window', |
727 | 'target': 'crush', |
728 | 'context': context, |
729 | @@ -4641,12 +4641,12 @@ |
730 | ('wizard_id', '=', picking_processor_wiz.id), |
731 | ('move_id', '=', draft_move.id), |
732 | ], context=context) |
733 | - if not save_as_draft_move: # then create the SaD line: |
734 | + if not save_as_draft_move: # then create the SaD line: |
735 | move_data = self.pool.get('create.picking.move.processor')._get_line_data(cr, uid, wizard=picking_processor_wiz, move=move, context=context) |
736 | move_data.update({'quantity': move.product_qty}) |
737 | save_as_draft_move = self.pool.get('create.picking.move.processor').create(cr, uid, move_data, context=context) |
738 | save_as_draft_move = [save_as_draft_move] |
739 | - else: # update Sad line |
740 | + else: # update Sad line |
741 | for sad_move in self.pool.get('create.picking.move.processor').browse(cr, uid, save_as_draft_move, context=context): |
742 | self.pool.get('create.picking.move.processor').write(cr, uid, sad_move.id, { |
743 | 'ordered_quantity': sad_move.ordered_quantity + move.product_qty, |
744 | @@ -4982,10 +4982,10 @@ |
745 | _columns = {'from_pack': fields.integer(string='From p.'), |
746 | 'to_pack': fields.integer(string='To p.'), |
747 | 'pack_type': fields.many2one('pack.type', string='Pack Type'), |
748 | - 'length' : fields.float(digits=(16, 2), string='Length [cm]'), |
749 | - 'width' : fields.float(digits=(16, 2), string='Width [cm]'), |
750 | - 'height' : fields.float(digits=(16, 2), string='Height [cm]'), |
751 | - 'weight' : fields.float(digits=(16, 2), string='Weight p.p [kg]'), |
752 | + 'length': fields.float(digits=(16, 2), string='Length [cm]'), |
753 | + 'width': fields.float(digits=(16, 2), string='Width [cm]'), |
754 | + 'height': fields.float(digits=(16, 2), string='Height [cm]'), |
755 | + 'weight': fields.float(digits=(16, 2), string='Weight p.p [kg]'), |
756 | # 'pack_family_id': fields.many2one('pack.family', string='Pack Family'), |
757 | 'initial_location': fields.many2one('stock.location', string='Initial Picking Location'), |
758 | # relation to the corresponding move from draft **picking** ticket object |
759 | @@ -5015,7 +5015,7 @@ |
760 | 'stock.move': (lambda obj, cr, uid, ids, c={}: ids, ['picking_id'], 10), |
761 | 'stock.picking': (_get_picking, ['shipment_id'], 10), |
762 | } |
763 | - ), |
764 | + ), |
765 | # Fields used for domain |
766 | 'location_virtual_id': fields.many2one('stock.location', string='Virtual location'), |
767 | 'location_output_id': fields.many2one('stock.location', string='Output location'), |
768 | @@ -5090,26 +5090,6 @@ |
769 | signal = 'cancel_r' if resource else 'cancel' |
770 | wf_service.trg_validate(uid, 'purchase.order.line', move.purchase_line_id.id, signal, cr) |
771 | |
772 | - not_done_moves = self.pool.get('stock.move').search(cr, uid, [ |
773 | - ('purchase_line_id', '=', move.purchase_line_id.id), |
774 | - ('state', 'not in', ['cancel', 'cancel_r', 'done']), |
775 | - ('picking_id.type', '=', 'in'), |
776 | - ], context=context) |
777 | - if (not not_done_moves) or all([x in ids for x in not_done_moves]): |
778 | - # all in lines processed or will be processed for this po line |
779 | - wf_service.trg_validate(uid, 'purchase.order.line', move.purchase_line_id.id, 'done', cr) |
780 | - |
781 | - if move.purchase_line_id.is_line_split and move.purchase_line_id.original_line_id: |
782 | - # check if the original PO line can be set to done |
783 | - not_done_moves = self.pool.get('stock.move').search(cr, uid, [ |
784 | - ('purchase_line_id', '=', move.purchase_line_id.original_line_id.id), |
785 | - ('state', 'not in', ['cancel', 'cancel_r', 'done']), |
786 | - ('picking_id.type', '=', 'in'), |
787 | - ], context=context) |
788 | - if (not not_done_moves) or all([x in ids for x in not_done_moves]): |
789 | - # all in lines processed or will be processed for this po line |
790 | - wf_service.trg_validate(uid, 'purchase.order.line', move.purchase_line_id.original_line_id.id, 'done', cr) |
791 | - |
792 | sol_ids = pol_obj.get_sol_ids_from_pol_ids(cr, uid, [move.purchase_line_id.id], context=context) |
793 | for sol in sol_obj.browse(cr, uid, sol_ids, context=context): |
794 | # If the line will be sourced in another way, do not cancel the OUT move |
795 | @@ -5118,15 +5098,15 @@ |
796 | continue |
797 | |
798 | diff_qty = uom_obj._compute_qty(cr, uid, move.product_uom.id, move.product_qty, sol.product_uom.id) |
799 | - if move.picking_id.partner_id2.partner_type not in ['internal','section','intermission']: |
800 | - sol_obj.update_or_cancel_line(cr, uid, sol.id, diff_qty, resource=resource,context=context) |
801 | + if move.picking_id.partner_id2.partner_type not in ['internal', 'section', 'intermission']: |
802 | + sol_obj.update_or_cancel_line(cr, uid, sol.id, diff_qty, resource=resource, context=context) |
803 | # Cancel the remaining OUT line |
804 | if diff_qty < sol.product_uom_qty: |
805 | data_back = self.create_data_back(move) |
806 | out_move = self.get_mirror_move(cr, uid, [move.id], data_back, context=context)[move.id] |
807 | out_move_id = False |
808 | if out_move['moves']: |
809 | - out_move_id = sorted(out_move['moves'], key=lambda x: abs(x.product_qty-diff_qty))[0].id |
810 | + out_move_id = sorted(out_move['moves'], key=lambda x: abs(x.product_qty - diff_qty))[0].id |
811 | elif out_move['move_id']: |
812 | out_move_id = out_move['move_id'] |
813 | |
814 | @@ -5134,6 +5114,26 @@ |
815 | context.setdefault('not_resource_move', []).append(out_move_id) |
816 | self.action_cancel(cr, uid, [out_move_id], context=context) |
817 | |
818 | + not_done_moves = self.pool.get('stock.move').search(cr, uid, [ |
819 | + ('purchase_line_id', '=', move.purchase_line_id.id), |
820 | + ('state', 'not in', ['cancel', 'cancel_r', 'done']), |
821 | + ('picking_id.type', '=', 'in'), |
822 | + ], context=context) |
823 | + if (not not_done_moves) or all([x in ids for x in not_done_moves]): |
824 | + # all in lines processed or will be processed for this po line |
825 | + wf_service.trg_validate(uid, 'purchase.order.line', move.purchase_line_id.id, 'done', cr) |
826 | + |
827 | + if move.purchase_line_id.is_line_split and move.purchase_line_id.original_line_id: |
828 | + # check if the original PO line can be set to done |
829 | + not_done_moves = self.pool.get('stock.move').search(cr, uid, [ |
830 | + ('purchase_line_id', '=', move.purchase_line_id.original_line_id.id), |
831 | + ('state', 'not in', ['cancel', 'cancel_r', 'done']), |
832 | + ('picking_id.type', '=', 'in'), |
833 | + ], context=context) |
834 | + if (not not_done_moves) or all([x in ids for x in not_done_moves]): |
835 | + # all in lines processed or will be processed for this po line |
836 | + wf_service.trg_validate(uid, 'purchase.order.line', move.purchase_line_id.original_line_id.id, 'done', cr) |
837 | + |
838 | self.pool.get('purchase.order.line').update_fo_lines(cr, uid, [move.purchase_line_id.id], context=context) |
839 | |
840 | elif move.sale_line_id and (pick_type == 'internal' or (pick_type == 'out' and subtype_ok)): |
841 | @@ -5306,10 +5306,10 @@ |
842 | 'from_pack': fields.integer(string='From p.'), |
843 | 'to_pack': fields.integer(string='To p.'), |
844 | 'pack_type': fields.many2one('pack.type', string='Pack Type'), |
845 | - 'length' : fields.float(digits=(16, 2), string='Length [cm]'), |
846 | - 'width' : fields.float(digits=(16, 2), string='Width [cm]'), |
847 | - 'height' : fields.float(digits=(16, 2), string='Height [cm]'), |
848 | - 'weight' : fields.float(digits=(16, 2), string='Weight p.p [kg]'), |
849 | + 'length': fields.float(digits=(16, 2), string='Length [cm]'), |
850 | + 'width': fields.float(digits=(16, 2), string='Width [cm]'), |
851 | + 'height': fields.float(digits=(16, 2), string='Height [cm]'), |
852 | + 'weight': fields.float(digits=(16, 2), string='Weight p.p [kg]'), |
853 | # functions |
854 | 'move_lines': fields.function(_vals_get, method=True, type='one2many', relation='stock.move', string='Stock Moves', multi='get_vals',), |
855 | 'fake_state': fields.function(_vals_get, method=True, type='char', String='Fake state', multi='get_vals'), |
856 | |
857 | === modified file 'bin/addons/msf_outgoing/report/labels.rml' |
858 | --- bin/addons/msf_outgoing/report/labels.rml 2017-04-24 13:16:28 +0000 |
859 | +++ bin/addons/msf_outgoing/report/labels.rml 2018-02-12 10:19:50 +0000 |
860 | @@ -137,7 +137,7 @@ |
861 | <para style="P4">[[ stock_picking.ppl_customize_label.weight and translate('Weight:') or ' ' ]]</para> |
862 | </td> |
863 | <td> |
864 | - <para style="P3">[[ stock_picking.ppl_customize_label.weight and str(pack_family.weight or '0.0') + ' kg' or ' ' ]]</para> |
865 | + <para style="P3">[[ stock_picking.ppl_customize_label.weight and str(stock_picking.total_weight or '0.0') + ' kg' or ' ' ]]</para> |
866 | </td> |
867 | </tr> |
868 | <tr> |
869 | |
870 | === modified file 'bin/addons/msf_profile/i18n/fr_MF.po' |
871 | --- bin/addons/msf_profile/i18n/fr_MF.po 2017-12-12 10:23:48 +0000 |
872 | +++ bin/addons/msf_profile/i18n/fr_MF.po 2018-02-12 10:19:50 +0000 |
873 | @@ -101136,3 +101136,22 @@ |
874 | #, python-format |
875 | msgid "You must select at least one currency of transfers." |
876 | msgstr "Vous devez sélectionner au moins une devise de virement." |
877 | + |
878 | +#. module: account_corrections |
879 | +#: code:addons/account_corrections/account_move_line.py:567 |
880 | +#, python-format |
881 | +msgid "No OD-Extra Accounting Analytic Journal found!" |
882 | +msgstr "Aucun Journal Analytique de type OD-Extra Accounting n'a été trouvé !" |
883 | + |
884 | +#. module: register_accounting |
885 | +#: code:addons/register_accounting/wizard/wizard_register_import.py:490 |
886 | +#, python-format |
887 | +msgid "Line %s. Third Party %s not found or not compatible with the Type for specific treatment of the account '%s - %s'." |
888 | +msgstr "Ligne %s. Tiers %s non trouvé ou non compatible avec le Type pour traitement spécifique du compte '%s - %s'." |
889 | + |
890 | +#. modules: purchase, sale |
891 | +#: code:addons/purchase/purchase_workflow.py:413 |
892 | +#: code:addons/sale/sale_workflow.py:491 |
893 | +#, python-format |
894 | +msgid "You can't use taxes with an intermission partner." |
895 | +msgstr "Vous ne pouvez pas utiliser de taxes avec un partenaire intermission." |
896 | |
897 | === modified file 'bin/addons/msf_supply_doc_export/wizard/po_follow_up.py' |
898 | --- bin/addons/msf_supply_doc_export/wizard/po_follow_up.py 2016-01-29 15:34:00 +0000 |
899 | +++ bin/addons/msf_supply_doc_export/wizard/po_follow_up.py 2018-02-12 10:19:50 +0000 |
900 | @@ -22,7 +22,7 @@ |
901 | from osv import osv |
902 | from osv import fields |
903 | from tools.translate import _ |
904 | -from lxml import etree |
905 | +from urllib import quote_plus |
906 | |
907 | import time |
908 | from datetime import datetime |
909 | @@ -42,116 +42,116 @@ |
910 | _description = 'PO Follow up report wizard' |
911 | |
912 | _columns = { |
913 | - 'po_id':fields.many2one('purchase.order',string="Order Reference", help="Unique number of the Purchase Order. Optional", required=False), |
914 | - 'state': fields.selection(PURCHASE_ORDER_STATE_SELECTION, 'State', help="The state of the purchase order. Optional", select=True, required=False), |
915 | - 'po_date_from':fields.date("PO date from", required="False"), |
916 | - 'po_date_thru':fields.date("PO date to", required="False"), |
917 | - 'partner_id':fields.many2one('res.partner', 'Supplier', required=False), |
918 | - 'project_ref':fields.char('Supplier reference', size=64, required=False), |
919 | - 'export_format': fields.selection([('xls', 'Excel'), ('pdf', 'PDF')], string="Export format", required=True), |
920 | - 'background_time': fields.integer('Number of second before background processing'), |
921 | + 'po_id': fields.many2one('purchase.order', string="Order Reference", help="Unique number of the Purchase Order. Optional", required=False), |
922 | + 'state': fields.selection(PURCHASE_ORDER_STATE_SELECTION, 'State', help="The state of the purchase order. Optional", select=True, required=False), |
923 | + 'po_date_from': fields.date("PO date from", required="False"), |
924 | + 'po_date_thru': fields.date("PO date to", required="False"), |
925 | + 'partner_id': fields.many2one('res.partner', 'Supplier', required=False), |
926 | + 'project_ref': fields.char('Supplier reference', size=64, required=False), |
927 | + 'export_format': fields.selection([('xls', 'Excel'), ('pdf', 'PDF')], string="Export format", required=True), |
928 | + 'background_time': fields.integer('Number of second before background processing'), |
929 | } |
930 | - |
931 | + |
932 | _defaults = { |
933 | 'export_format': lambda *a: 'xls', |
934 | 'background_time': lambda *a: 20, |
935 | } |
936 | - |
937 | + |
938 | def button_validate(self, cr, uid, ids, context=None): |
939 | - wiz = self.browse(cr,uid,ids)[0] |
940 | + wiz = self.browse(cr, uid, ids)[0] |
941 | |
942 | domain = [('rfq_ok', '=', False)] |
943 | states = {} |
944 | for state_val, state_string in PURCHASE_ORDER_STATE_SELECTION: |
945 | states[state_val] = state_string |
946 | - report_parms = { |
947 | + report_parms = { |
948 | 'title': 'PO Follow Up per Supplier', |
949 | 'run_date': time.strftime("%d/%m/%Y"), |
950 | 'date_from': '', |
951 | 'date_thru': '', |
952 | 'state': '', |
953 | - 'supplier':'' |
954 | + 'supplier': '' |
955 | } |
956 | - |
957 | + |
958 | # PO number |
959 | if wiz.po_id: |
960 | - domain.append(('id','=', wiz.po_id.id)) |
961 | - |
962 | + domain.append(('id', '=', wiz.po_id.id)) |
963 | + |
964 | # Status |
965 | if wiz.state: |
966 | - domain.append(('state','=', wiz.state)) |
967 | + domain.append(('state', '=', wiz.state)) |
968 | report_parms['state'] = states[wiz.state] |
969 | - |
970 | + |
971 | # Dates |
972 | if wiz.po_date_from: |
973 | - domain.append(('date_order','>=',wiz.po_date_from)) |
974 | - tmp = datetime.strptime(wiz.po_date_from,"%Y-%m-%d") |
975 | + domain.append(('date_order', '>=', wiz.po_date_from)) |
976 | + tmp = datetime.strptime(wiz.po_date_from, "%Y-%m-%d") |
977 | report_parms['date_from'] = tmp.strftime("%d/%m/%Y") |
978 | |
979 | if wiz.po_date_thru: |
980 | - domain.append(('date_order','<=',wiz.po_date_thru)) |
981 | - tmp = datetime.strptime(wiz.po_date_thru,"%Y-%m-%d") |
982 | + domain.append(('date_order', '<=', wiz.po_date_thru)) |
983 | + tmp = datetime.strptime(wiz.po_date_thru, "%Y-%m-%d") |
984 | report_parms['date_thru'] = tmp.strftime("%d/%m/%Y") |
985 | |
986 | # Supplier |
987 | if wiz.partner_id: |
988 | - domain.append(('partner_id','=', wiz.partner_id.id)) |
989 | - report_parms['supplier'] = wiz.partner_id.name |
990 | - |
991 | + domain.append(('partner_id', '=', wiz.partner_id.id)) |
992 | + report_parms['supplier'] = quote_plus(wiz.partner_id.name) |
993 | + |
994 | # Supplier Reference |
995 | if wiz.project_ref: |
996 | - domain.append(('project_ref','like',wiz.project_ref)) |
997 | - |
998 | + domain.append(('project_ref', 'like', wiz.project_ref)) |
999 | + |
1000 | # get the PO ids based on the selected criteria |
1001 | po_obj = self.pool.get('purchase.order') |
1002 | po_ids = po_obj.search(cr, uid, domain) |
1003 | - |
1004 | + |
1005 | if not po_ids: |
1006 | raise osv.except_osv(_('Error'), _('No Purchase Orders match the specified criteria.')) |
1007 | return True |
1008 | - |
1009 | + |
1010 | report_header = [] |
1011 | report_header.append(report_parms['title']) |
1012 | - |
1013 | + |
1014 | report_header_line2 = '' |
1015 | if wiz.partner_id: |
1016 | report_header_line2 += wiz.partner_id.name |
1017 | - report_header_line2 += ' Report run date: ' + time.strftime("%d/%m/%Y") #TODO to be removed |
1018 | + report_header_line2 += ' Report run date: ' + time.strftime("%d/%m/%Y") # TODO to be removed |
1019 | if wiz.po_date_from: |
1020 | report_header_line2 += wiz.po_date_from |
1021 | - #UF-2496: Minor fix to append the "date from" correctly into header |
1022 | + # UF-2496: Minor fix to append the "date from" correctly into header |
1023 | if wiz.po_date_thru: |
1024 | if wiz.po_date_from: |
1025 | report_header_line2 += ' - ' |
1026 | report_header_line2 += wiz.po_date_thru |
1027 | - report_header.append(report_header_line2) |
1028 | - |
1029 | - datas = {'ids': po_ids, 'report_header': report_header, 'report_parms': report_parms} |
1030 | + report_header.append(quote_plus(report_header_line2)) |
1031 | + |
1032 | + datas = {'ids': po_ids, 'report_header': report_header, 'report_parms': report_parms} |
1033 | if wiz.export_format == 'xls': |
1034 | report_name = 'po.follow.up_xls' |
1035 | else: |
1036 | report_name = 'po.follow.up_rml' |
1037 | - |
1038 | + |
1039 | if wiz.po_date_from: |
1040 | - domain.append(('date_order','>=',wiz.po_date_from)) |
1041 | - |
1042 | + domain.append(('date_order', '>=', wiz.po_date_from)) |
1043 | + |
1044 | background_id = self.pool.get('memory.background.report').create(cr, uid, {'file_name': report_name, 'report_name': report_name}, context=context) |
1045 | context['background_id'] = background_id |
1046 | context['background_time'] = wiz.background_time |
1047 | - |
1048 | - return { |
1049 | - 'type': 'ir.actions.report.xml', |
1050 | - 'report_name': report_name, |
1051 | - 'datas': datas, |
1052 | - 'nodestroy': True, |
1053 | - 'context': context, |
1054 | + |
1055 | + return { |
1056 | + 'type': 'ir.actions.report.xml', |
1057 | + 'report_name': report_name, |
1058 | + 'datas': datas, |
1059 | + 'nodestroy': True, |
1060 | + 'context': context, |
1061 | } |
1062 | - |
1063 | + |
1064 | po_follow_up() |
1065 | |
1066 | |
1067 | # already defined in account_mcdb/wizard/output_currency_for_export.py |
1068 | -#class background_report(osv.osv_memory): |
1069 | +# class background_report(osv.osv_memory): |
1070 | # _name = 'memory.background.report' |
1071 | # _description = 'Report result' |
1072 | # |
1073 | @@ -164,4 +164,4 @@ |
1074 | # } |
1075 | # def update_percent(self, cr, uid, ids, percent, context=None): |
1076 | # self.write(cr, uid, ids, {'percent': percent}) |
1077 | -#background_report() |
1078 | +# background_report() |
1079 | |
1080 | === modified file 'bin/addons/purchase/purchase_workflow.py' |
1081 | --- bin/addons/purchase/purchase_workflow.py 2017-12-14 17:30:55 +0000 |
1082 | +++ bin/addons/purchase/purchase_workflow.py 2018-02-12 10:19:50 +0000 |
1083 | @@ -49,7 +49,7 @@ |
1084 | ''' |
1085 | if context is None: |
1086 | context = {} |
1087 | - if isinstance(ids, (int,long)): |
1088 | + if isinstance(ids, (int, long)): |
1089 | ids = [ids] |
1090 | |
1091 | for pol in self.browse(cr, uid, ids, context=context): |
1092 | @@ -73,7 +73,7 @@ |
1093 | domain = [('purchase_line_id', '=', pol.id), ('type', '=', 'in'), ('state', '=', 'assigned')] |
1094 | linked_in_move = self.pool.get('stock.move').search(cr, uid, domain, context=context) |
1095 | if linked_in_move: |
1096 | - self.pool.get('stock.move').action_cancel(cr, uid, linked_in_move, context=context) |
1097 | + self.pool.get('stock.move').action_cancel(cr, uid, linked_in_move, context=context) |
1098 | |
1099 | return True |
1100 | |
1101 | @@ -84,7 +84,7 @@ |
1102 | ''' |
1103 | if context is None: |
1104 | context = {} |
1105 | - if isinstance(ids, (int,long)): |
1106 | + if isinstance(ids, (int, long)): |
1107 | ids = [ids] |
1108 | |
1109 | for pol in self.browse(cr, uid, ids, fields_to_fetch=['price_unit'], context=context): |
1110 | @@ -99,7 +99,7 @@ |
1111 | ''' |
1112 | if context is None: |
1113 | context = {} |
1114 | - if isinstance(ids, (int,long)): |
1115 | + if isinstance(ids, (int, long)): |
1116 | ids = [ids] |
1117 | |
1118 | for pol in self.browse(cr, uid, ids, context=context): |
1119 | @@ -117,7 +117,7 @@ |
1120 | ], context=context) |
1121 | so_id = so_id and so_id[0] or False |
1122 | if not so_id: |
1123 | - continue # no sale order linked to our PO line |
1124 | + continue # no sale order linked to our PO line |
1125 | sale_order = self.pool.get('sale.order').browse(cr, uid, so_id, context=context) |
1126 | if sale_order.state == 'cancel' and sale_order.procurement_request: |
1127 | to_trigger = True |
1128 | @@ -126,7 +126,7 @@ |
1129 | sale_order = pol.linked_sol_id.order_id |
1130 | else: |
1131 | # case of PO line from scratch, nothing to update |
1132 | - continue |
1133 | + continue |
1134 | |
1135 | # convert from currency of pol to currency of sol |
1136 | price_unit_converted = self.pool.get('res.currency').compute(cr, uid, pol.currency_id.id, sale_order.currency_id.id, pol.price_unit or 0.0, |
1137 | @@ -204,14 +204,14 @@ |
1138 | # the right OUT move (moves are already splits at this level): |
1139 | if sol_values['is_line_split']: |
1140 | linked_out_moves = self.pool.get('stock.move').search(cr, uid, [ |
1141 | - ('sale_line_id', '=', sol_values['original_line_id']), |
1142 | - ('type', '=', 'out')], |
1143 | + ('sale_line_id', '=', sol_values['original_line_id']), |
1144 | + ('type', '=', 'out')], |
1145 | context=context) |
1146 | if len(linked_out_moves) > 1: |
1147 | for out_move in self.pool.get('stock.move').browse(cr, uid, linked_out_moves, context=context): |
1148 | if out_move.state in ('assigned', 'confirmed') and out_move.product_qty == sol_values['product_uom_qty']: |
1149 | self.pool.get('stock.move').write(cr, uid, [out_move.id], {'sale_line_id': new_sol}, context=context) |
1150 | - else: # update FO line |
1151 | + else: # update FO line |
1152 | self.pool.get('sale.order.line').write(cr, uid, [pol.linked_sol_id.id], sol_values, context=context) |
1153 | |
1154 | |
1155 | @@ -226,7 +226,7 @@ |
1156 | ''' |
1157 | if context is None: |
1158 | context = {} |
1159 | - if isinstance(ids, (int,long)): |
1160 | + if isinstance(ids, (int, long)): |
1161 | ids = [ids] |
1162 | if not fo_id: |
1163 | raise Exception, "No parent Sale Order given for the new Sale Order line" |
1164 | @@ -305,10 +305,10 @@ |
1165 | ''' |
1166 | if context is None: |
1167 | context = {} |
1168 | - if isinstance(ids, (int,long)): |
1169 | + if isinstance(ids, (int, long)): |
1170 | ids = [ids] |
1171 | if not ids: |
1172 | - raise Exception , "No PO line given" |
1173 | + raise Exception, "No PO line given" |
1174 | |
1175 | # load common data into context: |
1176 | self.pool.get('data.tools').load_common_data(cr, uid, ids, context=context) |
1177 | @@ -332,7 +332,7 @@ |
1178 | pick_id = self.pool.get('stock.picking').create(cr, uid, pick_values, context=context) |
1179 | |
1180 | # log picking creation |
1181 | - self.pool.get('stock.picking').log(cr, uid, pick_id, _('The new internal Picking %s has been created.')%name) |
1182 | + self.pool.get('stock.picking').log(cr, uid, pick_id, _('The new internal Picking %s has been created.') % name) |
1183 | |
1184 | return pick_id |
1185 | |
1186 | @@ -343,7 +343,7 @@ |
1187 | ''' |
1188 | if context is None: |
1189 | context = {} |
1190 | - if isinstance(ids, (int,long)): |
1191 | + if isinstance(ids, (int, long)): |
1192 | ids = [ids] |
1193 | wf_service = netsvc.LocalService("workflow") |
1194 | |
1195 | @@ -383,7 +383,7 @@ |
1196 | ''' |
1197 | if context is None: |
1198 | context = {} |
1199 | - if isinstance(ids, (int,long)): |
1200 | + if isinstance(ids, (int, long)): |
1201 | ids = [ids] |
1202 | self.write(cr, uid, ids, {'state': 'validated_n'}, context=context) |
1203 | |
1204 | @@ -400,6 +400,17 @@ |
1205 | |
1206 | return True |
1207 | |
1208 | + def check_po_tax(self, cr, uid, ids, context=None): |
1209 | + """ |
1210 | + Prevents validating a PO with taxes when using an Intermission partner |
1211 | + """ |
1212 | + if context is None: |
1213 | + context = {} |
1214 | + if isinstance(ids, (int, long)): |
1215 | + ids = [ids] |
1216 | + for po_line in self.browse(cr, uid, ids, fields_to_fetch=['order_id', 'taxes_id'], context=context): |
1217 | + if po_line.taxes_id and po_line.order_id.partner_type == 'intermission': |
1218 | + raise osv.except_osv(_('Error'), _("You can't use taxes with an intermission partner.")) |
1219 | |
1220 | def action_validate(self, cr, uid, ids, context=None): |
1221 | ''' |
1222 | @@ -407,7 +418,7 @@ |
1223 | ''' |
1224 | if context is None: |
1225 | context = {} |
1226 | - if isinstance(ids, (int,long)): |
1227 | + if isinstance(ids, (int, long)): |
1228 | ids = [ids] |
1229 | wf_service = netsvc.LocalService("workflow") |
1230 | |
1231 | @@ -415,6 +426,7 @@ |
1232 | self.check_analytic_distribution(cr, uid, ids, context=context) |
1233 | self.check_if_stock_take_date_with_esc_partner(cr, uid, ids, context=context) |
1234 | self.check_unit_price(cr, uid, ids, context=context) |
1235 | + self.check_po_tax(cr, uid, ids, context=context) |
1236 | |
1237 | # update FO lines: |
1238 | self.update_fo_lines(cr, uid, ids, context=context) |
1239 | @@ -451,7 +463,7 @@ |
1240 | ''' |
1241 | if context is None: |
1242 | context = {} |
1243 | - if isinstance(ids, (int,long)): |
1244 | + if isinstance(ids, (int, long)): |
1245 | ids = [ids] |
1246 | wf_service = netsvc.LocalService("workflow") |
1247 | |
1248 | @@ -472,13 +484,13 @@ |
1249 | ''' |
1250 | if context is None: |
1251 | context = {} |
1252 | - if isinstance(ids, (int,long)): |
1253 | + if isinstance(ids, (int, long)): |
1254 | ids = [ids] |
1255 | wf_service = netsvc.LocalService("workflow") |
1256 | |
1257 | self.write(cr, uid, ids, {'state': 'sourced_v'}, context=context) |
1258 | |
1259 | - #update linked sol (same instance) to sourced-v (if has) |
1260 | + # update linked sol (same instance) to sourced-v (if has) |
1261 | for po in self.browse(cr, uid, ids, context=context): |
1262 | if po.linked_sol_id: |
1263 | wf_service.trg_validate(uid, 'sale.order.line', po.linked_sol_id.id, 'sourced_v', cr) |
1264 | @@ -492,7 +504,7 @@ |
1265 | ''' |
1266 | if context is None: |
1267 | context = {} |
1268 | - if isinstance(ids, (int,long)): |
1269 | + if isinstance(ids, (int, long)): |
1270 | ids = [ids] |
1271 | |
1272 | self.write(cr, uid, ids, {'state': 'sourced_n'}, context=context) |
1273 | @@ -547,9 +559,9 @@ |
1274 | self.pool.get('stock.move').in_action_confirm(cr, uid, incoming_move_id, context) |
1275 | |
1276 | # create internal moves (INT): |
1277 | - if pol.order_id.location_id.input_ok and pol.product_id.type not in ('service_recep', 'consu'): |
1278 | + if pol.order_id.location_id.input_ok and pol.product_id.type not in ('service_recep', 'consu'): |
1279 | internal_pick = self.pool.get('stock.picking').search(cr, uid, [ |
1280 | - ('type', '=', 'internal'), |
1281 | + ('type', '=', 'internal'), |
1282 | ('purchase_id', '=', pol.order_id.id), |
1283 | ('state', 'not in', ['done', 'cancel']), |
1284 | ], context=context) |
1285 | @@ -604,7 +616,7 @@ |
1286 | ''' |
1287 | if context is None: |
1288 | context = {} |
1289 | - if isinstance(ids, (int,long)): |
1290 | + if isinstance(ids, (int, long)): |
1291 | ids = [ids] |
1292 | wf_service = netsvc.LocalService("workflow") |
1293 | |
1294 | @@ -613,8 +625,8 @@ |
1295 | |
1296 | for pol in self.browse(cr, uid, ids, context=context): |
1297 | # no PICK/OUT needed in this cases; close SO line: |
1298 | - internal_ir = pol.linked_sol_id and pol.linked_sol_id.order_id.procurement_request and pol.linked_sol_id.order_id.location_requestor_id.usage == 'internal' or False # PO line from Internal IR |
1299 | - dpo = pol.order_id.order_type == 'direct' or False # direct PO |
1300 | + internal_ir = pol.linked_sol_id and pol.linked_sol_id.order_id.procurement_request and pol.linked_sol_id.order_id.location_requestor_id.usage == 'internal' or False # PO line from Internal IR |
1301 | + dpo = pol.order_id.order_type == 'direct' or False # direct PO |
1302 | ir_non_stockable = pol.linked_sol_id and pol.linked_sol_id.order_id.procurement_request and pol.linked_sol_id.product_id.type in ('consu', 'service', 'service_recep') or False |
1303 | |
1304 | if internal_ir or dpo or ir_non_stockable: |
1305 | @@ -700,7 +712,7 @@ |
1306 | """ |
1307 | if context is None: |
1308 | context = {} |
1309 | - if isinstance(ids, (int,long)): |
1310 | + if isinstance(ids, (int, long)): |
1311 | ids = [ids] |
1312 | |
1313 | for po in self.browse(cr, uid, ids, context=context): |
1314 | @@ -715,7 +727,7 @@ |
1315 | ''' |
1316 | if context is None: |
1317 | context = {} |
1318 | - if isinstance(ids, (int,long)): |
1319 | + if isinstance(ids, (int, long)): |
1320 | ids = [ids] |
1321 | wf_service = netsvc.LocalService("workflow") |
1322 | |
1323 | |
1324 | === modified file 'bin/addons/register_accounting/report/fully_report_xls.mako' |
1325 | --- bin/addons/register_accounting/report/fully_report_xls.mako 2017-09-26 11:57:14 +0000 |
1326 | +++ bin/addons/register_accounting/report/fully_report_xls.mako 2018-02-12 10:19:50 +0000 |
1327 | @@ -904,8 +904,16 @@ |
1328 | <!-- Display analytic lines linked to this register line --> |
1329 | <% |
1330 | a_lines = False |
1331 | +adv_return_aal = line.cash_return_move_line_id and line.cash_return_move_line_id.analytic_lines or [] |
1332 | +adv_return_fp_lines = [] |
1333 | +adv_return_free_lines = [] |
1334 | +for adv_return_l in adv_return_aal: |
1335 | + if adv_return_l.free_account: |
1336 | + adv_return_free_lines.append(adv_return_l) |
1337 | + else: |
1338 | + adv_return_fp_lines.append(adv_return_l) |
1339 | if line.fp_analytic_lines and not line.invoice_id and not line.imported_invoice_line_ids: |
1340 | - a_lines = line.cash_return_move_line_id and line.cash_return_move_line_id.analytic_lines or line.fp_analytic_lines |
1341 | + a_lines = adv_return_fp_lines or line.fp_analytic_lines |
1342 | %> |
1343 | % if a_lines: |
1344 | % for ana_line in sorted(a_lines, key=lambda x: x.id): |
1345 | @@ -964,7 +972,7 @@ |
1346 | <% |
1347 | a_lines = False |
1348 | if line.free_analytic_lines and not line.invoice_id and not line.imported_invoice_line_ids: |
1349 | - a_lines = line.free_analytic_lines |
1350 | + a_lines = adv_return_free_lines or line.free_analytic_lines |
1351 | %> |
1352 | % if a_lines: |
1353 | % for ana_line in sorted(a_lines, key=lambda x: x.id): |
1354 | |
1355 | === modified file 'bin/addons/register_accounting/wizard/import_invoice_on_registers.py' |
1356 | --- bin/addons/register_accounting/wizard/import_invoice_on_registers.py 2017-06-06 09:58:17 +0000 |
1357 | +++ bin/addons/register_accounting/wizard/import_invoice_on_registers.py 2018-02-12 10:19:50 +0000 |
1358 | @@ -244,7 +244,8 @@ |
1359 | |
1360 | # Create register line |
1361 | partial = False |
1362 | - if line.amount and line.amount_to_pay and line.amount < abs(line.amount_to_pay): |
1363 | + equal_amounts = abs(abs(line.amount) - abs(line.amount_to_pay)) <= 10**-3 |
1364 | + if not equal_amounts and line.amount < abs(line.amount_to_pay): |
1365 | partial = ' - ' + _('partial pymt') |
1366 | ref = line.ref |
1367 | if not ref or ref == 'false': |
1368 | |
1369 | === modified file 'bin/addons/register_accounting/wizard/wizard_register_import.py' |
1370 | --- bin/addons/register_accounting/wizard/wizard_register_import.py 2017-04-19 13:19:37 +0000 |
1371 | +++ bin/addons/register_accounting/wizard/wizard_register_import.py 2018-02-12 10:19:50 +0000 |
1372 | @@ -61,9 +61,9 @@ |
1373 | if not context: |
1374 | context = {} |
1375 | view = super(wizard_register_import, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar, submenu) |
1376 | - if view_type=='form': |
1377 | + if view_type == 'form': |
1378 | form = etree.fromstring(view['arch']) |
1379 | - for el in [('document_date', 'Document Date'), ('posting_date', 'Posting Date'), ('cheque_number', 'Cheque Number'), ('description', 'Description'), ('reference', 'Reference'), ('account', 'Account'), ('third_party', 'Third Party'), ('amount_in', 'Amount In'), ('amount_out', 'Amount Out'), ('destination', 'Destination'), ('cost_center', 'Cost Centre'), ('funding_pool', 'Funding Pool'), ('proprietary_instance', "Proprietary instance's code"), ('journal', "Journal's code"), ('currency', "Currency's code"),('free1', "Free 1"),('free2', "Free 2")]: |
1380 | + for el in [('document_date', 'Document Date'), ('posting_date', 'Posting Date'), ('cheque_number', 'Cheque Number'), ('description', 'Description'), ('reference', 'Reference'), ('account', 'Account'), ('third_party', 'Third Party'), ('amount_in', 'Amount In'), ('amount_out', 'Amount Out'), ('destination', 'Destination'), ('cost_center', 'Cost Centre'), ('funding_pool', 'Funding Pool'), ('proprietary_instance', "Proprietary instance's code"), ('journal', "Journal's code"), ('currency', "Currency's code"), ('free1', "Free 1"), ('free2', "Free 2")]: |
1381 | fields = form.xpath('/form//th[@class="' + el[0] + '"]') |
1382 | for field in fields: |
1383 | field.text = _(el[1]) |
1384 | @@ -149,20 +149,20 @@ |
1385 | currency_id = l.get('currency_id', False) and l.get('currency_id')[0] or False |
1386 | account = account_obj.read(cr, uid, account_id, ['is_analytic_addicted']) |
1387 | cheque_number = l.get('cheque_number') |
1388 | - free_1_id = l.get('free_1_id',False) and l.get('free_1_id')[0] or False |
1389 | - free_2_id = l.get('free_2_id',False) and l.get('free_2_id')[0] or False |
1390 | + free_1_id = l.get('free_1_id', False) and l.get('free_1_id')[0] or False |
1391 | + free_2_id = l.get('free_2_id', False) and l.get('free_2_id')[0] or False |
1392 | |
1393 | vals = { |
1394 | - 'name': l.get('description', ''), |
1395 | - 'ref': l.get('ref', ''), |
1396 | - 'document_date': l.get('document_date', False), |
1397 | - 'date': date, |
1398 | - 'account_id': account_id, |
1399 | - 'amount': l.get('debit', 0.0) - l.get('credit', 0.0), |
1400 | - 'partner_id': partner_id, |
1401 | - 'employee_id': employee_id, |
1402 | + 'name': l.get('description', ''), |
1403 | + 'ref': l.get('ref', ''), |
1404 | + 'document_date': l.get('document_date', False), |
1405 | + 'date': date, |
1406 | + 'account_id': account_id, |
1407 | + 'amount': l.get('debit', 0.0) - l.get('credit', 0.0), |
1408 | + 'partner_id': partner_id, |
1409 | + 'employee_id': employee_id, |
1410 | 'transfer_journal_id': transfer_journal_id, |
1411 | - 'statement_id': register_id, |
1412 | + 'statement_id': register_id, |
1413 | } |
1414 | if cheque_number: |
1415 | vals['cheque_number'] = str(cheque_number) |
1416 | @@ -182,21 +182,21 @@ |
1417 | 'source_date': date, |
1418 | 'destination_id': destination_id, |
1419 | } |
1420 | - common_vals.update({'analytic_id': cost_center_id,}) |
1421 | + common_vals.update({'analytic_id': cost_center_id, }) |
1422 | self.pool.get('cost.center.distribution.line').create(cr, uid, common_vals) |
1423 | - common_vals.update({'analytic_id': funding_pool_id or msf_fp_id, 'cost_center_id': cost_center_id,}) |
1424 | + common_vals.update({'analytic_id': funding_pool_id or msf_fp_id, 'cost_center_id': cost_center_id, }) |
1425 | self.pool.get('funding.pool.distribution.line').create(cr, uid, common_vals) |
1426 | |
1427 | if free_1_id: |
1428 | - common_vals.update({'analytic_id': free_1_id,}) |
1429 | - self.pool.get('free.1.distribution.line').create(cr,uid,common_vals) |
1430 | + common_vals.update({'analytic_id': free_1_id, }) |
1431 | + self.pool.get('free.1.distribution.line').create(cr, uid, common_vals) |
1432 | |
1433 | if free_2_id: |
1434 | - common_vals.update({'analytic_id': free_2_id,}) |
1435 | - self.pool.get('free.2.distribution.line').create(cr,uid,common_vals) |
1436 | + common_vals.update({'analytic_id': free_2_id, }) |
1437 | + self.pool.get('free.2.distribution.line').create(cr, uid, common_vals) |
1438 | |
1439 | # Check analytic distribution. Use SKIP_WRITE_CHECK to not do anything else that writing analytic distribution field |
1440 | - absl_obj.write(cr, uid, [absl_id], {'analytic_distribution_id': distrib_id,}, context={'skip_write_check': True}) |
1441 | + absl_obj.write(cr, uid, [absl_id], {'analytic_distribution_id': distrib_id, }, context={'skip_write_check': True}) |
1442 | # Add this line to be check at the end of the process |
1443 | to_check.append(absl_id) |
1444 | # Update wizard with current progression |
1445 | @@ -212,7 +212,7 @@ |
1446 | to_delete.append(absl_data.get('id')) |
1447 | to_delete_distrib.append(absl_data.get('analytic_distribution_id', [False])[0]) |
1448 | # Delete analytic distribution field on register lines |
1449 | - absl_obj.write(cr, uid, to_delete, {'analytic_distribution_id': False}, context={'skip_write_check': True}) # do not do anything else than changing analytic_distribution_id field content (thanks to SKIP_WRITE_CHECK) |
1450 | + absl_obj.write(cr, uid, to_delete, {'analytic_distribution_id': False}, context={'skip_write_check': True}) # do not do anything else than changing analytic_distribution_id field content (thanks to SKIP_WRITE_CHECK) |
1451 | # Delete analytic distribution |
1452 | self.pool.get('analytic.distribution').unlink(cr, uid, to_delete_distrib, context) |
1453 | return True |
1454 | @@ -234,6 +234,9 @@ |
1455 | processed = 0 |
1456 | errors = [] |
1457 | cheque_numbers = [] |
1458 | + employee_obj = self.pool.get('hr.employee') |
1459 | + journal_obj = self.pool.get('account.journal') |
1460 | + partner_obj = self.pool.get('res.partner') |
1461 | try: |
1462 | msf_fp_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'analytic_distribution', 'analytic_account_msf_private_funds')[1] |
1463 | except ValueError: |
1464 | @@ -270,22 +273,22 @@ |
1465 | # cols variable describe each column and its expected number |
1466 | cols = { |
1467 | 'document_date': 0, |
1468 | - 'posting_date': 1, |
1469 | + 'posting_date': 1, |
1470 | 'cheque_number': 2, |
1471 | - 'description': 3, |
1472 | - 'reference': 4, |
1473 | - 'account': 5, |
1474 | - 'third_party': 6, |
1475 | - 'amount_in': 7, |
1476 | - 'amount_out': 8, |
1477 | - 'destination': 9, |
1478 | - 'cost_center': 10, |
1479 | - 'funding_pool': 11, |
1480 | - 'free1': 12, |
1481 | - 'free2': 13, |
1482 | + 'description': 3, |
1483 | + 'reference': 4, |
1484 | + 'account': 5, |
1485 | + 'third_party': 6, |
1486 | + 'amount_in': 7, |
1487 | + 'amount_out': 8, |
1488 | + 'destination': 9, |
1489 | + 'cost_center': 10, |
1490 | + 'funding_pool': 11, |
1491 | + 'free1': 12, |
1492 | + 'free2': 13, |
1493 | } |
1494 | # Number of line to bypass in line's count |
1495 | - base_num = 5 # because of Python that begins to 0. |
1496 | + base_num = 5 # because of Python that begins to 0. |
1497 | # Attempt to read 3 first lines |
1498 | first_line = self.pool.get('import.cell.data').get_line_values(cr, uid, ids, rows.next()) |
1499 | try: |
1500 | @@ -296,7 +299,7 @@ |
1501 | try: |
1502 | journal_code = second_line[1] |
1503 | except IndexError, e: |
1504 | - raise osv.except_osv(_('Warning'), _('No journal code found.')) |
1505 | + raise osv.except_osv(_('Warning'), _('No journal code found.')) |
1506 | third_line = self.pool.get('import.cell.data').get_line_values(cr, uid, ids, rows.next()) |
1507 | try: |
1508 | currency_code = third_line[1] |
1509 | @@ -309,7 +312,7 @@ |
1510 | if isinstance(instance_ids, (int, long)): |
1511 | instance_ids = [instance_ids] |
1512 | # Check second info: journal's code |
1513 | - journal_ids = self.pool.get('account.journal').search(cr, uid, [('code', '=', journal_code)]) |
1514 | + journal_ids = journal_obj.search(cr, uid, [('code', '=', journal_code)]) |
1515 | if not journal_ids or len(journal_ids) > 1: |
1516 | raise osv.except_osv(_('Warning'), _('Journal %s not found.') % (journal_code or '',)) |
1517 | if isinstance(journal_ids, (int, long)): |
1518 | @@ -346,7 +349,7 @@ |
1519 | # Check file's content |
1520 | for num, r in enumerate(rows): |
1521 | # Update wizard |
1522 | - progression = ((float(num+1) * remaining) / float(nb_rows)) + 6 |
1523 | + progression = ((float(num + 1) * remaining) / float(nb_rows)) + 6 |
1524 | self.write(cr, uid, [wiz.id], {'message': _('Checking file…'), 'progression': progression}, context) |
1525 | # Prepare some values |
1526 | r_debit = 0 |
1527 | @@ -446,42 +449,62 @@ |
1528 | if register_type == 'cheque': |
1529 | if r_cheque_number: |
1530 | if r_cheque_number in cheque_numbers: |
1531 | - errors.append(_('Line %s. Cheque number %s is duplicated from another line') % (current_line_num,r_cheque_number,)) |
1532 | + errors.append(_('Line %s. Cheque number %s is duplicated from another line') % (current_line_num, r_cheque_number,)) |
1533 | absl = self.pool.get('account.bank.statement.line') |
1534 | - cheque_number_id = absl.search(cr, uid, [('cheque_number','=',r_cheque_number)],context=context) |
1535 | + cheque_number_id = absl.search(cr, uid, [('cheque_number', '=', r_cheque_number)], context=context) |
1536 | if cheque_number_id: |
1537 | - errors.append(_('Line %s. Cheque number %s has already been entered into the system.') % (current_line_num,r_cheque_number,)) |
1538 | + errors.append(_('Line %s. Cheque number %s has already been entered into the system.') % (current_line_num, r_cheque_number,)) |
1539 | cheque_numbers.append(r_cheque_number) |
1540 | else: |
1541 | errors.append(_('Line %s. Cheque number is missing') % (current_line_num,)) |
1542 | - # Check that Third party exists (if not empty) |
1543 | + |
1544 | + # Check Account/Third Party compatibility regarding the Account "Type for specific treatment" |
1545 | partner_type = 'partner' |
1546 | - if line[cols['third_party']]: |
1547 | - if type_for_register == 'advance': |
1548 | - tp_ids = self.pool.get('hr.employee').search(cr, uid, [('name', '=', line[cols['third_party']])]) |
1549 | - partner_type = 'employee' |
1550 | - elif type_for_register in ['transfer', 'transfer_same']: |
1551 | - tp_ids = self.pool.get('account.journal').search(cr, uid, [('code', '=', line[cols['third_party']])]) |
1552 | - if tp_ids: |
1553 | + tp_ids = [] |
1554 | + has_specific_type = type_for_register in ['advance', 'transfer', 'transfer_same', 'down_payment', 'payroll'] or False |
1555 | + if has_specific_type: |
1556 | + if line[cols['third_party']]: |
1557 | + # Type Operational Advance ==> EMPLOYEE required |
1558 | + if type_for_register == 'advance': |
1559 | + tp_ids = employee_obj.search(cr, uid, [('name', '=', line[cols['third_party']])], context=context) |
1560 | + partner_type = 'employee' |
1561 | + # Type Internal transfer ==> JOURNAL required |
1562 | + elif type_for_register in ['transfer', 'transfer_same']: |
1563 | + tp_ids = journal_obj.search(cr, uid, [('code', '=', line[cols['third_party']])], context=context) |
1564 | partner_type = 'journal' |
1565 | - tp_journal = self.pool.get('account.journal').browse(cr, uid, tp_ids, context=context)[0] |
1566 | - if type_for_register == 'transfer': |
1567 | - if tp_journal.currency.id == register_currency: |
1568 | - errors.append(_('Line %s. A Transfer Journal must have a different currency than the register.') % (current_line_num,)) |
1569 | - if type_for_register == 'transfer_same': |
1570 | - if tp_journal.currency.id != register_currency: |
1571 | - errors.append(_('Line %s. A Transfer Same Journal must have the same currency as the register.') % (current_line_num,)) |
1572 | - else: |
1573 | - tp_ids = self.pool.get('res.partner').search(cr, uid, [('name', '=', line[cols['third_party']])]) |
1574 | - partner_type = 'partner' |
1575 | - if not tp_ids: |
1576 | - # Search now if employee exists |
1577 | - tp_ids = self.pool.get('hr.employee').search(cr, uid, [('name', '=', line[cols['third_party']])]) |
1578 | + if tp_ids: |
1579 | + tp_journal = journal_obj.browse(cr, uid, tp_ids, fields_to_fetch=['currency'], context=context)[0] |
1580 | + if type_for_register == 'transfer': |
1581 | + if tp_journal.currency.id == register_currency: |
1582 | + errors.append(_('Line %s. A Transfer Journal must have a different currency than the register.') % (current_line_num,)) |
1583 | + continue |
1584 | + if type_for_register == 'transfer_same': |
1585 | + if tp_journal.currency.id != register_currency: |
1586 | + errors.append(_('Line %s. A Transfer Same Journal must have the same currency as the register.') % (current_line_num,)) |
1587 | + continue |
1588 | + # Type DP or payroll ==> PARTNER required |
1589 | + elif type_for_register in ['down_payment', 'payroll']: |
1590 | + tp_ids = partner_obj.search(cr, uid, [('name', '=', line[cols['third_party']])], context=context) |
1591 | + partner_type = 'partner' |
1592 | + # Any type for Spec. Treatment listed above ==> EMPTY partner NOT allowed |
1593 | + if not tp_ids: |
1594 | + errors.append( |
1595 | + _("Line %s. Third Party %s not found or not compatible with the Type for specific" |
1596 | + " treatment of the account '%s - %s'.") % (current_line_num, line[cols['third_party']] or '', |
1597 | + account['code'], account['name'],)) |
1598 | + continue |
1599 | + elif line[cols['third_party']]: |
1600 | + # if the account has no specific type, search for a partner, then an employee |
1601 | + # (the journal type is ignored in that case. If used it should trigger an error message) |
1602 | + tp_ids = partner_obj.search(cr, uid, [('name', '=', line[cols['third_party']])], context=context) |
1603 | + partner_type = 'partner' |
1604 | + if not tp_ids: |
1605 | + tp_ids = employee_obj.search(cr, uid, [('name', '=', line[cols['third_party']])], context=context) |
1606 | partner_type = 'employee' |
1607 | - # If really not, raise an error for this line |
1608 | - if not tp_ids: |
1609 | - errors.append(_('Line %s. Third party not found: %s') % (current_line_num, line[cols['third_party']],)) |
1610 | - continue |
1611 | + if not tp_ids: |
1612 | + errors.append(_('Line %s. Third party not found: %s') % (current_line_num, line[cols['third_party']],)) |
1613 | + continue |
1614 | + if tp_ids: |
1615 | r_partner = tp_ids[0] |
1616 | |
1617 | # US-672 TP compat with account |
1618 | @@ -569,7 +592,7 @@ |
1619 | # - Booking Currency |
1620 | vals = { |
1621 | 'description': r_description or '', |
1622 | - 'ref': line[4] or '', |
1623 | + 'ref': line[4] or '', |
1624 | 'account_id': r_account or False, |
1625 | 'debit': r_debit or 0.0, |
1626 | 'credit': r_credit or 0.0, |
1627 | @@ -586,14 +609,14 @@ |
1628 | 'free_2_id': r_free2 or False, |
1629 | } |
1630 | if type_for_register == 'advance': |
1631 | - vals.update({'employee_id': r_partner,}) |
1632 | + vals.update({'employee_id': r_partner, }) |
1633 | elif type_for_register in ['transfer', 'transfer_same']: |
1634 | vals.update({'transfer_journal_id': r_partner}) |
1635 | else: |
1636 | if partner_type == 'partner': |
1637 | - vals.update({'partner_id': r_partner,}) |
1638 | + vals.update({'partner_id': r_partner, }) |
1639 | elif partner_type == 'employee': |
1640 | - vals.update({'employee_id': r_partner,}) |
1641 | + vals.update({'employee_id': r_partner, }) |
1642 | line_res = self.pool.get('wizard.register.import.lines').create(cr, uid, vals, context) |
1643 | if not line_res: |
1644 | errors.append(_('Line %s. A problem occurred for line registration. Please contact an Administrator.') % (current_line_num,)) |
1645 | @@ -612,7 +635,7 @@ |
1646 | # Delete old errors |
1647 | error_ids = self.pool.get('wizard.register.import.errors').search(cr, uid, [], context) |
1648 | if error_ids: |
1649 | - self.pool.get('wizard.register.import.errors').unlink(cr, uid, error_ids ,context) |
1650 | + self.pool.get('wizard.register.import.errors').unlink(cr, uid, error_ids, context) |
1651 | # create errors lines |
1652 | for e in errors: |
1653 | self.pool.get('wizard.register.import.errors').create(cr, uid, {'wizard_id': wiz.id, 'name': e}, context) |
1654 | |
1655 | === modified file 'bin/addons/sale/sale_workflow.py' |
1656 | --- bin/addons/sale/sale_workflow.py 2018-01-08 13:50:26 +0000 |
1657 | +++ bin/addons/sale/sale_workflow.py 2018-02-12 10:19:50 +0000 |
1658 | @@ -52,7 +52,7 @@ |
1659 | o_ana_dist_id = so.analytic_distribution_id and so.analytic_distribution_id.id |
1660 | distrib_id = l_ana_dist_id or o_ana_dist_id or False |
1661 | |
1662 | - #US-830 : Remove the definition of a default AD for the inter-mission FO is no AD is defined |
1663 | + # US-830 : Remove the definition of a default AD for the inter-mission FO if no AD is defined |
1664 | if not distrib_id and not so.order_type in ('loan', 'donation_st', 'donation_exp'): |
1665 | raise osv.except_osv( |
1666 | _('Warning'), |
1667 | @@ -102,7 +102,7 @@ |
1668 | ''' |
1669 | if context is None: |
1670 | context = {} |
1671 | - if isinstance(ids, (int,long)): |
1672 | + if isinstance(ids, (int, long)): |
1673 | ids = [ids] |
1674 | |
1675 | # for each line get a new copy: |
1676 | @@ -122,7 +122,7 @@ |
1677 | ''' |
1678 | if context is None: |
1679 | context = {} |
1680 | - if isinstance(ids, (int,long)): |
1681 | + if isinstance(ids, (int, long)): |
1682 | ids = [ids] |
1683 | |
1684 | for sol in self.browse(cr, uid, ids, context=context): |
1685 | @@ -139,14 +139,14 @@ |
1686 | ''' |
1687 | if context is None: |
1688 | context = {} |
1689 | - if isinstance(ids, (int,long)): |
1690 | + if isinstance(ids, (int, long)): |
1691 | ids = [ids] |
1692 | wf_service = netsvc.LocalService("workflow") |
1693 | |
1694 | new_sol_id = False |
1695 | for sol in self.browse(cr, uid, ids, context=context): |
1696 | new_sol_id = self.copy(cr, uid, sol.id, { |
1697 | - 'resourced_original_line': sol.id, |
1698 | + 'resourced_original_line': sol.id, |
1699 | 'resourced_original_remote_line': sol.sync_linked_pol, |
1700 | 'resourced_at_state': sol.state, |
1701 | 'is_line_split': False, |
1702 | @@ -164,7 +164,7 @@ |
1703 | ''' |
1704 | if context is None: |
1705 | context = {} |
1706 | - if isinstance(ids, (int,long)): |
1707 | + if isinstance(ids, (int, long)): |
1708 | ids = [ids] |
1709 | |
1710 | sol = self.browse(cr, uid, ids[0], context=context) |
1711 | @@ -189,7 +189,7 @@ |
1712 | ''' |
1713 | if context is None: |
1714 | context = {} |
1715 | - if isinstance(ids, (int,long)): |
1716 | + if isinstance(ids, (int, long)): |
1717 | ids = [ids] |
1718 | |
1719 | self.write(cr, uid, ids, {'state': 'done'}, context=context) |
1720 | @@ -197,7 +197,7 @@ |
1721 | # generate sync message manually : |
1722 | return_info = {} |
1723 | for sol_id in ids: |
1724 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1725 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1726 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1727 | |
1728 | return True |
1729 | @@ -217,7 +217,7 @@ |
1730 | # generate sync message manually : |
1731 | return_info = {} |
1732 | for sol_id in ids: |
1733 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1734 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1735 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1736 | |
1737 | return True |
1738 | @@ -237,7 +237,7 @@ |
1739 | # generate sync message |
1740 | return_info = {} |
1741 | for sol_id in ids: |
1742 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1743 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1744 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1745 | |
1746 | return True |
1747 | @@ -271,7 +271,7 @@ |
1748 | # generate sync message manually : |
1749 | return_info = {} |
1750 | for sol_id in ids: |
1751 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1752 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1753 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1754 | |
1755 | return True |
1756 | @@ -283,7 +283,7 @@ |
1757 | ''' |
1758 | if context is None: |
1759 | context = {} |
1760 | - if isinstance(ids, (int,long)): |
1761 | + if isinstance(ids, (int, long)): |
1762 | ids = [ids] |
1763 | |
1764 | sol = self.browse(cr, uid, ids[0], context=context) |
1765 | @@ -314,7 +314,7 @@ |
1766 | ''' |
1767 | if context is None: |
1768 | context = {} |
1769 | - if isinstance(ids, (int,long)): |
1770 | + if isinstance(ids, (int, long)): |
1771 | ids = [ids] |
1772 | |
1773 | pick_to_use = False |
1774 | @@ -363,18 +363,20 @@ |
1775 | ''' |
1776 | if context is None: |
1777 | context = {} |
1778 | - if isinstance(ids, (int,long)): |
1779 | + if isinstance(ids, (int, long)): |
1780 | ids = [ids] |
1781 | |
1782 | for sol in self.browse(cr, uid, ids, context=context): |
1783 | out_moves_to_cancel = self.pool.get('stock.move').search(cr, uid, [ |
1784 | - ('sale_line_id', '=', sol.id), |
1785 | + ('sale_line_id', '=', sol.id), |
1786 | ('type', '=', 'out'), |
1787 | ('state', 'in', ['assigned', 'confirmed']), |
1788 | ], context=context) |
1789 | |
1790 | if out_moves_to_cancel: |
1791 | + context.update({'not_resource_move': out_moves_to_cancel}) |
1792 | self.pool.get('stock.move').action_cancel(cr, uid, out_moves_to_cancel, context=context) |
1793 | + context.pop('not_resource_move') |
1794 | |
1795 | return True |
1796 | |
1797 | @@ -398,7 +400,7 @@ |
1798 | ('order_id.order_type', '=', 'direct'), |
1799 | ], context=context) |
1800 | |
1801 | - if sol.order_id.procurement_request and sol.product_id.type in ('consu', 'service', 'service_recep'): # IR non stockable |
1802 | + if sol.order_id.procurement_request and sol.product_id.type in ('consu', 'service', 'service_recep'): # IR non stockable |
1803 | continue |
1804 | |
1805 | if linked_dpo_line: |
1806 | @@ -439,7 +441,7 @@ |
1807 | # Change Currency ?? |
1808 | if sol.order_partner_id.partner_type in ('section', 'intermission'): |
1809 | picking = picking_obj.browse(cr, uid, pick_to_use, context=context) |
1810 | - move = self.pool.get('stock.move').browse(cr ,uid, move_id, context=context) |
1811 | + move = self.pool.get('stock.move').browse(cr, uid, move_id, context=context) |
1812 | invoice_id, inv_type = picking_obj.action_invoice_create_header(cr, uid, picking, journal_id=False, invoices_group=False, type=False, use_draft=True, context=context) |
1813 | if invoice_id: |
1814 | picking_obj.action_invoice_create_line(cr, uid, picking, move, invoice_id, group=False, inv_type=inv_type, partner=sol.order_id.partner_id, context=context) |
1815 | @@ -460,24 +462,35 @@ |
1816 | self.pool.get('stock.move').action_confirm(cr, uid, [move_id], context=context) |
1817 | |
1818 | # confirm the OUT if in draft state: |
1819 | - pick_state = self.pool.get('stock.picking').read(cr, uid, pick_to_use, ['state'] ,context=context)['state'] |
1820 | + pick_state = self.pool.get('stock.picking').read(cr, uid, pick_to_use, ['state'], context=context)['state'] |
1821 | if picking_data['type'] == 'out' and picking_data['subtype'] == 'standard' and pick_state == 'draft': |
1822 | self.pool.get('stock.picking').draft_force_assign(cr, uid, [pick_to_use], context=context) |
1823 | # run check availability on PICK/OUT: |
1824 | if picking_data['type'] == 'out' and picking_data['subtype'] in ['picking', 'standard']: |
1825 | self.pool.get('stock.picking').action_assign(cr, uid, [pick_to_use], context=context) |
1826 | if picking_data['type'] == 'internal' and sol.type == 'make_to_stock' and sol.order_id.procurement_request: |
1827 | - wf_service.trg_validate(uid, 'stock.picking', pick_to_use, 'button_confirm', cr) |
1828 | + wf_service.trg_validate(uid, 'stock.picking', pick_to_use, 'button_confirm', cr) |
1829 | |
1830 | self.write(cr, uid, ids, {'state': 'confirmed'}, context=context) |
1831 | |
1832 | # generate sync message: |
1833 | return_info = {} |
1834 | for sol_id in ids: |
1835 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1836 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1837 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1838 | return True |
1839 | |
1840 | + def check_fo_tax(self, cr, uid, ids, context=None): |
1841 | + """ |
1842 | + Prevents from validating a FO with taxes when using an Intermission partner |
1843 | + """ |
1844 | + if context is None: |
1845 | + context = {} |
1846 | + if isinstance(ids, (int, long)): |
1847 | + ids = [ids] |
1848 | + for fo_line in self.browse(cr, uid, ids, fields_to_fetch=['order_id', 'tax_id'], context=context): |
1849 | + if fo_line.tax_id and fo_line.order_id.partner_type == 'intermission': |
1850 | + raise osv.except_osv(_('Error'), _("You can't use taxes with an intermission partner.")) |
1851 | |
1852 | def action_validate(self, cr, uid, ids, context=None): |
1853 | ''' |
1854 | @@ -486,13 +499,15 @@ |
1855 | if context is None: |
1856 | context = {} |
1857 | if isinstance(ids, (int, long)): |
1858 | - ids = [ids] |
1859 | + ids = [ids] |
1860 | + |
1861 | + self.check_fo_tax(cr, uid, ids, context=context) |
1862 | |
1863 | for sol in self.browse(cr, uid, ids, context=context): |
1864 | to_write = {} |
1865 | if not sol.stock_take_date and sol.order_id.stock_take_date: |
1866 | to_write['stock_take_date'] = sol.order_id.stock_take_date |
1867 | - if not sol.order_id.procurement_request: # in case of FO |
1868 | + if not sol.order_id.procurement_request: # in case of FO |
1869 | # check unit price: |
1870 | if not sol.price_unit or sol.price_unit <= 0: |
1871 | raise osv.except_osv( |
1872 | @@ -522,13 +537,13 @@ |
1873 | # generate sync message: |
1874 | return_info = {} |
1875 | for sol in self.browse(cr, uid, ids, context=context): |
1876 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol.id, return_info, |
1877 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol.id, return_info, |
1878 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1879 | |
1880 | return True |
1881 | |
1882 | |
1883 | - def action_draft(self, cr ,uid, ids, context=None): |
1884 | + def action_draft(self, cr, uid, ids, context=None): |
1885 | ''' |
1886 | Workflow method called when trying to reset draft the sale.order.line |
1887 | ''' |
1888 | @@ -560,7 +575,7 @@ |
1889 | # generate sync message: |
1890 | return_info = {} |
1891 | for sol_id in ids: |
1892 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1893 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1894 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1895 | |
1896 | return True |
1897 | @@ -585,11 +600,11 @@ |
1898 | # generate sync message for original FO line: |
1899 | return_info = {} |
1900 | for sol_id in ids: |
1901 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1902 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', sol_id, return_info, |
1903 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1904 | |
1905 | # generate sync message for resourced line: |
1906 | - self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', resourced_sol, return_info, |
1907 | + self.pool.get('sync.client.message_rule')._manual_create_sync_message(cr, uid, 'sale.order.line', resourced_sol, return_info, |
1908 | 'purchase.order.line.sol_update_original_pol', self._logger, check_identifier=False, context=context) |
1909 | |
1910 | return True |
1911 | |
1912 | === modified file 'bin/addons/sync_so/purchase.py' |
1913 | --- bin/addons/sync_so/purchase.py 2017-12-13 14:46:52 +0000 |
1914 | +++ bin/addons/sync_so/purchase.py 2018-02-12 10:19:50 +0000 |
1915 | @@ -146,6 +146,7 @@ |
1916 | pol_updated = False |
1917 | if not pol_id: # then create new PO line |
1918 | kind = 'new line' |
1919 | + pol_values['line_number'] = sol_dict['line_number'] |
1920 | if sol_dict['is_line_split']: |
1921 | sync_linked_sol = int(sol_dict['original_line_id'].get('id').split('/')[-1]) if sol_dict['original_line_id'] else False |
1922 | if not sync_linked_sol: |
1923 | @@ -161,7 +162,6 @@ |
1924 | pol_values['origin'] = orig_pol_info.origin |
1925 | if sol_dict['in_name_goods_return'] and not sol_dict['is_line_split']: |
1926 | # in case of FO from missing/replacement claim |
1927 | - pol_values['line_number'] = sol_dict['line_number'] |
1928 | pol_values['origin'] = self.pool.get('purchase.order').browse(cr, uid, po_ids[0], context=context).origin |
1929 | pol_values['from_synchro_return_goods'] = True |
1930 | # case of PO line doesn't exists, so created in FO (COO) and pushed back in PO (PROJ) |
1931 | |
1932 | === modified file 'bin/addons/tender_flow/tender_flow.py' |
1933 | --- bin/addons/tender_flow/tender_flow.py 2017-11-13 08:37:20 +0000 |
1934 | +++ bin/addons/tender_flow/tender_flow.py 2018-02-12 10:19:50 +0000 |
1935 | @@ -40,7 +40,7 @@ |
1936 | def copy(self, cr, uid, id, default=None, context=None, done_list=[], local=False): |
1937 | if not default: |
1938 | default = {} |
1939 | - default['internal_state'] = 'draft' # UF-733: Reset the internal_state |
1940 | + default['internal_state'] = 'draft' # UF-733: Reset the internal_state |
1941 | if not 'sale_order_id' in default: |
1942 | default['sale_order_id'] = False |
1943 | return super(osv.osv, self).copy(cr, uid, id, default, context=context) |
1944 | @@ -91,8 +91,8 @@ |
1945 | res = {} |
1946 | for tender in self.browse(cr, uid, ids, context=context): |
1947 | retour = False |
1948 | - ids_proc = self.pool.get('procurement.order').search(cr,uid,[('tender_id','=',tender.id)]) |
1949 | - ids_sol = self.pool.get('sale.order.line').search(cr,uid,[('procurement_id','in',ids_proc),('order_id.procurement_request','=',False)]) |
1950 | + ids_proc = self.pool.get('procurement.order').search(cr, uid, [('tender_id', '=', tender.id)]) |
1951 | + ids_sol = self.pool.get('sale.order.line').search(cr, uid, [('procurement_id', 'in', ids_proc), ('order_id.procurement_request', '=', False)]) |
1952 | if ids_sol: |
1953 | retour = True |
1954 | res[tender.id] = retour |
1955 | @@ -100,22 +100,22 @@ |
1956 | |
1957 | _columns = {'name': fields.char('Tender Reference', size=64, required=True, select=True, readonly=True), |
1958 | 'sale_order_id': fields.many2one('sale.order', string="Sale Order", readonly=True), |
1959 | - 'state': fields.selection([('draft', 'Draft'),('comparison', 'Comparison'), ('done', 'Closed'), ('cancel', 'Cancelled'),], string="State", readonly=True), |
1960 | + 'state': fields.selection([('draft', 'Draft'), ('comparison', 'Comparison'), ('done', 'Closed'), ('cancel', 'Cancelled'), ], string="State", readonly=True), |
1961 | 'supplier_ids': fields.many2many('res.partner', 'tender_supplier_rel', 'tender_id', 'supplier_id', string="Suppliers", domain="[('id', '!=', company_id)]", |
1962 | - states={'draft':[('readonly',False)]}, readonly=True, |
1963 | - context={'search_default_supplier': 1,}), |
1964 | - 'location_id': fields.many2one('stock.location', 'Location', required=True, states={'draft':[('readonly',False)]}, readonly=True, domain=[('usage', '=', 'internal')]), |
1965 | - 'company_id': fields.many2one('res.company','Company',required=True, states={'draft':[('readonly',False)]}, readonly=True), |
1966 | + states={'draft': [('readonly', False)]}, readonly=True, |
1967 | + context={'search_default_supplier': 1, }), |
1968 | + 'location_id': fields.many2one('stock.location', 'Location', required=True, states={'draft': [('readonly', False)]}, readonly=True, domain=[('usage', '=', 'internal')]), |
1969 | + 'company_id': fields.many2one('res.company', 'Company', required=True, states={'draft': [('readonly', False)]}, readonly=True), |
1970 | 'rfq_ids': fields.one2many('purchase.order', 'tender_id', string="RfQs", readonly=True), |
1971 | - 'priority': fields.selection(ORDER_PRIORITY, string='Tender Priority', states={'draft':[('readonly',False)],}, readonly=True,), |
1972 | - 'categ': fields.selection(ORDER_CATEGORY, string='Tender Category', required=True, states={'draft':[('readonly',False)],}, readonly=True), |
1973 | + 'priority': fields.selection(ORDER_PRIORITY, string='Tender Priority', states={'draft': [('readonly', False)], }, readonly=True,), |
1974 | + 'categ': fields.selection(ORDER_CATEGORY, string='Tender Category', required=True, states={'draft': [('readonly', False)], }, readonly=True), |
1975 | 'creator': fields.many2one('res.users', string="Creator", readonly=True, required=True,), |
1976 | - 'warehouse_id': fields.many2one('stock.warehouse', string="Warehouse", required=True, states={'draft':[('readonly',False)],}, readonly=True), |
1977 | - 'creation_date': fields.date(string="Creation Date", readonly=True, states={'draft':[('readonly',False)]}), |
1978 | - 'details': fields.char(size=30, string="Details", states={'draft':[('readonly',False)],}, readonly=True), |
1979 | - 'requested_date': fields.date(string="Requested Date", required=True, states={'draft':[('readonly',False)],}, readonly=True), |
1980 | + 'warehouse_id': fields.many2one('stock.warehouse', string="Warehouse", required=True, states={'draft': [('readonly', False)], }, readonly=True), |
1981 | + 'creation_date': fields.date(string="Creation Date", readonly=True, states={'draft': [('readonly', False)]}), |
1982 | + 'details': fields.char(size=30, string="Details", states={'draft': [('readonly', False)], }, readonly=True), |
1983 | + 'requested_date': fields.date(string="Requested Date", required=True, states={'draft': [('readonly', False)], }, readonly=True), |
1984 | 'notes': fields.text('Notes'), |
1985 | - 'internal_state': fields.selection([('draft', 'Draft'),('updated', 'Rfq Updated'), ], string="Internal State", readonly=True), |
1986 | + 'internal_state': fields.selection([('draft', 'Draft'), ('updated', 'Rfq Updated'), ], string="Internal State", readonly=True), |
1987 | 'rfq_name_list': fields.function(_vals_get, method=True, string='RfQs Ref', type='char', readonly=True, store=False, multi='get_vals',), |
1988 | 'product_id': fields.related('tender_line_ids', 'product_id', type='many2one', relation='product.product', string='Product'), |
1989 | 'delivery_address': fields.many2one('res.partner.address', string='Delivery address', required=True), |
1990 | @@ -174,7 +174,7 @@ |
1991 | if not tender.tender_from_fo: |
1992 | return retour |
1993 | for sup in tender.supplier_ids: |
1994 | - if sup.partner_type == 'internal' : |
1995 | + if sup.partner_type == 'internal': |
1996 | retour = False |
1997 | return retour |
1998 | |
1999 | @@ -280,7 +280,7 @@ |
2000 | ''' |
2001 | on_change function for the warehouse |
2002 | ''' |
2003 | - result = {'value':{},} |
2004 | + result = {'value': {}, } |
2005 | if warehouse_id: |
2006 | input_loc_id = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context).lot_input_id.id |
2007 | result['value'].update(location_id=input_loc_id) |
2008 | @@ -303,7 +303,7 @@ |
2009 | # check some supplier have been selected |
2010 | if not tender.supplier_ids: |
2011 | raise osv.except_osv(_('Warning !'), _('You must select at least one supplier!')) |
2012 | - #utp-315: check that the suppliers are not inactive (I use a SQL request because the inactive partner are ignored with the browse) |
2013 | + # utp-315: check that the suppliers are not inactive (I use a SQL request because the inactive partner are ignored with the browse) |
2014 | sql = """ |
2015 | select tsr.supplier_id, rp.name, rp.active |
2016 | from tender_supplier_rel tsr |
2017 | @@ -325,7 +325,7 @@ |
2018 | # create a purchase order for each supplier |
2019 | address_id = partner_obj.address_get(cr, uid, [supplier.id], ['default'])['default'] |
2020 | if not address_id: |
2021 | - raise osv.except_osv(_('Warning !'), _('The supplier "%s" has no address defined!')%(supplier.name,)) |
2022 | + raise osv.except_osv(_('Warning !'), _('The supplier "%s" has no address defined!') % (supplier.name,)) |
2023 | pricelist_id = supplier.property_product_pricelist_purchase.id |
2024 | values = {'origin': tender.sale_order_id and tender.sale_order_id.name + ';' + tender.name or tender.name, |
2025 | 'rfq_ok': True, |
2026 | @@ -353,14 +353,14 @@ |
2027 | if line.qty <= 0.00: |
2028 | raise osv.except_osv(_('Error !'), _('You cannot generate RfQs for an line with a null quantity.')) |
2029 | |
2030 | - if line.product_id.id == obj_data.get_object_reference(cr, uid,'msf_doc_import', 'product_tbd')[1]: |
2031 | + if line.product_id.id == obj_data.get_object_reference(cr, uid, 'msf_doc_import', 'product_tbd')[1]: |
2032 | raise osv.except_osv(_('Warning !'), _('You can\'t have "To Be Defined" for the product. Please select an existing product.')) |
2033 | newdate = datetime.strptime(line.date_planned, '%Y-%m-%d') |
2034 | values = {'name': line.product_id.partner_ref, |
2035 | 'product_qty': line.qty, |
2036 | 'product_id': line.product_id.id, |
2037 | 'product_uom': line.product_uom.id, |
2038 | - 'price_unit': 0.0, # was price variable - uf-607 |
2039 | + 'price_unit': 0.0, # was price variable - uf-607 |
2040 | 'date_planned': newdate.strftime('%Y-%m-%d'), |
2041 | 'notes': line.product_id.description_purchase, |
2042 | 'order_id': po_id, |
2043 | @@ -369,7 +369,7 @@ |
2044 | } |
2045 | # create purchase order line |
2046 | pol_obj.create(cr, uid, values, context=context) |
2047 | - message = "Request for Quotation '%s' has been created."%po_obj.browse(cr, uid, po_id, context=context).name |
2048 | + message = "Request for Quotation '%s' has been created." % po_obj.browse(cr, uid, po_id, context=context).name |
2049 | # create the log message |
2050 | self.pool.get('res.log').create(cr, uid, |
2051 | {'name': message, |
2052 | @@ -385,7 +385,7 @@ |
2053 | tender.name, |
2054 | )) |
2055 | |
2056 | - self.write(cr, uid, ids, {'state':'comparison'}, context=context) |
2057 | + self.write(cr, uid, ids, {'state': 'comparison'}, context=context) |
2058 | return True |
2059 | |
2060 | |
2061 | @@ -395,7 +395,7 @@ |
2062 | ''' |
2063 | if context is None: |
2064 | context = {} |
2065 | - if isinstance(ids, (int,long)): |
2066 | + if isinstance(ids, (int, long)): |
2067 | ids = [ids] |
2068 | |
2069 | self.create_po(cr, uid, ids, context=context) |
2070 | @@ -420,12 +420,12 @@ |
2071 | # close linked RfQ: |
2072 | rfq_list = [] |
2073 | for rfq in tender.rfq_ids: |
2074 | - if rfq.rfq_state not in ('updated', 'cancel'): |
2075 | + if rfq.rfq_state not in ('updated', 'cancel'): |
2076 | rfq_list.append(rfq.id) |
2077 | else: |
2078 | self.pool.get('purchase.order').write(cr, uid, [rfq.id], {'rfq_state': 'done'}, context=context) |
2079 | |
2080 | - self.write(cr, uid, [tender.id], {'state':'done'}, context=context) |
2081 | + self.write(cr, uid, [tender.id], {'state': 'done'}, context=context) |
2082 | self.infolog(cr, uid, "The tender id:%s (%s) has been closed" % ( |
2083 | tender.id, |
2084 | tender.name, |
2085 | @@ -444,12 +444,12 @@ |
2086 | raise osv.except_osv(_('Error !'), _("Some RfQ are already Closed. Integrity failure.")) |
2087 | # all rfqs must have been treated |
2088 | rfq_ids = po_obj.search(cr, uid, [('tender_id', '=', tender.id), |
2089 | - ('rfq_state', 'in', ('draft', 'sent',)),], context=context) |
2090 | + ('rfq_state', 'in', ('draft', 'sent',)), ], context=context) |
2091 | if rfq_ids: |
2092 | raise osv.except_osv(_('Warning !'), _("Generated RfQs must be Updated or Cancelled.")) |
2093 | # at least one rfq must be updated and not canceled |
2094 | rfq_ids = po_obj.search(cr, uid, [('tender_id', '=', tender.id), |
2095 | - ('rfq_state', 'in', ('updated',)),], context=context) |
2096 | + ('rfq_state', 'in', ('updated',)), ], context=context) |
2097 | if not rfq_ids: |
2098 | raise osv.except_osv(_('Warning !'), _("At least one RfQ must be in state Updated.")) |
2099 | |
2100 | @@ -469,7 +469,7 @@ |
2101 | suppliers = {} |
2102 | for line in tender.tender_line_ids: |
2103 | if line.product_id and line.supplier_id and line.line_state != 'cancel': |
2104 | - suppliers.update({line.product_id.id:line.supplier_id.id,}) |
2105 | + suppliers.update({line.product_id.id: line.supplier_id.id, }) |
2106 | # rfq corresponding to this tender with done state (has been updated and not canceled) |
2107 | # the list of rfq which will be compared |
2108 | c = dict(context, active_ids=rfq_ids, tender_id=tender.id, end_wizard=False, suppliers=suppliers,) |
2109 | @@ -503,7 +503,7 @@ |
2110 | # find the corresponding suppinfo with sequence -99 |
2111 | info_99_list = info_obj.search(cr, uid, [('product_id', '=', product.product_tmpl_id.id), |
2112 | ('name', '=', line.purchase_order_line_id.order_id.partner_id.id), |
2113 | - ('sequence', '=', -99),], context=context) |
2114 | + ('sequence', '=', -99), ], context=context) |
2115 | |
2116 | if info_99_list: |
2117 | # we drop it |
2118 | @@ -513,12 +513,12 @@ |
2119 | values = {'name': line.supplier_id.id, |
2120 | 'product_name': False, |
2121 | 'product_code': False, |
2122 | - 'sequence' : -99, |
2123 | + 'sequence': -99, |
2124 | #'product_uom': line.product_uom.id, |
2125 | #'min_qty': 0.0, |
2126 | #'qty': function |
2127 | - 'product_id' : product.product_tmpl_id.id, |
2128 | - 'delay' : int(line.supplier_id.default_delay), |
2129 | + 'product_id': product.product_tmpl_id.id, |
2130 | + 'delay': int(line.supplier_id.default_delay), |
2131 | #'pricelist_ids': created just after |
2132 | #'company_id': default value |
2133 | } |
2134 | @@ -574,10 +574,13 @@ |
2135 | self.pool.get('purchase.order').log(cr, uid, po_to_use, 'The Purchase Order %s for supplier %s has been created.' % (po.name, po.partner_id.name)) |
2136 | self.pool.get('purchase.order').infolog(cr, uid, 'The Purchase order %s for supplier %s has been created.' % (po.name, po.partner_id.name)) |
2137 | |
2138 | + anal_dist_to_copy = tender_line.sale_order_line_id and tender_line.sale_order_line_id.analytic_distribution_id.id or False |
2139 | + |
2140 | # attach new PO line: |
2141 | pol_values = { |
2142 | 'order_id': po_to_use, |
2143 | 'linked_sol_id': tender_line.sale_order_line_id.id or False, |
2144 | + 'origin': tender_line.sale_order_line_id and tender_line.sale_order_line_id.order_id.name or False, |
2145 | 'name': tender_line.product_id.partner_ref, |
2146 | 'product_qty': tender_line.qty, |
2147 | 'product_id': tender_line.product_id.id, |
2148 | @@ -589,6 +592,8 @@ |
2149 | 'notes': tender_line.product_id.description_purchase, |
2150 | 'comment': tender_line.comment, |
2151 | } |
2152 | + if anal_dist_to_copy: |
2153 | + pol_values['analytic_distribution_id'] = self.pool.get('analytic.distribution').copy(cr, uid, anal_dist_to_copy, {}, context=context) |
2154 | self.pool.get('purchase.order.line').create(cr, uid, pol_values, context=context) |
2155 | |
2156 | # when the po is generated, the tender is done - no more modification or comparison |
2157 | @@ -769,9 +774,9 @@ |
2158 | ''' |
2159 | _name = 'tender.line' |
2160 | _rec_name = 'product_id' |
2161 | - _description= 'Tender Line' |
2162 | + _description = 'Tender Line' |
2163 | |
2164 | - _SELECTION_TENDER_STATE = [('draft', 'Draft'),('comparison', 'Comparison'), ('done', 'Closed'),] |
2165 | + _SELECTION_TENDER_STATE = [('draft', 'Draft'), ('comparison', 'Comparison'), ('done', 'Closed'), ] |
2166 | |
2167 | def on_product_change(self, cr, uid, id, product_id, uom_id, product_qty, categ, context=None): |
2168 | ''' |
2169 | @@ -793,7 +798,7 @@ |
2170 | result['value']['text_error'] = False |
2171 | result['value']['to_correct_ok'] = False |
2172 | |
2173 | - res_qty = self.onchange_uom_qty(cr, uid, id, uom_id or result.get('value', {}).get('product_uom',False), product_qty) |
2174 | + res_qty = self.onchange_uom_qty(cr, uid, id, uom_id or result.get('value', {}).get('product_uom', False), product_qty) |
2175 | result['value']['qty'] = res_qty.get('value', {}).get('qty', product_qty) |
2176 | |
2177 | if uom_id: |
2178 | @@ -841,9 +846,9 @@ |
2179 | else: |
2180 | result[line.id]['currency_id'] = result[line.id]['func_currency_id'] |
2181 | |
2182 | - result[line.id]['func_total_price'] = self.pool.get('res.currency').compute(cr, uid, result[line.id]['currency_id'], |
2183 | - result[line.id]['func_currency_id'], |
2184 | - result[line.id]['total_price'], |
2185 | + result[line.id]['func_total_price'] = self.pool.get('res.currency').compute(cr, uid, result[line.id]['currency_id'], |
2186 | + result[line.id]['func_currency_id'], |
2187 | + result[line.id]['total_price'], |
2188 | round=True, context=context) |
2189 | |
2190 | return result |
2191 | @@ -865,7 +870,7 @@ |
2192 | 'date_planned': fields.related('tender_id', 'requested_date', type='date', string='Requested Date', store=False,), |
2193 | # functions |
2194 | 'supplier_id': fields.related('purchase_order_line_id', 'order_id', 'partner_id', type='many2one', relation='res.partner', string="Supplier", readonly=True), |
2195 | - 'price_unit': fields.related('purchase_order_line_id', 'price_unit', type="float", string="Price unit", digits_compute=dp.get_precision('Purchase Price Computation'), readonly=True), # same precision as related field! |
2196 | + 'price_unit': fields.related('purchase_order_line_id', 'price_unit', type="float", string="Price unit", digits_compute=dp.get_precision('Purchase Price Computation'), readonly=True), # same precision as related field! |
2197 | 'delivery_confirmed_date': fields.related('purchase_order_line_id', 'confirmed_delivery_date', type="date", string="Delivery Confirmed Date", readonly=True), |
2198 | 'total_price': fields.function(_get_total_price, method=True, type='float', string="Total Price", digits_compute=dp.get_precision('Purchase Price'), multi='total'), |
2199 | 'currency_id': fields.function(_get_total_price, method=True, type='many2one', relation='res.currency', string='Cur.', multi='total'), |
2200 | @@ -874,7 +879,7 @@ |
2201 | 'purchase_order_id': fields.related('purchase_order_line_id', 'order_id', type='many2one', relation='purchase.order', string="Related RfQ", readonly=True,), |
2202 | 'purchase_order_line_number': fields.related('purchase_order_line_id', 'line_number', type="char", string="Related Line Number", readonly=True,), |
2203 | 'state': fields.related('tender_id', 'state', type="selection", selection=_SELECTION_TENDER_STATE, string="State",), |
2204 | - 'line_state': fields.selection([('draft','Draft'), ('cancel', 'Canceled'), ('done', 'Done')], string='State', readonly=True), |
2205 | + 'line_state': fields.selection([('draft', 'Draft'), ('cancel', 'Canceled'), ('done', 'Done')], string='State', readonly=True), |
2206 | 'comment': fields.char(size=128, string='Comment'), |
2207 | 'has_to_be_resourced': fields.boolean(string='Has to be resourced'), |
2208 | 'created_by_rfq': fields.boolean(string='Created by RfQ'), |
2209 | @@ -940,7 +945,7 @@ |
2210 | |
2211 | return super(tender_line, self).copy(cr, uid, id, default, context=context) |
2212 | |
2213 | - def cancel_sourcing(self,cr, uid, ids, context=None): |
2214 | + def cancel_sourcing(self, cr, uid, ids, context=None): |
2215 | ''' |
2216 | Cancel the line and re-source the FO line |
2217 | ''' |
2218 | @@ -997,11 +1002,11 @@ |
2219 | del context['update_or_cancel_line_not_delete'] |
2220 | |
2221 | # Update the FO state |
2222 | - #for so in so_to_update: |
2223 | + # for so in so_to_update: |
2224 | # wf_service.trg_write(uid, 'sale.order', so, cr) |
2225 | |
2226 | # UF-733: if all tender lines have been compared (have PO Line id), then set the tender to be ready |
2227 | - # for proceeding to other actions (create PO, Done etc) |
2228 | + # for proceeding to other actions (create PO, Done etc) |
2229 | for tender in tender_obj.browse(cr, uid, list(tender_to_update), context=context): |
2230 | if tender.internal_state == 'draft': |
2231 | flag = True |
2232 | @@ -1149,7 +1154,7 @@ |
2233 | ''' |
2234 | if context is None: |
2235 | context = {} |
2236 | - if isinstance(ids, (int,long)): |
2237 | + if isinstance(ids, (int, long)): |
2238 | ids = [ids] |
2239 | |
2240 | for tender_line in self.browse(cr, uid, ids, context=context): |
2241 | @@ -1164,7 +1169,7 @@ |
2242 | 'origin': (tender.sale_order_id and tender.sale_order_id.name or "") + '; ' + tender.name, |
2243 | 'partner_id': tender_line.supplier_id.id, |
2244 | 'partner_address_id': self.pool.get('res.partner').address_get(cr, uid, [tender_line.supplier_id.id], ['default'])['default'], |
2245 | - 'customer_id': tender_line.sale_order_line_id and tender_line.sale_order_line_id.order_id.partner_id.id or False, |
2246 | + 'customer_id': tender_line.sale_order_line_id and tender_line.sale_order_line_id.order_id.partner_id.id or False, |
2247 | 'location_id': tender.location_id.id, |
2248 | 'company_id': tender.company_id.id, |
2249 | 'cross_docking_ok': True if tender_line.sale_order_line_id else False, |
2250 | @@ -1189,7 +1194,7 @@ |
2251 | tender class |
2252 | ''' |
2253 | _inherit = 'tender' |
2254 | - _columns = {'tender_line_ids': fields.one2many('tender.line', 'tender_id', string="Tender lines", states={'draft':[('readonly',False)]}, readonly=True), |
2255 | + _columns = {'tender_line_ids': fields.one2many('tender.line', 'tender_id', string="Tender lines", states={'draft': [('readonly', False)]}, readonly=True), |
2256 | } |
2257 | |
2258 | def copy(self, cr, uid, id, default=None, context=None): |
2259 | @@ -1252,16 +1257,16 @@ |
2260 | 'rfq_id': fields.many2one('purchase.order', string='RfQ', readonly=True), |
2261 | 'rfq_line_id': fields.many2one('purchase.order.line', string='RfQ line', readonly=True), |
2262 | 'is_rfq_done': fields.boolean(string="RfQ Closed"), |
2263 | - 'state': fields.selection([('draft','Draft'), |
2264 | - ('confirmed','Confirmed'), |
2265 | - ('exception','Exception'), |
2266 | - ('running','Converted'), |
2267 | - ('cancel','Cancelled'), |
2268 | - ('ready','Ready'), |
2269 | - ('done','Closed'), |
2270 | + 'state': fields.selection([('draft', 'Draft'), |
2271 | + ('confirmed', 'Confirmed'), |
2272 | + ('exception', 'Exception'), |
2273 | + ('running', 'Converted'), |
2274 | + ('cancel', 'Cancelled'), |
2275 | + ('ready', 'Ready'), |
2276 | + ('done', 'Closed'), |
2277 | ('tender', 'Tender'), |
2278 | ('rfq', 'Request for Quotation'), |
2279 | - ('waiting','Waiting'),], 'State', required=True, |
2280 | + ('waiting', 'Waiting'), ], 'State', required=True, |
2281 | help='When a procurement is created the state is set to \'Draft\'.\n If the procurement is confirmed, the state is set to \'Confirmed\'.\ |
2282 | \nAfter confirming the state is set to \'Running\'.\n If any exception arises in the order then the state is set to \'Exception\'.\n Once the exception is removed the state becomes \'Ready\'.\n It is in \'Waiting\'. state when the procurement is waiting for another one to finish.'), |
2283 | 'price_unit': fields.float('Unit Price from Tender', digits_compute=dp.get_precision('Purchase Price Computation')), |
2284 | @@ -1279,7 +1284,7 @@ |
2285 | if proc.supplier and not proc.supplier.address: |
2286 | self.write(cr, uid, [proc.id], { |
2287 | 'state': 'exception', |
2288 | - 'message': _('The supplier "%s" has no address defined!')%(proc.supplier.name,), |
2289 | + 'message': _('The supplier "%s" has no address defined!') % (proc.supplier.name,), |
2290 | }, context=context) |
2291 | |
2292 | return True |
2293 | @@ -1308,8 +1313,8 @@ |
2294 | break |
2295 | # find the rfq |
2296 | rfq_id = False |
2297 | - # UTP-934: If source rfq to different supplier, different rfq must be created, and cannot be using the same rfq |
2298 | - rfq_ids = rfq_obj.search(cr, uid, [('sale_order_id', '=', sale_order.id),('partner_id', '=', proc.supplier.id), ('state', '=', 'draft'), ('rfq_ok', '=', True),], context=context) |
2299 | + # UTP-934: If source rfq to different supplier, different rfq must be created, and cannot be using the same rfq |
2300 | + rfq_ids = rfq_obj.search(cr, uid, [('sale_order_id', '=', sale_order.id), ('partner_id', '=', proc.supplier.id), ('state', '=', 'draft'), ('rfq_ok', '=', True), ], context=context) |
2301 | if rfq_ids: |
2302 | rfq_id = rfq_ids[0] |
2303 | # create if not found |
2304 | @@ -1320,7 +1325,7 @@ |
2305 | address_id = partner_obj.address_get(cr, uid, [supplier.id], ['default'])['default'] |
2306 | if not address_id: |
2307 | self.write(cr, uid, [proc.id], { |
2308 | - 'message': _('The supplier "%s" has no address defined!')%(supplier.name,), |
2309 | + 'message': _('The supplier "%s" has no address defined!') % (supplier.name,), |
2310 | }, context=context) |
2311 | continue |
2312 | |
2313 | @@ -1338,7 +1343,7 @@ |
2314 | 'rfq_ok': True, |
2315 | 'from_procurement': True, |
2316 | 'order_type': sale_order.order_type, |
2317 | - 'origin': sale_order.name,}, context=context) |
2318 | + 'origin': sale_order.name, }, context=context) |
2319 | |
2320 | prsd_obj.chk_create(cr, uid, { |
2321 | 'order_id': sale_order.id, |
2322 | @@ -1366,7 +1371,7 @@ |
2323 | self.write(cr, uid, ids, {'rfq_id': rfq_id, 'rfq_line_id': rfq_line_id}, context=context) |
2324 | |
2325 | # log message concerning RfQ creation |
2326 | - rfq_obj.log(cr, uid, rfq_id, "The Request for Quotation '%s' has been created and must be completed before purchase order creation."%rfq_obj.browse(cr, uid, rfq_id, context=context).name, context={'rfq_ok': 1}) |
2327 | + rfq_obj.log(cr, uid, rfq_id, "The Request for Quotation '%s' has been created and must be completed before purchase order creation." % rfq_obj.browse(cr, uid, rfq_id, context=context).name, context={'rfq_ok': 1}) |
2328 | rfq_line = rfq_line_obj.browse(cr, uid, rfq_line_id, context=context) |
2329 | self.infolog(cr, uid, "The FO/IR line id:%s (line number: %s) has been sourced on order to RfQ line id:%s (line number: %s) of the RfQ id:%s (%s)" % ( |
2330 | sale_order_line.id, |
2331 | @@ -1399,7 +1404,7 @@ |
2332 | sale_order_line = sol |
2333 | # find the tender |
2334 | tender_id = False |
2335 | - tender_ids = tender_obj.search(cr, uid, [('sale_order_id', '=', sale_order.id),('state', '=', 'draft'),], context=context) |
2336 | + tender_ids = tender_obj.search(cr, uid, [('sale_order_id', '=', sale_order.id), ('state', '=', 'draft'), ], context=context) |
2337 | if tender_ids: |
2338 | tender_id = tender_ids[0] |
2339 | # create if not found |
2340 | @@ -1433,7 +1438,7 @@ |
2341 | |
2342 | # log message concerning tender creation |
2343 | tender_line = tender_line_obj.browse(cr, uid, tender_line_id, context=context) |
2344 | - tender_obj.log(cr, uid, tender_id, "The tender '%s' has been created and must be completed before purchase order creation."%tender_line.tender_id.name) |
2345 | + tender_obj.log(cr, uid, tender_id, "The tender '%s' has been created and must be completed before purchase order creation." % tender_line.tender_id.name) |
2346 | self.infolog(cr, uid, "The FO/IR line id:%s (%s) has been sourced on order to tender line id:%s of the tender id:%s (%s)" % ( |
2347 | sale_order_line.id, |
2348 | sale_order_line.line_number, |
2349 | @@ -1450,14 +1455,14 @@ |
2350 | ''' |
2351 | set is_tender_done value |
2352 | ''' |
2353 | - self.write(cr, uid, ids, {'is_tender_done': True, 'state': 'exception',}, context=context) |
2354 | + self.write(cr, uid, ids, {'is_tender_done': True, 'state': 'exception', }, context=context) |
2355 | return True |
2356 | |
2357 | def wkf_action_rfq_done(self, cr, uid, ids, context=None): |
2358 | ''' |
2359 | set is_rfq_done value |
2360 | ''' |
2361 | - self.write(cr, uid, ids, {'is_rfq_done': True, 'state': 'exception',}, context=context) |
2362 | + self.write(cr, uid, ids, {'is_rfq_done': True, 'state': 'exception', }, context=context) |
2363 | return True |
2364 | |
2365 | def _get_pricelist_from_currency(self, cr, uid, currency_id, context=None): |
2366 | @@ -1488,7 +1493,7 @@ |
2367 | # do not display a log if we come from po update backward update of so |
2368 | data = self.read(cr, uid, ids, ['so_back_update_dest_po_id_procurement_order'], context=context) |
2369 | if not data[0]['so_back_update_dest_po_id_procurement_order']: |
2370 | - po_obj.log(cr, uid, result, "The Purchase Order '%s' has been created following 'on order' sourcing."%po_obj.browse(cr, uid, result, context=context).name) |
2371 | + po_obj.log(cr, uid, result, "The Purchase Order '%s' has been created following 'on order' sourcing." % po_obj.browse(cr, uid, result, context=context).name) |
2372 | return result |
2373 | |
2374 | def po_values_hook(self, cr, uid, ids, context=None, *args, **kwargs): |
2375 | @@ -1570,7 +1575,7 @@ |
2376 | _constraints = [ |
2377 | (_check_valid_till, |
2378 | 'You must specify a Valid Till date.', |
2379 | - ['valid_till']),] |
2380 | + ['valid_till']), ] |
2381 | |
2382 | def default_get(self, cr, uid, fields, context=None): |
2383 | ''' |
2384 | @@ -1612,7 +1617,7 @@ |
2385 | ''' |
2386 | if context is None: |
2387 | context = {} |
2388 | - if isinstance(ids, (int,long)): |
2389 | + if isinstance(ids, (int, long)): |
2390 | ids = [ids] |
2391 | wf_service = netsvc.LocalService("workflow") |
2392 | |
2393 | @@ -1640,10 +1645,10 @@ |
2394 | purchase_orders = self.read(cr, uid, ids, ['state'], context=context) |
2395 | unlink_ids = [] |
2396 | for s in purchase_orders: |
2397 | - if s['state'] in ['draft','cancel']: |
2398 | + if s['state'] in ['draft', 'cancel']: |
2399 | unlink_ids.append(s['id']) |
2400 | else: |
2401 | - raise osv.except_osv(_('Invalid action !'), _('Cannot delete Purchase Order(s) which are in %s State!') % _(dict(PURCHASE_ORDER_STATE_SELECTION).get(s['state']))) |
2402 | + raise osv.except_osv(_('Invalid action !'), _('Cannot delete Purchase Order(s) which are in %s State!') % _(dict(PURCHASE_ORDER_STATE_SELECTION).get(s['state']))) |
2403 | |
2404 | return super(purchase_order, self).unlink(cr, uid, ids, context=context) |
2405 | |
2406 | @@ -1675,18 +1680,18 @@ |
2407 | This hook belongs to the rfq_sent method from tender_flow>tender_flow.py |
2408 | - check lines after import |
2409 | ''' |
2410 | - pol_obj = self.pool.get('purchase.order.line') |
2411 | + pol_obj = self.pool.get('purchase.order.line') |
2412 | |
2413 | - res = True |
2414 | - empty_lines = pol_obj.search(cr, uid, [ |
2415 | - ('order_id', 'in', ids), |
2416 | - ('product_qty', '<=', 0.00), |
2417 | - ], context=context) |
2418 | - if empty_lines: |
2419 | - raise osv.except_osv( |
2420 | - _('Error'), |
2421 | + res = True |
2422 | + empty_lines = pol_obj.search(cr, uid, [ |
2423 | + ('order_id', 'in', ids), |
2424 | + ('product_qty', '<=', 0.00), |
2425 | + ], context=context) |
2426 | + if empty_lines: |
2427 | + raise osv.except_osv( |
2428 | + _('Error'), |
2429 | _('All lines of the RfQ should have a quantity before sending the RfQ to the supplier'), |
2430 | - ) |
2431 | + ) |
2432 | return res |
2433 | |
2434 | def action_sent(self, cr, uid, ids, context=None): |
2435 | @@ -1695,7 +1700,7 @@ |
2436 | ''' |
2437 | if context is None: |
2438 | context = {} |
2439 | - if isinstance(ids, (int,long)): |
2440 | + if isinstance(ids, (int, long)): |
2441 | ids = [ids] |
2442 | |
2443 | self.write(cr, uid, ids, { |
2444 | @@ -2123,7 +2128,7 @@ |
2445 | |
2446 | po_accepted_values = {'client_action_multi': ['Order Follow Up', |
2447 | 'action_view_purchase_order_group'], |
2448 | - 'client_print_multi': ['Purchase Order (Merged)', |
2449 | + 'client_print_multi': ['Purchase Order (Merged)', |
2450 | 'Purchase Order', |
2451 | 'Allocation report', |
2452 | 'Order impact vs. Budget'], |
2453 | @@ -2146,14 +2151,14 @@ |
2454 | or v[1] == 'Purchase Order' \ |
2455 | or v[1] == 'Purchase Order (Merged)' \ |
2456 | or v[1] == 'Allocation report' \ |
2457 | - or v[1] == 'Order impact vs. Budget' : |
2458 | + or v[1] == 'Order impact vs. Budget': |
2459 | new_values.append(v) |
2460 | elif context.get('request_for_quotation', False) and 'purchase.order' in [x[0] for x in models]: |
2461 | new_values = [] |
2462 | for v in values: |
2463 | if key == 'action' and v[1] in rfq_accepted_values[key2] \ |
2464 | or v[1] == 'Request for Quotation' \ |
2465 | - or v[1] == 'Request For Quotation Excel Export' : |
2466 | + or v[1] == 'Request For Quotation Excel Export': |
2467 | new_values.append(v) |
2468 | |
2469 | return new_values |
2470 | |
2471 | === modified file 'bin/release.py' |
2472 | --- bin/release.py 2018-01-30 16:12:16 +0000 |
2473 | +++ bin/release.py 2018-02-12 10:19:50 +0000 |
2474 | @@ -20,7 +20,7 @@ |
2475 | ############################################################################## |
2476 | |
2477 | name = 'openerp-server' |
2478 | -version = 'UF7.1' |
2479 | +version = 'UF8.0dev' |
2480 | major_version = '7.0' |
2481 | description = 'OpenERP Server' |
2482 | long_desc = '''OpenERP is a complete ERP and CRM. The main features are accounting (analytic |
2483 | |
2484 | === modified file 'bin/sql_db.py' |
2485 | --- bin/sql_db.py 2017-10-19 14:14:14 +0000 |
2486 | +++ bin/sql_db.py 2018-02-12 10:19:50 +0000 |
2487 | @@ -390,6 +390,9 @@ |
2488 | try: |
2489 | result = psycopg2.connect(dsn=dsn, connection_factory=PsycoConnection) |
2490 | except psycopg2.Error: |
2491 | + log = logging.getLogger() |
2492 | + if len(log.handlers) > 1: |
2493 | + log.removeHandler(log.handlers[1]) |
2494 | self.__logger.exception('Connection to the database failed') |
2495 | raise |
2496 | self._connections.append((result, True, time.time())) |