Merge lp:~unifield-team/unifield-server/us-1960 into lp:unifield-server/uf3
- us-1960
- Merge into uf3
Proposed by
jftempo
Status: | Needs review |
---|---|
Proposed branch: | lp:~unifield-team/unifield-server/us-1960 |
Merge into: | lp:unifield-server/uf3 |
Diff against target: |
1291 lines (+341/-194) 21 files modified
bin/addons/account_override/account_move_line.py (+7/-1) bin/addons/account_override/finance_export.py (+20/-1) bin/addons/account_reconciliation/wizard/account_reconcile.py (+31/-7) bin/addons/base/ir/ir_attachment.py (+4/-4) bin/addons/msf_doc_import/report/po_simulation_screen_xls_report.mako (+2/-2) bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py (+14/-5) bin/addons/msf_profile/i18n/fr_MF.po (+81/-2) bin/addons/order_types/stock.py (+13/-15) bin/addons/product_attributes/product_attributes.py (+1/-1) bin/addons/purchase_override/report/merged_order.rml (+1/-1) bin/addons/purchase_override/report/purchase_order.rml (+1/-1) bin/addons/stock/stock_view.xml (+2/-1) bin/addons/supplier_catalogue/product.py (+116/-99) bin/addons/tender_flow/report/tender_rfq_comparison_xls.mako (+1/-1) bin/addons/tender_flow/report/tender_rfq_comparison_xls.py (+0/-39) bin/addons/vertical_integration/report/hq_report_ocb.py (+14/-6) bin/addons/vertical_integration/wizard/ocb_export.py (+21/-2) bin/osv/orm.py (+7/-3) bin/release.py (+1/-1) bin/service/web_services.py (+2/-2) bin/tools/translate.py (+2/-0) |
To merge this branch: | bzr merge lp:~unifield-team/unifield-server/us-1960 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
UniField Dev Team | Pending | ||
Review via email: mp+314829@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Unmerged revisions
- 4127. By Quentin THEURET @Amaris
-
US-1960 [FIX] Do not raise an error if the product is already inactive when a sync. update deactivates the product
- 4126. By jftempo
-
Version 4.0dev
- 4125. By jftempo
-
US-2047 [FIX] Attachment config: French translation missing
- 4124. By jftempo
-
US-1822 [FIX] Unreconciliation: allow to unreconcile manually reconciled system JIs
- 4123. By jftempo
-
US-1900 [FIX] View translation when a node has multiple attributes + export/translate help msg
- 4122. By jftempo
-
US-1571 [IMP] Delivery Orders: new "Draft" search button
- 4121. By jftempo
-
US-1694 [FIX] Comparison RFQ report: fix currency
- 4120. By jftempo
-
US-1804 [FIX] Typo + translation
- 4119. By jftempo
-
US-1743 [FIX] Export xls PO simulation: fix xls corruption when RTS is not set
- 4118. By jftempo
-
US-1773 [IMP] Reconciliation must include at least one debit and one credit
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/addons/account_override/account_move_line.py' |
2 | --- bin/addons/account_override/account_move_line.py 2016-11-24 15:02:51 +0000 |
3 | +++ bin/addons/account_override/account_move_line.py 2017-01-16 10:24:56 +0000 |
4 | @@ -484,7 +484,13 @@ |
5 | context = {} |
6 | if context.get('from_web_menu', False): |
7 | for ml in self.browse(cr, uid, ids): |
8 | - if ml.move_id and ml.move_id.status == 'sys': |
9 | + # US-1822 When unreconciling entries, to check if the write can be done use the reconciliation status |
10 | + # (manual / auto) instead of the JI one |
11 | + if 'unreconcile_date' in vals and vals['unreconcile_date']: |
12 | + reconciliation = ml.reconcile_id or ml.reconcile_partial_id or False |
13 | + if reconciliation and reconciliation.type == 'auto': |
14 | + raise osv.except_osv(_('Warning'), _('Only manually reconciled entries can be unreconciled.')) |
15 | + elif ml.move_id and ml.move_id.status == 'sys': |
16 | raise osv.except_osv(_('Warning'), _('You cannot change Journal Items that comes from the system!')) |
17 | # Check date validity with period |
18 | self._check_date_validity(cr, uid, ids, vals) |
19 | |
20 | === modified file 'bin/addons/account_override/finance_export.py' |
21 | --- bin/addons/account_override/finance_export.py 2016-01-20 10:18:19 +0000 |
22 | +++ bin/addons/account_override/finance_export.py 2017-01-16 10:24:56 +0000 |
23 | @@ -56,9 +56,13 @@ |
24 | + Do not repeat headers if you use the same filename for more than 1 request. This avoid having multiple lines as headers. |
25 | """ |
26 | |
27 | - def __init__(self, sql, process): |
28 | + def __init__(self, sql, process, context): |
29 | self.sqlrequests = sql |
30 | self.processrequests = process |
31 | + if 'background_id' in context: |
32 | + self.bg_id = context['background_id'] |
33 | + else: |
34 | + self.bg_id = None |
35 | |
36 | def line_to_utf8(self, line): |
37 | """ |
38 | @@ -183,6 +187,14 @@ |
39 | # - filename |
40 | # - key of sqlrequests dict to fetch its SQL request |
41 | files = {} |
42 | + |
43 | + if self.bg_id: |
44 | + bg_report_obj = pool.get('memory.background.report') |
45 | + else: |
46 | + bg_report_obj = None |
47 | + |
48 | + request_count = 0 |
49 | + |
50 | for fileparams in self.processrequests: |
51 | if not fileparams.get('filename', False): |
52 | raise osv.except_osv(_('Error'), _('Filename param is missing!')) |
53 | @@ -256,6 +268,13 @@ |
54 | if filename not in files: |
55 | files[filename] = tmp_file |
56 | |
57 | + if bg_report_obj: |
58 | + request_count += 1 |
59 | + percent = request_count / float(len(self.processrequests) + 1) # add 1 |
60 | + # to the total because task is not finish at the end of the for |
61 | + # loop, there is some ZIP work to do |
62 | + bg_report_obj.update_percent(cr, uid, [self.bg_id], percent) |
63 | + |
64 | # WRITE RESULT INTO AN ARCHIVE |
65 | # Create a ZIP file |
66 | out_zipfile = zipfile.ZipFile(zip_buffer, "w") |
67 | |
68 | === modified file 'bin/addons/account_reconciliation/wizard/account_reconcile.py' |
69 | --- bin/addons/account_reconciliation/wizard/account_reconcile.py 2016-10-04 10:11:04 +0000 |
70 | +++ bin/addons/account_reconciliation/wizard/account_reconcile.py 2017-01-16 10:24:56 +0000 |
71 | @@ -24,7 +24,6 @@ |
72 | from osv import osv |
73 | from osv import fields |
74 | from tools.translate import _ |
75 | -import time |
76 | from collections import defaultdict |
77 | |
78 | class account_move_line_reconcile(osv.osv_memory): |
79 | @@ -33,8 +32,8 @@ |
80 | |
81 | _columns = { |
82 | 'state': fields.selection([('total', 'Full Reconciliation'), ('partial', 'Partial Reconciliation'), |
83 | - ('total_change', 'Full Reconciliation with change'), ('partial_change', 'Partial Reconciliation with change')], string="State", |
84 | - required=True, readonly=True), |
85 | + ('total_change', 'Full Reconciliation with change'), ('partial_change', 'Partial Reconciliation with change')], string="State", |
86 | + required=True, readonly=True), |
87 | 'different_currencies': fields.boolean('Is this reconciliation in different currencies? (2 at most)'), |
88 | } |
89 | |
90 | @@ -145,7 +144,13 @@ |
91 | state = 'total_change' |
92 | currency_id = False |
93 | currency2_id = False |
94 | + rec_partial_set = set() |
95 | + rec_partial_leg_nb = 0 |
96 | for line in account_move_line_obj.browse(cr, uid, context['active_ids'], context=context): |
97 | + # for partially reconciled lines: store the different partial reconciliation ids and the total nb of legs |
98 | + if line.reconcile_partial_id: |
99 | + rec_partial_leg_nb += 1 |
100 | + rec_partial_set.add(line.reconcile_partial_id.id) |
101 | # prepare some values |
102 | account_id = line.account_id.id |
103 | # some verifications |
104 | @@ -166,9 +171,9 @@ |
105 | # UTP-1040: 3RD party is also desactivated in case of account that is "Disregard Third Party" as "type_for_register" |
106 | if not transfer and not disregard_third_party: |
107 | third_party = { |
108 | - 'partner_id': line.partner_id and line.partner_id.id or False, |
109 | - 'employee_id': line.employee_id and line.employee_id.id or False, |
110 | - 'transfer_journal_id': line.transfer_journal_id and line.transfer_journal_id.id or False} |
111 | + 'partner_id': line.partner_id and line.partner_id.id or False, |
112 | + 'employee_id': line.employee_id and line.employee_id.id or False, |
113 | + 'transfer_journal_id': line.transfer_journal_id and line.transfer_journal_id.id or False} |
114 | if not prev_third_party: |
115 | prev_third_party = third_party |
116 | if prev_third_party != third_party: |
117 | @@ -186,8 +191,27 @@ |
118 | debit += line.debit_currency |
119 | fcredit += line.credit |
120 | fdebit += line.debit |
121 | + |
122 | + diff_in_booking = abs(debit - credit) |
123 | + # (US-1847) If we reconcile together entries from at least 2 different partial reconciliations: |
124 | + # - the reconciliation must be total |
125 | + # - all the legs of the partial reconciliations must be included |
126 | + if len(rec_partial_set) > 1: |
127 | + if diff_in_booking > 10**-3: |
128 | + raise osv.except_osv(_('Error'), _('Only full reconciliation is allowed when entries from two (or more)' |
129 | + ' different partial reconciliations are included.')) |
130 | + elif rec_partial_leg_nb != account_move_line_obj.search(cr, uid, |
131 | + [('reconcile_partial_id', 'in', list(rec_partial_set))], |
132 | + count=True, order='NO_ORDER', context=context): |
133 | + raise osv.except_osv(_('Error'), |
134 | + _('When entries from different partial reconciliations are reconciled together, ' |
135 | + 'all the legs of these partial reconciliations must be included.')) |
136 | + |
137 | + if debit <= 10**-3 or credit <= 10**-3: |
138 | + raise osv.except_osv(_('Error'), _('Both Debit and Credit lines are required for reconciliation.')) |
139 | + |
140 | # Adapt state value |
141 | - if abs(debit - credit) <= 10**-3: |
142 | + if diff_in_booking <= 10**-3: |
143 | state = 'total' |
144 | if transfer_with_change: |
145 | debit = fdebit |
146 | |
147 | === modified file 'bin/addons/base/ir/ir_attachment.py' |
148 | --- bin/addons/base/ir/ir_attachment.py 2016-11-24 14:41:24 +0000 |
149 | +++ bin/addons/base/ir/ir_attachment.py 2017-01-16 10:24:56 +0000 |
150 | @@ -442,12 +442,12 @@ |
151 | |
152 | _columns = { |
153 | 'name': fields.char('Path to save the attachments to', size=256, |
154 | - help="The complet path to the local folder where Unifield will save attachment files.", |
155 | + help="The complete path to the local folder where Unifield will save attachment files.", |
156 | required=True), |
157 | 'next_migration' : fields.datetime('Next migration date', |
158 | - help="Next planned execution of thei migration to move the old attachment to the path you defined"), |
159 | + help="Next planned execution of the migration to move the old attachment to the path defined"), |
160 | 'migration_date': fields.datetime('Last migration execution date', readonly=True), |
161 | - 'migration_error': fields.text('Migration error', readonly=True), |
162 | + 'migration_error': fields.text('Migration errors', readonly=True), |
163 | 'is_migration_running': fields.function(_is_migration_running, |
164 | type='boolean', string='Moving files...', method=True, |
165 | readonly=True), |
166 | @@ -614,7 +614,7 @@ |
167 | 'numbercall': 1, |
168 | 'active': True, |
169 | } |
170 | - cron_obj.write(cr, uid, default_migrate_attachment.id, values, context=context) |
171 | + cron_obj.write(cr, uid, default_migrate_attachment.id, values, context=context) |
172 | return super(attachment_config, self).write(cr, uid, ids, vals, context=context) |
173 | |
174 | attachment_config() |
175 | |
176 | === modified file 'bin/addons/msf_doc_import/report/po_simulation_screen_xls_report.mako' |
177 | --- bin/addons/msf_doc_import/report/po_simulation_screen_xls_report.mako 2016-06-13 07:41:13 +0000 |
178 | +++ bin/addons/msf_doc_import/report/po_simulation_screen_xls_report.mako 2017-01-16 10:24:56 +0000 |
179 | @@ -139,7 +139,7 @@ |
180 | % else: |
181 | <Cell ss:StyleID="line" ></Cell> |
182 | % endif |
183 | - % if o.imp_ready_to_ship_date != o.in_ready_to_ship_date: |
184 | + % if o.imp_ready_to_ship_date not in ('False', False) and o.imp_ready_to_ship_date != o.in_ready_to_ship_date: |
185 | <Cell ss:StyleID="line_change_short_date" ss:MergeAcross="1" ><Data ss:Type="DateTime">${(o.imp_ready_to_ship_date)|n}T00:00:00.000</Data></Cell> |
186 | % elif o.imp_ready_to_ship_date not in ('False', False): |
187 | <Cell ss:StyleID="short_date" ss:MergeAcross="1" ><Data ss:Type="DateTime">${(o.imp_ready_to_ship_date)|n}T00:00:00.000</Data></Cell> |
188 | @@ -155,7 +155,7 @@ |
189 | % else: |
190 | <Cell ss:StyleID="line" ></Cell> |
191 | % endif |
192 | - % if o.imp_shipment_date != o.in_shipment_date: |
193 | + % if o.imp_shipment_date not in ('False', False) and o.imp_shipment_date != o.in_shipment_date: |
194 | <Cell ss:StyleID="line_change_short_date" ss:MergeAcross="1" ><Data ss:Type="DateTime">${(o.imp_shipment_date)|n}T00:00:00.000</Data></Cell> |
195 | % elif o.imp_shipment_date not in ('False', False): |
196 | <Cell ss:StyleID="short_date" ss:MergeAcross="1" ><Data ss:Type="DateTime">${(o.imp_shipment_date)|n}T00:00:00.000</Data></Cell> |
197 | |
198 | === modified file 'bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py' |
199 | --- bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2016-11-09 09:07:38 +0000 |
200 | +++ bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2017-01-16 10:24:56 +0000 |
201 | @@ -155,10 +155,6 @@ |
202 | 'return to IN form view and re-try.'), |
203 | ) |
204 | |
205 | - self.write(cr, uid, [proc.id], { |
206 | - 'already_processed': True, |
207 | - }, context=context) |
208 | - |
209 | for line in proc.move_ids: |
210 | # If one line as an error, return to wizard |
211 | if line.integrity_status != 'empty': |
212 | @@ -172,6 +168,10 @@ |
213 | 'context': context, |
214 | } |
215 | |
216 | + self.write(cr, uid, [proc.id], { |
217 | + 'already_processed': True, |
218 | + }, context=context) |
219 | + |
220 | for line in proc.move_ids: |
221 | # if no quantity, don't process the move |
222 | if not line.quantity: |
223 | @@ -318,7 +318,16 @@ |
224 | _('Processing Error'), |
225 | _('No data to process !'), |
226 | ) |
227 | - incoming_obj.write(cr, uid, ids, {'draft': True}, context=context) |
228 | + |
229 | + # make sure that the current incoming proc is not already processed : |
230 | + for r in incoming_obj.read(cr, uid, ids, ['already_processed']): |
231 | + if not r['already_processed']: |
232 | + incoming_obj.write(cr, uid, ids, {'draft': True}, context=context) |
233 | + else: |
234 | + raise osv.except_osv( |
235 | + _('Error'), _('The incoming shipment has already been processed, you cannot save it as draft.') |
236 | + ) |
237 | + |
238 | return {} |
239 | |
240 | def force_process(self, cr, uid, ids, context=None): |
241 | |
242 | === modified file 'bin/addons/msf_profile/i18n/fr_MF.po' |
243 | --- bin/addons/msf_profile/i18n/fr_MF.po 2016-12-07 06:47:50 +0000 |
244 | +++ bin/addons/msf_profile/i18n/fr_MF.po 2017-01-16 10:24:56 +0000 |
245 | @@ -75286,16 +75286,61 @@ |
246 | msgstr "Dossier d'enregistrement des pièces jointes" |
247 | |
248 | #. module: base |
249 | -#: help:attachment.config,name:0 |
250 | -msgid "The complet path to the local folder where Unifield will save attachment files." |
251 | +#: field:attachment.config,next_migration:444 |
252 | +msgid "Next migration date" |
253 | +msgstr "Date de la prochaine migration" |
254 | + |
255 | +#. module: base |
256 | +#: help:attachment.config,name:445 |
257 | +msgid "The complete path to the local folder where Unifield will save attachment files." |
258 | msgstr "Dossier dans lequel les pièces jointes seront stockées." |
259 | |
260 | #. module: base |
261 | +#: help:attachment.config,next_migration:448 |
262 | +msgid "Next planned execution of the migration to move the old attachment to the path defined" |
263 | +msgstr "Prochaine execution planifiée de migration pour déplacer les anciènes pièces jointes vers le nouveau chemin défini." |
264 | + |
265 | +#. module: base |
266 | +#: field:attachment.config,migration_date:449 |
267 | +msgid "Last migration execution date" |
268 | +msgstr "Date de la dernière migration" |
269 | + |
270 | +#. module: base |
271 | +#: field:attachment.config,migration_error:450 |
272 | +msgid "Migration errors" |
273 | +msgstr "Erreurs de migration" |
274 | + |
275 | +#. module: base |
276 | +#: field:attachment.config,is_migration_running:452 |
277 | +msgid "Moving files..." |
278 | +msgstr "Déplacement des fichiers..." |
279 | + |
280 | +#. module: base |
281 | +#: field:attachment.config,moving_rate:454 |
282 | +msgid "Moving process" |
283 | +msgstr "Avancement du déplacement" |
284 | + |
285 | +#. module: base |
286 | +#: constraint:attachment.config:469 |
287 | +msgid "You cannot have more than one Attachment configuration" |
288 | +msgstr "Vous ne pouvez pas avoir plus d'une configuration de pièces jointes" |
289 | + |
290 | +#. module: base |
291 | #: view:attachment.config:0 |
292 | #: model:ir.model,name:base.model_attachment_config |
293 | msgid "Attachment configuration" |
294 | msgstr "Configuration des pièces jointes" |
295 | |
296 | +#. module: base |
297 | +#: view:attachment.config:0 |
298 | +msgid "Old attachments migration" |
299 | +msgstr "Migration des anciennes pièces jointes" |
300 | + |
301 | +#. module: base |
302 | +#: view:attachment.config:0 |
303 | +msgid "Migrate old attachement to store them in local file system" |
304 | +msgstr "Migrer les anciennes pièces jointes pour les stocker sur le système de fichier local" |
305 | + |
306 | #. module: board |
307 | #: code:addons/board/queries_finance.py:11 |
308 | #: code:addons/board/queries_finance.py:30 |
309 | @@ -75769,3 +75814,37 @@ |
310 | msgid "Partner Created on this instance" |
311 | msgstr "Partenaire créé sur cette instance" |
312 | |
313 | +#. module: account_reconciliation |
314 | +#: code:addons/account_reconciliation/wizard/account_reconcile.py:207 |
315 | +#, python-format |
316 | +msgid "When entries from different partial reconciliations are reconciled together, all the legs of these partial reconciliations must be included." |
317 | +msgstr "Lorsque des entrées de différents lettrages partiels sont lettrées ensemble, toutes les lignes de ces lettrage partiels doivent être incluses." |
318 | + |
319 | +#. module: account_reconciliation |
320 | +#: code:addons/account_reconciliation/wizard/account_reconcile.py:201 |
321 | +#, python-format |
322 | +msgid "Only full reconciliation is allowed when entries from two (or more) different partial reconciliations are included." |
323 | +msgstr "Seul un lettrage complet est autorisé lorsque des entrées de deux lettrages partiels différents (ou plus) sont incluses." |
324 | + |
325 | +#. module: account_reconciliation |
326 | +#: code:addons/account_reconciliation/wizard/account_reconcile.py:190 |
327 | +#, python-format |
328 | +msgid "Both Debit and Credit lines are required for reconciliation." |
329 | +msgstr "Un lettrage requiert à la fois des lignes en débit et en crédit." |
330 | + |
331 | +#. module: msf_outgoing |
332 | +#: code:addons/msf_outgoing/wizard/incoming_shipment_processor.py:328 |
333 | +#, python-format |
334 | +msgid "The incoming shipment has already been processed, you cannot save it as draft." |
335 | +msgstr "La livraison a déjà été traitée, vous ne pouvez pas la sauvegarder en brouillon." |
336 | + |
337 | +#. module: stock |
338 | +#: view:stock.picking:0 |
339 | +msgid "Delivery orders which are in draft state" |
340 | +msgstr "Bons de livraison dans l'état brouillon" |
341 | + |
342 | +#. module: account_override |
343 | +#: code:addons/account_override/account_move_line.py:491 |
344 | +#, python-format |
345 | +msgid "Only manually reconciled entries can be unreconciled." |
346 | +msgstr "Seules les entrées lettrées manuellement peuvent être délettrées." |
347 | |
348 | === modified file 'bin/addons/order_types/stock.py' |
349 | --- bin/addons/order_types/stock.py 2016-05-17 12:12:38 +0000 |
350 | +++ bin/addons/order_types/stock.py 2017-01-16 10:24:56 +0000 |
351 | @@ -192,7 +192,7 @@ |
352 | if new_ids: |
353 | stock_move_obj = self.pool.get('stock.move') |
354 | move_line_read_list = self.read(cr, uid, new_ids, ['id', 'move_lines', 'type'], |
355 | - context=context) |
356 | + context=context) |
357 | for move_line_dict in move_line_read_list: |
358 | stock_move_ids = move_line_dict['move_lines'] |
359 | if stock_move_obj.search(cr, uid, [('id', 'in', stock_move_ids), |
360 | @@ -200,7 +200,7 @@ |
361 | ('donation_exp', |
362 | 'donation_st', |
363 | 'in_kind')), |
364 | - ], context=context): |
365 | + ], context=context): |
366 | res[move_line_dict['id']] = True |
367 | return res |
368 | |
369 | @@ -227,12 +227,12 @@ |
370 | for picking in self.browse(cr, uid, ids): |
371 | for move in picking.move_lines: |
372 | self.pool.get('stock.certificate.valuation').create(cr, uid, {'picking_id': picking.id, |
373 | - 'product_id': move.product_id.id, |
374 | - 'qty': move.product_qty, |
375 | - 'print_id': print_id, |
376 | - 'move_id': move.id, |
377 | - 'prodlot_id': move.prodlot_id.id, |
378 | - 'unit_price': move.product_id.list_price}) |
379 | + 'product_id': move.product_id.id, |
380 | + 'qty': move.product_qty, |
381 | + 'print_id': print_id, |
382 | + 'move_id': move.id, |
383 | + 'prodlot_id': move.prodlot_id.id, |
384 | + 'unit_price': move.product_id.list_price}) |
385 | |
386 | return {'type': 'ir.actions.act_window', |
387 | 'res_model': 'stock.print.certificate', |
388 | @@ -296,7 +296,7 @@ |
389 | |
390 | if certif and not context.get('attach_ok', False): |
391 | partial_id = self.pool.get("stock.certificate.picking").create( |
392 | - cr, uid, {'picking_id': ids[0]}, context=dict(context, active_ids=ids)) |
393 | + cr, uid, {'picking_id': ids[0]}, context=dict(context, active_ids=ids)) |
394 | return {'name':_("Attach a certificate of donation"), |
395 | 'view_mode': 'form', |
396 | 'view_id': False, |
397 | @@ -326,10 +326,8 @@ |
398 | |
399 | # US-148 |
400 | if pick.type == 'in': |
401 | - args = [('picking_id', '=', pick.id), |
402 | - ('draft', '=', True)] |
403 | - wiz_ids = wizard_obj.search(cr, uid, args=args, |
404 | - context=context) |
405 | + domain = [('picking_id', '=', pick.id), ('draft', '=', True), ('already_processed', '=', False)] |
406 | + wiz_ids = wizard_obj.search(cr, uid, domain, context=context) |
407 | if wiz_ids: |
408 | proc_id = wiz_ids[0] |
409 | else: |
410 | @@ -349,13 +347,13 @@ |
411 | 'view_type': 'form', |
412 | 'view_mode': 'form', |
413 | 'target': 'new', |
414 | - } |
415 | + } |
416 | |
417 | if not context.get('force_process', False) and pick.type == 'in' \ |
418 | and not pick.in_dpo \ |
419 | and pick.state != 'shipped' and pick.partner_id.partner_type == 'internal': |
420 | view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, |
421 | - 'msf_outgoing', 'stock_incoming_processor_internal_warning_form_view')[1] |
422 | + 'msf_outgoing', 'stock_incoming_processor_internal_warning_form_view')[1] |
423 | res['view_id'] = [view_id] |
424 | |
425 | return res |
426 | |
427 | === modified file 'bin/addons/product_attributes/product_attributes.py' |
428 | --- bin/addons/product_attributes/product_attributes.py 2016-11-21 14:21:55 +0000 |
429 | +++ bin/addons/product_attributes/product_attributes.py 2017-01-16 10:24:56 +0000 |
430 | @@ -1430,7 +1430,7 @@ |
431 | |
432 | for product in self.browse(cr, uid, ids, context=context): |
433 | # Raise an error if the product is already inactive |
434 | - if not product.active: |
435 | + if not product.active and not context.get('sync_update_execution'): |
436 | raise osv.except_osv(_('Error'), _('The product [%s] %s is already inactive.') % (product.default_code, product.name)) |
437 | |
438 | # Check if the product is in some purchase order lines or request for quotation lines |
439 | |
440 | === modified file 'bin/addons/purchase_override/report/merged_order.rml' |
441 | --- bin/addons/purchase_override/report/merged_order.rml 2016-11-24 10:31:53 +0000 |
442 | +++ bin/addons/purchase_override/report/merged_order.rml 2017-01-16 10:24:56 +0000 |
443 | @@ -341,7 +341,7 @@ |
444 | </tr> |
445 | <tr> |
446 | <td> |
447 | - <para style="TextInformation">Goods value : [[ objects[0].amount_total or '0.00' ]]</para> |
448 | + <para style="TextInformation">Goods value : [[ objects[0].amount_untaxed or '0.00' ]]</para> |
449 | </td> |
450 | <td> |
451 | <para style="TextInformation"> </para> |
452 | |
453 | === modified file 'bin/addons/purchase_override/report/purchase_order.rml' |
454 | --- bin/addons/purchase_override/report/purchase_order.rml 2016-11-09 08:10:08 +0000 |
455 | +++ bin/addons/purchase_override/report/purchase_order.rml 2017-01-16 10:24:56 +0000 |
456 | @@ -343,7 +343,7 @@ |
457 | </tr> |
458 | <tr> |
459 | <td> |
460 | - <para style="TextInformation">Goods value : [[ objects[0].amount_total or '0.00' ]]</para> |
461 | + <para style="TextInformation">Goods value : [[ objects[0].amount_untaxed or '0.00' ]]</para> |
462 | </td> |
463 | <td> |
464 | <para style="TextInformation"> </para> |
465 | |
466 | === modified file 'bin/addons/stock/stock_view.xml' |
467 | --- bin/addons/stock/stock_view.xml 2016-02-18 15:39:31 +0000 |
468 | +++ bin/addons/stock/stock_view.xml 2017-01-16 10:24:56 +0000 |
469 | @@ -1024,8 +1024,9 @@ |
470 | <field name="arch" type="xml"> |
471 | <search string="Picking list"> |
472 | <group col="8" colspan="4"> |
473 | + <filter icon="terp-document-new" name="draft" string="Draft" domain="[('state', 'in', ['draft'])]" help="Delivery orders which are in draft state" /> |
474 | + <filter icon="terp-camera_test" name="confirmed" string="Confirmed" domain="[('state','=','confirmed')]" help="Confirmed Delivery Orders"/> |
475 | <filter icon="terp-check" name="available" string="Available" domain="[('state','=','assigned')]" help="Assigned Delivery Orders"/> |
476 | - <filter icon="terp-camera_test" name="confirmed" string="Confirmed" domain="[('state','=','confirmed')]" help="Confirmed Delivery Orders"/> |
477 | <filter icon="terp-dialog-close" name="done" string="Done" domain="[('state','=','done')]" help="Delivery orders already processed"/> |
478 | <separator orientation="vertical"/> |
479 | <filter icon="terp-accessories-archiver-minus" string="Back Orders" domain="[('backorder_id', '!=', False)]" help="Is a Back Order" groups="base.group_extended"/> |
480 | |
481 | === modified file 'bin/addons/supplier_catalogue/product.py' |
482 | --- bin/addons/supplier_catalogue/product.py 2016-08-18 08:32:03 +0000 |
483 | +++ bin/addons/supplier_catalogue/product.py 2017-01-16 10:24:56 +0000 |
484 | @@ -30,7 +30,7 @@ |
485 | class product_supplierinfo(osv.osv): |
486 | _name = 'product.supplierinfo' |
487 | _inherit = 'product.supplierinfo' |
488 | - |
489 | + |
490 | def unlink(self, cr, uid, info_ids, context=None): |
491 | ''' |
492 | Disallow the possibility to remove a supplier info if |
493 | @@ -43,45 +43,45 @@ |
494 | context = {} |
495 | if isinstance(info_ids, (int, long)): |
496 | info_ids = [info_ids] |
497 | - |
498 | + |
499 | for info in self.browse(cr, uid, info_ids, context=context): |
500 | if info.catalogue_id and not context.get('product_change', False): |
501 | raise osv.except_osv(_('Error'), _('You cannot remove a supplier information which is linked ' \ |
502 | 'to a supplier catalogue line ! Please remove the corresponding ' \ |
503 | 'supplier catalogue line to remove this supplier information.')) |
504 | - |
505 | + |
506 | return super(product_supplierinfo, self).unlink(cr, uid, info_ids, context=context) |
507 | - |
508 | + |
509 | def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False): |
510 | if not context: |
511 | context = {} |
512 | - |
513 | + |
514 | new_res = [] |
515 | res = super(product_supplierinfo, self).search(cr, uid, args, offset, limit, |
516 | - order, context=context, count=count) |
517 | + order, context=context, count=count) |
518 | if count: |
519 | return res |
520 | - |
521 | + |
522 | if isinstance(res, (int, long)): |
523 | res = [res] |
524 | - |
525 | + |
526 | for r in self.browse(cr, uid, res, context=context): |
527 | if not r.catalogue_id or r.catalogue_id.active: |
528 | new_res.append(r.id) |
529 | - |
530 | + |
531 | return new_res |
532 | - |
533 | + |
534 | def _get_editable(self, cr, uid, ids, field_name, args, context=None): |
535 | ''' |
536 | Return True if no catalogue associated |
537 | ''' |
538 | res = {} |
539 | - |
540 | + |
541 | for x in self.browse(cr, uid, ids, context=context): |
542 | res[x.id] = True |
543 | if x.catalogue_id: |
544 | res[x.id] = False |
545 | - |
546 | + |
547 | return res |
548 | |
549 | def _get_seller_delay(self, cr, uid, ids, field_name, args, context=None): |
550 | @@ -95,7 +95,7 @@ |
551 | res[price.id] = (price.name and price.name.supplier_lt) or (product_id and int(product[0].procure_delay)) or 1 |
552 | |
553 | return res |
554 | - |
555 | + |
556 | _columns = { |
557 | 'catalogue_id': fields.many2one('supplier.catalogue', string='Associated catalogue', ondelete='cascade'), |
558 | 'editable': fields.function(_get_editable, method=True, string='Editable', store=False, type='boolean'), |
559 | @@ -122,7 +122,7 @@ |
560 | |
561 | return {'value': v} |
562 | |
563 | - |
564 | + |
565 | # Override the original method |
566 | def price_get(self, cr, uid, supplier_ids, product_id, product_qty=1, context=None): |
567 | """ |
568 | @@ -141,16 +141,16 @@ |
569 | uom_id = context.get('uom', False) or product_pool.browse(cr, uid, product_id, context=context).uom_id.id |
570 | for supplier in partner_pool.browse(cr, uid, supplier_ids, context=context): |
571 | res[supplier.id] = product_pool._get_partner_price(cr, uid, product_id, supplier.id, product_qty, |
572 | - currency_id, date, uom_id, context=context) |
573 | + currency_id, date, uom_id, context=context) |
574 | return res |
575 | - |
576 | + |
577 | product_supplierinfo() |
578 | |
579 | |
580 | class pricelist_partnerinfo(osv.osv): |
581 | _name = 'pricelist.partnerinfo' |
582 | _inherit = 'pricelist.partnerinfo' |
583 | - |
584 | + |
585 | def default_get(self, cr, uid, fields, context=None): |
586 | ''' |
587 | Set automatically the currency of the line with the default |
588 | @@ -158,9 +158,9 @@ |
589 | ''' |
590 | if not context: |
591 | context = {} |
592 | - |
593 | + |
594 | res = super(pricelist_partnerinfo, self).default_get(cr, uid, fields, context=context) |
595 | - |
596 | + |
597 | if context.get('partner_id', False) and isinstance(context['partner_id'], (int, long)): |
598 | partner = self.pool.get('res.partner').browse(cr, uid, context.get('partner_id'), context=context) |
599 | res['currency_id'] = partner.property_product_pricelist_purchase.currency_id.id |
600 | @@ -168,9 +168,9 @@ |
601 | if context.get('active_model', False) == 'product.supplierinfo' and context.get('active_id', False) and isinstance(context['active_id'], (int, long)): |
602 | read_partnerinfo = self.pool.get('product.supplierinfo').read(cr, uid, context['active_id']) |
603 | res['uom_id'] = read_partnerinfo['product_uom'][0] |
604 | - |
605 | + |
606 | return res |
607 | - |
608 | + |
609 | def unlink(self, cr, uid, info_id, context=None): |
610 | ''' |
611 | Disallow the possibility to remove a supplier pricelist |
612 | @@ -181,7 +181,7 @@ |
613 | ''' |
614 | if context is None: |
615 | context = {} |
616 | - |
617 | + |
618 | if isinstance(info_id, (int, long)): |
619 | info_id = [info_id] |
620 | |
621 | @@ -190,27 +190,27 @@ |
622 | raise osv.except_osv(_('Error'), _('You cannot remove a supplier pricelist line which is linked ' \ |
623 | 'to a supplier catalogue line ! Please remove the corresponding ' \ |
624 | 'supplier catalogue line to remove this supplier information.')) |
625 | - |
626 | + |
627 | return super(pricelist_partnerinfo, self).unlink(cr, uid, info_id, context=context) |
628 | - |
629 | + |
630 | def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False): |
631 | if not context: |
632 | context = {} |
633 | - |
634 | + |
635 | res = super(pricelist_partnerinfo, self).search(cr, uid, args, offset, limit, |
636 | - order, context=context, count=count) |
637 | + order, context=context, count=count) |
638 | |
639 | if count: |
640 | return res |
641 | - |
642 | + |
643 | new_res = [] |
644 | - |
645 | + |
646 | for r in self.browse(cr, uid, res, context=context): |
647 | if not r.suppinfo_id or not r.suppinfo_id.catalogue_id or r.suppinfo_id.catalogue_id.active: |
648 | new_res.append(r.id) |
649 | - |
650 | + |
651 | return new_res |
652 | - |
653 | + |
654 | def _check_min_quantity(self, cr, uid, ids, context=None): |
655 | ''' |
656 | Check if the min_qty field is set |
657 | @@ -226,7 +226,7 @@ |
658 | if line.min_quantity <= 0.00: |
659 | raise osv.except_osv(_('Error'), _('The line of product %s has a negative or zero min. quantity !') %line.suppinfo_id.product_id.name) |
660 | return False |
661 | - |
662 | + |
663 | return True |
664 | |
665 | def _get_supplierinfo(self, cr, uid, ids, context=None): |
666 | @@ -254,7 +254,7 @@ |
667 | 'sequence': fields.function(_get_sequence, method=True, string='Sequence', type='integer', |
668 | store={'pricelist.partnerinfo': (lambda self, cr, uid, ids, c={}: ids, [], 20), |
669 | 'product.supplierinfo': (_get_supplierinfo, ['sequence'], 20), |
670 | - }) |
671 | + }) |
672 | } |
673 | |
674 | def create(self, cr, uid, vals, context=None): |
675 | @@ -285,15 +285,15 @@ |
676 | class product_product(osv.osv): |
677 | _name = 'product.product' |
678 | _inherit = 'product.product' |
679 | - |
680 | + |
681 | def _get_partner_info_price(self, cr, uid, product, partner_id, product_qty, currency_id, |
682 | - order_date, product_uom_id, context=None): |
683 | + order_date, product_uom_id, context=None): |
684 | ''' |
685 | Returns the pricelist_information from product form |
686 | ''' |
687 | if not context: |
688 | context = {} |
689 | - |
690 | + |
691 | partner_price = self.pool.get('pricelist.partnerinfo') |
692 | info_prices = [] |
693 | suppinfo_ids = self.pool.get('product.supplierinfo').search(cr, uid, [('name', '=', partner_id), ('product_id', '=', product.product_tmpl_id.id)], context=context) |
694 | @@ -304,18 +304,18 @@ |
695 | ('valid_from', '=', False), |
696 | '|', ('valid_till', '>=', order_date), |
697 | ('valid_till', '=', False)] |
698 | - |
699 | + |
700 | domain_cur = [('currency_id', '=', currency_id)] |
701 | domain_cur.extend(domain) |
702 | - |
703 | + |
704 | info_prices = partner_price.search(cr, uid, domain_cur, order='sequence asc, min_quantity desc, id desc', limit=1, context=context) |
705 | if not info_prices: |
706 | info_prices = partner_price.search(cr, uid, domain, order='sequence asc, min_quantity desc, id desc', limit=1, context=context) |
707 | - |
708 | + |
709 | return info_prices |
710 | - |
711 | + |
712 | def _get_partner_price(self, cr, uid, product_ids, partner_id, product_qty, currency_id, |
713 | - order_date, product_uom_id, context=None): |
714 | + order_date, product_uom_id, context=None): |
715 | ''' |
716 | Search the good partner price line for products |
717 | ''' |
718 | @@ -323,29 +323,27 @@ |
719 | one_product = False |
720 | cur_obj = self.pool.get('res.currency') |
721 | partner_price = self.pool.get('pricelist.partnerinfo') |
722 | - suppinfo_obj = self.pool.get('product.supplierinfo') |
723 | prod_obj = self.pool.get('product.product') |
724 | - catalogue_obj = self.pool.get('supplier.catalogue') |
725 | - |
726 | + |
727 | if not context: |
728 | context = {} |
729 | - |
730 | + |
731 | if isinstance(product_ids, (int, long)): |
732 | one_product = product_ids |
733 | product_ids = [product_ids] |
734 | - |
735 | + |
736 | for product in prod_obj.browse(cr, uid, product_ids, context=context): |
737 | info_prices = self._get_partner_info_price(cr, uid, product, partner_id, product_qty, currency_id, |
738 | order_date, product_uom_id, context=context) |
739 | - |
740 | + |
741 | if info_prices: |
742 | - # info = partner_price.browse(cr, uid, info_price, context=context)[0] |
743 | + # info = partner_price.browse(cr, uid, info_price, context=context)[0] |
744 | info = partner_price.browse(cr, uid, info_prices[0], context=context) |
745 | price = cur_obj.compute(cr, uid, info.currency_id.id, currency_id, info.price, round=False, context=context) |
746 | res[product.id] = (price, info.rounding or 1.00, info.suppinfo_id.min_qty or 0.00) |
747 | else: |
748 | res[product.id] = (False, 1.0, 1.0) |
749 | - |
750 | + |
751 | return not one_product and res or res[one_product] |
752 | |
753 | def _get_catalogue_ids(self, cr, uid, ids, field_name, arg, context=None): |
754 | @@ -371,51 +369,70 @@ |
755 | res[product.id] = list(catalogue_ids) |
756 | |
757 | return res |
758 | - |
759 | + |
760 | def _search_catalogue_ids(self, cr, uid, obj, name, args, context=None): |
761 | ''' |
762 | Filter the search according to the args parameter |
763 | ''' |
764 | - # Objects |
765 | catalogue_obj = self.pool.get('supplier.catalogue') |
766 | |
767 | - context = context is None and {} or context |
768 | - ids = set() |
769 | + if context is None: |
770 | + context = {} |
771 | + product_id_list = [] |
772 | |
773 | for arg in args: |
774 | - if arg[0] == 'catalogue_ids' and arg[1] == '=' and arg[2]: |
775 | - catalogue = catalogue_obj.browse(cr, uid, int(arg[2]), context=context) |
776 | - for line in catalogue.line_ids: |
777 | - ids.add(line.product_id.id) |
778 | - elif arg[0] == 'catalogue_ids' and arg[1] == 'in' and arg[2]: |
779 | - for catalogue in catalogue_obj.browse(cr, uid, arg[2], context=context): |
780 | - for line in catalogue.line_ids: |
781 | - ids.add(line.product_id.id) |
782 | + if arg[0] == 'catalogue_ids' and arg[1] == '=': |
783 | + catalogue_list = [int(arg[2])] |
784 | + elif arg[0] == 'catalogue_ids' and arg[1] == 'in': |
785 | + catalogue_list = arg[2] |
786 | + elif arg[0] == 'catalogue_ids' and arg[1] == 'ilike': |
787 | + name_search = arg[2] |
788 | + catalogue_list = catalogue_obj.search(cr, uid, [('name', 'ilike', name_search)], |
789 | + context=context) |
790 | + if not catalogue_list: |
791 | + return [] |
792 | else: |
793 | return [] |
794 | |
795 | - return [('id', 'in', list(ids))] |
796 | + catalog_lines_result = catalogue_obj.read(cr, uid, catalogue_list, |
797 | + ['line_ids'], context) |
798 | + catalog_line_ids_list = [] |
799 | + for catalog in catalog_lines_result: |
800 | + catalog_line_ids_list.extend(catalog['line_ids']) |
801 | + |
802 | + total_lines = len(catalog_line_ids_list) |
803 | + start_chunk = 0 |
804 | + chunk_size = 500 |
805 | + while start_chunk < total_lines: |
806 | + ids_chunk = catalog_line_ids_list[start_chunk:start_chunk+chunk_size] |
807 | + cr.execute("""SELECT scl.product_id |
808 | + FROM supplier_catalogue_line as scl |
809 | + WHERE scl.id in %s""", (tuple(ids_chunk),)) |
810 | + current_res = [x[0] for x in cr.fetchall() if x] |
811 | + product_id_list.extend(current_res) |
812 | + start_chunk += chunk_size |
813 | + return [('id', 'in', product_id_list)] |
814 | |
815 | _columns = { |
816 | 'catalogue_ids': fields.function(_get_catalogue_ids, fnct_search=_search_catalogue_ids, |
817 | - type='many2many', relation='supplier.catalogue', method=True, string='Catalogues'), |
818 | + type='many2many', relation='supplier.catalogue', method=True, string='Catalogues'), |
819 | } |
820 | - |
821 | + |
822 | product_product() |
823 | |
824 | |
825 | class product_pricelist(osv.osv): |
826 | _name = 'product.pricelist' |
827 | _inherit = 'product.pricelist' |
828 | - |
829 | + |
830 | def _get_in_search(self, cr, uid, ids, field_name, args, context=None): |
831 | res = {} |
832 | - |
833 | + |
834 | for id in ids: |
835 | res[id] = True |
836 | - |
837 | + |
838 | return res |
839 | - |
840 | + |
841 | def _search_in_search(self, cr, uid, obj, name, args, context=None): |
842 | ''' |
843 | Returns pricelists according to partner type |
844 | @@ -423,7 +440,7 @@ |
845 | user_obj = self.pool.get('res.users') |
846 | cur_obj = self.pool.get('res.currency') |
847 | dom = [] |
848 | - |
849 | + |
850 | for arg in args: |
851 | if arg[0] == 'in_search': |
852 | if arg[1] != '=': |
853 | @@ -438,42 +455,42 @@ |
854 | elif arg[2] == 'esc': |
855 | currency_ids = cur_obj.search(cr, uid, [('is_esc_currency', '=', True)]) |
856 | dom.append(('currency_id', 'in', currency_ids)) |
857 | - |
858 | + |
859 | return dom |
860 | - |
861 | + |
862 | def _get_currency_name(self, cr, uid, ids, field_name, args, context=None): |
863 | ''' |
864 | Return the name of the related currency |
865 | ''' |
866 | res = {} |
867 | - |
868 | + |
869 | for p_list in self.browse(cr, uid, ids, context=context): |
870 | res[p_list.id] = False |
871 | if p_list.currency_id: |
872 | res[p_list.id] = p_list.currency_id.currency_name |
873 | - |
874 | + |
875 | return res |
876 | - |
877 | + |
878 | def _search_currency_name(self, cr, uid, obj, name, args, context=None): |
879 | ''' |
880 | Return the list corresponding to the currency name |
881 | ''' |
882 | dom = [] |
883 | - |
884 | + |
885 | for arg in args: |
886 | if arg[0] == 'currency_name': |
887 | currency_ids = self.pool.get('res.currency').search(cr, uid, [('currency_name', arg[1], arg[2])], context=context) |
888 | dom.append(('currency_id', 'in', currency_ids)) |
889 | - |
890 | + |
891 | return dom |
892 | - |
893 | - |
894 | + |
895 | + |
896 | _columns = { |
897 | 'in_search': fields.function(_get_in_search, fnct_search=_search_in_search, method=True, |
898 | type='boolean', string='In search'), |
899 | 'currency_name': fields.function(_get_currency_name, fnct_search=_search_currency_name, type='char', method=True, string='Currency name'), |
900 | } |
901 | - |
902 | + |
903 | def _hook_product_partner_price(self, cr, uid, *args, **kwargs): |
904 | ''' |
905 | Rework the computation of price from partner section in product form |
906 | @@ -490,13 +507,13 @@ |
907 | if not partner and 'partner_id' in context: |
908 | partner = context.get('partner_id', False) |
909 | uom_price_already_computed = kwargs['uom_price_already_computed'] |
910 | - |
911 | + |
912 | price, rounding, min_qty = self.pool.get('product.product')._get_partner_price(cr, uid, product_id, partner, qty, currency_id, |
913 | date, uom, context=context) |
914 | uom_price_already_computed = 1 |
915 | - |
916 | + |
917 | return price, uom_price_already_computed |
918 | - |
919 | + |
920 | def name_get(self, cr, user, ids, context=None): |
921 | ''' |
922 | Display the currency name instead of the pricelist name |
923 | @@ -507,7 +524,7 @@ |
924 | txt = pp.currency_id.name |
925 | res += [(pp.id, txt)] |
926 | return res |
927 | - |
928 | + |
929 | def name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=80): |
930 | ''' |
931 | Search pricelist by currency name instead of pricelist name |
932 | @@ -515,12 +532,12 @@ |
933 | ids = [] |
934 | if name: |
935 | currency_ids = self.pool.get('res.currency').search(cr, uid, |
936 | - [('name', operator, name)], order='NO_ORDER', context=context) |
937 | + [('name', operator, name)], order='NO_ORDER', context=context) |
938 | ids = self.search(cr, uid, [('currency_id', 'in', currency_ids)] + (args or [])) |
939 | - |
940 | + |
941 | return self.name_get(cr, uid, ids) |
942 | - |
943 | - |
944 | + |
945 | + |
946 | product_pricelist() |
947 | |
948 | class res_currency(osv.osv): |
949 | @@ -550,21 +567,21 @@ |
950 | po_currency_id = price_obj.browse(cr, uid, arg[2]).currency_id.id |
951 | dom.append(('id', 'in', [func_currency_id, po_currency_id])) |
952 | return dom |
953 | - |
954 | + |
955 | def _get_partner_currency(self, cr, uid, ids, field_name, args, context=None): |
956 | res = {} |
957 | for id in ids: |
958 | res[id] = True |
959 | - |
960 | + |
961 | return res |
962 | - |
963 | + |
964 | def _src_partner_currency(self, cr, uid, obj, name, args, context=None): |
965 | ''' |
966 | Returns currencies according to partner type |
967 | ''' |
968 | user_obj = self.pool.get('res.users') |
969 | dom = [] |
970 | - |
971 | + |
972 | for arg in args: |
973 | if arg[0] == 'partner_currency': |
974 | if arg[1] != '=': |
975 | @@ -578,14 +595,14 @@ |
976 | dom.append(('is_section_currency', '=', True)) |
977 | elif partner.partner_type == 'esc': |
978 | dom.append(('is_esc_currency', '=', True)) |
979 | - |
980 | + |
981 | return dom |
982 | |
983 | _columns = { |
984 | 'is_section_currency': fields.boolean(string='Functional currency', |
985 | - help='If this box is checked, this currency is used as a functional currency for at least one section in MSF.'), |
986 | + help='If this box is checked, this currency is used as a functional currency for at least one section in MSF.'), |
987 | 'is_esc_currency': fields.boolean(string='ESC currency', |
988 | - help='If this box is checked, this currency is used as a currency for at least one ESC.'), |
989 | + help='If this box is checked, this currency is used as a currency for at least one ESC.'), |
990 | 'is_po_functional': fields.function(_get_in_search, fnct_search=_search_in_search, method=True, |
991 | type='boolean', string='transport PO currencies'), |
992 | 'partner_currency': fields.function(_get_partner_currency, fnct_search=_src_partner_currency, type='boolean', method=True, |
993 | @@ -605,9 +622,9 @@ |
994 | # Check if Inter-section partners used one of these currencies |
995 | if 'is_section_currency' in values and not values['is_section_currency']: |
996 | pricelist_ids = pricelist_obj.search(cr, uid, [('currency_id', |
997 | - 'in', ids)], order='NO_ORDER', context=context) |
998 | + 'in', ids)], order='NO_ORDER', context=context) |
999 | partner_ids = partner_obj.search(cr, uid, [('partner_type', '=', |
1000 | - 'section')], order='NO_ORDER', context=context) |
1001 | + 'section')], order='NO_ORDER', context=context) |
1002 | value_reference = ['product.pricelist,%s' % x for x in pricelist_ids] |
1003 | res_reference = ['res.partner,%s' % x for x in partner_ids] |
1004 | property_ids = [] |
1005 | @@ -615,7 +632,7 @@ |
1006 | property_ids = property_obj.search(cr, uid, [('res_id', 'in', res_reference), |
1007 | ('value_reference', 'in', value_reference), |
1008 | '|', ('name', '=', 'property_product_pricelist'), |
1009 | - ('name', '=', 'property_product_pricelist_purchase'),], context=context) |
1010 | + ('name', '=', 'property_product_pricelist_purchase'),], context=context) |
1011 | if property_ids: |
1012 | properties = property_obj.browse(cr, uid, property_ids, context=context) |
1013 | partner_list = ' / '.join(x.res_id.name for x in properties) |
1014 | @@ -626,9 +643,9 @@ |
1015 | # Check if ESC partners used one of these currencies |
1016 | if 'is_esc_currency' in values and not values['is_esc_currency']: |
1017 | pricelist_ids = pricelist_obj.search(cr, uid, [('currency_id', |
1018 | - 'in', ids)], order='NO_ORDER', context=context) |
1019 | + 'in', ids)], order='NO_ORDER', context=context) |
1020 | partner_ids = partner_obj.search(cr, uid, [('partner_type', '=', |
1021 | - 'esc')], order='NO_ORDER', context=context) |
1022 | + 'esc')], order='NO_ORDER', context=context) |
1023 | value_reference = ['product.pricelist,%s' % x for x in pricelist_ids] |
1024 | res_reference = ['res.partner,%s' % x for x in partner_ids] |
1025 | property_ids = [] |
1026 | @@ -636,7 +653,7 @@ |
1027 | property_ids = property_obj.search(cr, uid, [('res_id', 'in', res_reference), |
1028 | ('value_reference', 'in', value_reference), |
1029 | '|', ('name', '=', 'property_product_pricelist'), |
1030 | - ('name', '=', 'property_product_pricelist_purchase'),], context=context) |
1031 | + ('name', '=', 'property_product_pricelist_purchase'),], context=context) |
1032 | if property_ids: |
1033 | properties = property_obj.browse(cr, uid, property_ids, context=context) |
1034 | partner_list = ' / '.join(x.res_id.name for x in properties) |
1035 | |
1036 | === modified file 'bin/addons/tender_flow/report/tender_rfq_comparison_xls.mako' |
1037 | --- bin/addons/tender_flow/report/tender_rfq_comparison_xls.mako 2016-12-05 15:18:05 +0000 |
1038 | +++ bin/addons/tender_flow/report/tender_rfq_comparison_xls.mako 2017-01-16 10:24:56 +0000 |
1039 | @@ -541,7 +541,7 @@ |
1040 | <Cell ss:StyleID="sTitle" ss:MergeAcross="1"><Data ss:Type="String">${_('Details')}:</Data></Cell> |
1041 | <Cell ss:StyleID="sData"><Data ss:Type="String">${o.details or ''|x}</Data></Cell> |
1042 | <Cell ss:StyleID="sTitle" ss:MergeAcross="1"><Data ss:Type="String">${_('Currency')}:</Data></Cell> |
1043 | - <Cell ss:StyleID="sData"><Data ss:Type="String">${get_same_and_default_currency(o)[1].name or ''|x}</Data></Cell> |
1044 | + <Cell ss:StyleID="sData"><Data ss:Type="String">${o.company_id.currency_id.name or ''|x}</Data></Cell> |
1045 | <Cell ss:StyleID="sTitle" ss:MergeAcross="1"><Data ss:Type="String">${_('Tender Priority')}:</Data></Cell> |
1046 | <Cell ss:StyleID="sData"><Data ss:Type="String">${getSel(o, 'priority')|x}</Data></Cell> |
1047 | </Row> |
1048 | |
1049 | === modified file 'bin/addons/tender_flow/report/tender_rfq_comparison_xls.py' |
1050 | --- bin/addons/tender_flow/report/tender_rfq_comparison_xls.py 2016-12-05 15:15:27 +0000 |
1051 | +++ bin/addons/tender_flow/report/tender_rfq_comparison_xls.py 2017-01-16 10:24:56 +0000 |
1052 | @@ -17,8 +17,6 @@ |
1053 | super(tender_rfq_comparison, self).__init__(cr, uid, name, context=context) |
1054 | self.localcontext.update({ |
1055 | 'get_compare_lines': self.get_compare_lines, |
1056 | - 'gen_line_link': self.gen_line_link, |
1057 | - 'get_same_and_default_currency': self.get_same_and_default_currency, |
1058 | }) |
1059 | |
1060 | def get_compare_lines(self, tender_obj): |
1061 | @@ -85,42 +83,5 @@ |
1062 | |
1063 | return lines |
1064 | |
1065 | - def get_same_and_default_currency(self, tender_obj): |
1066 | - |
1067 | - if tender_obj == 'draft' or not tender_obj.rfq_ids: |
1068 | - return (True, self.localcontext['company'].currency_id) |
1069 | - |
1070 | - current_cur = False |
1071 | - |
1072 | - for rfq in tender_obj.rfq_ids: |
1073 | - next_cur = rfq.currency_id |
1074 | - if current_cur and current_cur.id != next_cur.id: |
1075 | - return (False, self.localcontext['company'].currency_id) |
1076 | - current_cur = rfq.currency_id |
1077 | - return (True, current_cur) |
1078 | - |
1079 | - def gen_line_link(self, tender_obj): |
1080 | - link_line_supp = {} |
1081 | - |
1082 | - same_cur, currency = self.get_same_and_default_currency(tender_obj) |
1083 | - cur_obj = self.pool.get('res.currency') |
1084 | - |
1085 | - if tender_obj.rfq_ids: |
1086 | - # fine we have rfqs |
1087 | - for rfq in tender_obj.rfq_ids: |
1088 | - for line in rfq.order_line: |
1089 | - data = {'notes': line.notes, 'price_unit': line.price_unit} |
1090 | - if not same_cur: |
1091 | - data['price_unit'] = cur_obj.compute(self.cr, self.uid, line.currency_id.id, currency.id, line.price_unit, round=True) |
1092 | - |
1093 | - link_line_supp.setdefault(line.product_id.id, {}).setdefault(rfq.partner_id.id, data) |
1094 | - elif tender_obj.supplier_ids: |
1095 | - for line in tender_obj.tender_line_ids: |
1096 | - link_line_supp[line.product_id.id] = {} |
1097 | - for supp in tender_obj.supplier_ids: |
1098 | - link_line_supp[line.product_id.id][supp.id] = {} |
1099 | - |
1100 | - return link_line_supp |
1101 | - |
1102 | |
1103 | SpreadsheetReport('report.tender_rfq_comparison_xls', 'tender', 'tender_flow/report/tender_rfq_comparison_xls.mako', parser=tender_rfq_comparison) |
1104 | |
1105 | === modified file 'bin/addons/vertical_integration/report/hq_report_ocb.py' |
1106 | --- bin/addons/vertical_integration/report/hq_report_ocb.py 2016-11-18 16:44:10 +0000 |
1107 | +++ bin/addons/vertical_integration/report/hq_report_ocb.py 2017-01-16 10:24:56 +0000 |
1108 | @@ -63,6 +63,7 @@ |
1109 | dbname = cr.dbname |
1110 | pool = pooler.get_pool(dbname) |
1111 | partner_obj = pool.get('res.partner') |
1112 | + employee_obj = pool.get('hr.employee') |
1113 | |
1114 | # define column number corresponding to properties |
1115 | partner_name_cl = 9 |
1116 | @@ -73,6 +74,12 @@ |
1117 | partner_search_dict = {} |
1118 | employee_search_dict = {} |
1119 | employee_code_dict = {} |
1120 | + partner_name_dict = {} |
1121 | + partner_hash_dict = {} |
1122 | + |
1123 | + partner_id_list = list(set([x[partner_id_cl] for x in data if x[partner_id_cl]])) |
1124 | + partner_result = partner_obj.read(cr, uid, partner_id_list, ['name']) |
1125 | + partner_name_dict = dict((x['id'], x['name']) for x in partner_result) |
1126 | |
1127 | for line in data: |
1128 | tmp_line = list(line) |
1129 | @@ -90,7 +97,7 @@ |
1130 | partner_id = tmp_line[partner_id_cl] |
1131 | if partner_id: |
1132 | # US-497: extract name from partner_id (better than partner_txt) |
1133 | - tmp_line[partner_name_cl] = partner_obj.read(cr, uid, partner_id, ['name'])['name'] |
1134 | + tmp_line[partner_name_cl] = partner_name_dict[partner_id] |
1135 | |
1136 | partner_name = tmp_line[partner_name_cl] |
1137 | # Search only if partner_name is not empty |
1138 | @@ -109,12 +116,13 @@ |
1139 | |
1140 | # If we get some ids, fetch the partner hash |
1141 | if partner_id: |
1142 | - if isinstance(partner_id, (int, long)): |
1143 | - partner_id = [partner_id] |
1144 | - partner_hash = self.get_hash(cr, uid, partner_id, 'res.partner') |
1145 | + if partner_id in partner_hash_dict: |
1146 | + partner_hash = partner_hash_dict[partner_id] |
1147 | + else: |
1148 | + partner_hash = self.get_hash(cr, uid, [partner_id], 'res.partner') |
1149 | + partner_hash_dict[partner_id] = partner_hash |
1150 | |
1151 | if not partner_id and tmp_line[partner_name_cl]: |
1152 | - employee_obj = pool.get('hr.employee') |
1153 | if partner_name not in employee_search_dict: |
1154 | employee_search = employee_obj.search(cr, uid, [('name', '=', partner_name), ('active', 'in', ['t', 'f'])]) |
1155 | if employee_search: |
1156 | @@ -673,7 +681,7 @@ |
1157 | }) |
1158 | |
1159 | # Launch finance archive object |
1160 | - fe = finance_archive(sqlrequests, processrequests) |
1161 | + fe = finance_archive(sqlrequests, processrequests, context=context) |
1162 | # Use archive method to create the archive |
1163 | return fe.archive(cr, uid) |
1164 | |
1165 | |
1166 | === modified file 'bin/addons/vertical_integration/wizard/ocb_export.py' |
1167 | --- bin/addons/vertical_integration/wizard/ocb_export.py 2016-01-26 10:00:40 +0000 |
1168 | +++ bin/addons/vertical_integration/wizard/ocb_export.py 2017-01-16 10:24:56 +0000 |
1169 | @@ -45,6 +45,12 @@ |
1170 | """ |
1171 | Launch a report to generate the ZIP file. |
1172 | """ |
1173 | + if context is None: |
1174 | + context = {} |
1175 | + |
1176 | + if isinstance(ids, (int, long)): |
1177 | + ids = [ids] |
1178 | + |
1179 | # Prepare some values |
1180 | wizard = self.browse(cr, uid, ids[0], context=context) |
1181 | data = {} |
1182 | @@ -67,7 +73,7 @@ |
1183 | if wizard.period_id.number == 16: |
1184 | msg = _("You can not select '%s' as already included in' \ |
1185 | ' December export") % ( |
1186 | - wizard.period_id.name or 'Period 16', ) |
1187 | + wizard.period_id.name or 'Period 16', ) |
1188 | raise osv.except_osv(_('Warning'), msg) |
1189 | """elif wizard.period_id.number == 12: |
1190 | domain = [ |
1191 | @@ -89,7 +95,20 @@ |
1192 | wizard.instance_id and wizard.instance_id.code or '', |
1193 | period_name) |
1194 | |
1195 | - return {'type': 'ir.actions.report.xml', 'report_name': 'hq.ocb', 'datas': data} |
1196 | + background_id = self.pool.get('memory.background.report').create(cr, uid, { |
1197 | + 'file_name': data['target_filename'], |
1198 | + 'report_name': 'hq.ocb', |
1199 | + }, context=context) |
1200 | + context['background_id'] = background_id |
1201 | + context['background_time'] = 2 |
1202 | + |
1203 | + data['context'] = context |
1204 | + return { |
1205 | + 'type': 'ir.actions.report.xml', |
1206 | + 'report_name': 'hq.ocb', |
1207 | + 'datas': data, |
1208 | + 'context': context, |
1209 | + } |
1210 | |
1211 | ocb_export_wizard() |
1212 | # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
1213 | |
1214 | === modified file 'bin/osv/orm.py' |
1215 | --- bin/osv/orm.py 2016-11-17 08:46:41 +0000 |
1216 | +++ bin/osv/orm.py 2017-01-16 10:24:56 +0000 |
1217 | @@ -1450,11 +1450,15 @@ |
1218 | trans = translation_obj._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum')) |
1219 | if trans: |
1220 | node.set('sum', trans) |
1221 | - elif node.get('confirm'): |
1222 | + if node.get('confirm'): |
1223 | trans = translation_obj._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm')) |
1224 | if trans: |
1225 | node.set('confirm', trans) |
1226 | - elif node.get('string'): |
1227 | + if node.get('help'): |
1228 | + trans = translation_obj._get_source(cr, user, self._name, 'view', context['lang'], node.get('help')) |
1229 | + if trans: |
1230 | + node.set('help', trans) |
1231 | + if node.get('string'): |
1232 | trans = translation_obj._get_source(cr, user, self._name, 'view', context['lang'], node.get('string')) |
1233 | if trans == node.get('string') and ('base_model_name' in context): |
1234 | # If translation is same as source, perhaps we'd have more luck with the alternative model name |
1235 | @@ -1462,7 +1466,7 @@ |
1236 | trans = translation_obj._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string')) |
1237 | if trans: |
1238 | node.set('string', trans) |
1239 | - elif node.tag == 'translate': |
1240 | + if node.tag == 'translate': |
1241 | parent = node.getparent() |
1242 | source = node.text |
1243 | for child in node.getchildren(): |
1244 | |
1245 | === modified file 'bin/release.py' |
1246 | --- bin/release.py 2016-12-14 08:48:56 +0000 |
1247 | +++ bin/release.py 2017-01-16 10:24:56 +0000 |
1248 | @@ -20,7 +20,7 @@ |
1249 | ############################################################################## |
1250 | |
1251 | name = 'openerp-server' |
1252 | -version = 'UF3.0' |
1253 | +version = 'UF4.0dev' |
1254 | major_version = '6.0' |
1255 | description = 'OpenERP Server' |
1256 | long_desc = '''OpenERP is a complete ERP and CRM. The main features are accounting (analytic |
1257 | |
1258 | === modified file 'bin/service/web_services.py' |
1259 | --- bin/service/web_services.py 2016-11-15 15:32:01 +0000 |
1260 | +++ bin/service/web_services.py 2017-01-16 10:24:56 +0000 |
1261 | @@ -848,7 +848,7 @@ |
1262 | data.append(tmp_data) |
1263 | return data |
1264 | |
1265 | - def exp_export(self, db_name, uid, fields, domain, model, fields_name, |
1266 | + def exp_export(self, db_name, uid, fields, domain, model, fields_name, |
1267 | group_by=None, export_format='csv', ids=None, context=None): |
1268 | res = {'result': None} |
1269 | db, pool = pooler.get_db_and_pool(db_name) |
1270 | @@ -866,7 +866,7 @@ |
1271 | return res |
1272 | return background_id |
1273 | |
1274 | - def export(self, cr, pool, uid, fields, domain, model, fields_name, |
1275 | + def export(self, cr, pool, uid, fields, domain, model, fields_name, |
1276 | bg_id, group_by=None, export_format='csv', ids=None, res=None, |
1277 | context=None): |
1278 | |
1279 | |
1280 | === modified file 'bin/tools/translate.py' |
1281 | --- bin/tools/translate.py 2016-11-09 08:41:40 +0000 |
1282 | +++ bin/tools/translate.py 2017-01-16 10:24:56 +0000 |
1283 | @@ -559,6 +559,8 @@ |
1284 | res.append(de.get('sum').encode("utf8")) |
1285 | if de.get("confirm"): |
1286 | res.append(de.get('confirm').encode("utf8")) |
1287 | + if de.get("help"): |
1288 | + res.append(de.get('help').encode("utf8")) |
1289 | if de.tag == 'translate': |
1290 | text_to_translate = '' |
1291 | if de.text: |