Merge lp:~julie-w/unifield-server/US-6557 into lp:unifield-server

Proposed by jftempo
Status: Merged
Merged at revision: 5602
Proposed branch: lp:~julie-w/unifield-server/US-6557
Merge into: lp:unifield-server
Diff against target: 973 lines (+598/-186) (has conflicts)
8 files modified
bin/addons/account/account_invoice_view.xml (+42/-0)
bin/addons/account_override/account_invoice_view.xml (+14/-0)
bin/addons/account_override/period.py (+19/-0)
bin/addons/msf_profile/data/patches.xml (+21/-0)
bin/addons/msf_profile/i18n/fr_MF.po (+104/-0)
bin/addons/msf_profile/msf_profile.py (+298/-148)
bin/addons/vertical_integration/report/hq_report_ocb.py (+85/-38)
bin/addons/vertical_integration/vertical_integration_wizard.xml (+15/-0)
Text conflict in bin/addons/account/account_invoice_view.xml
Text conflict in bin/addons/account_override/account_invoice_view.xml
Text conflict in bin/addons/msf_profile/data/patches.xml
Text conflict in bin/addons/msf_profile/i18n/fr_MF.po
Text conflict in bin/addons/msf_profile/msf_profile.py
To merge this branch: bzr merge lp:~julie-w/unifield-server/US-6557
Reviewer: UniField Reviewer Team (status: Pending)
Review via email: mp+378504@code.launchpad.net

Preview Diff

1=== modified file 'bin/addons/account/account_invoice_view.xml'
2--- bin/addons/account/account_invoice_view.xml 2020-01-21 14:01:11 +0000
3+++ bin/addons/account/account_invoice_view.xml 2020-02-04 10:20:45 +0000
4@@ -358,6 +358,7 @@
5 ('from_supply', '=', True),
6 ('partner_type', 'in', ('intermission', 'section'))]}"/>
7 <field name="payment_term" widget="selection"/>
8+<<<<<<< TREE
9 <field colspan="4" name="invoice_line" nolabel="1" context="{'fake': 1, 'from_inv_form': True}">
10 <tree string="Invoice Line"
11 colors="red:inactive_product == True or analytic_distribution_state in ('invalid', 'invalid_small_amount');blue:inactive_product == False and analytic_distribution_state in ('valid');black:inactive_product == False and analytic_distribution_state in ('none')"
12@@ -397,6 +398,47 @@
13 <field name="price_subtotal"/>
14 </tree>
15 </field>
16+=======
17+ <field colspan="4" name="invoice_line" nolabel="1" context="{'fake': 1, 'from_inv_form': True}">
18+ <tree string="Invoice Line"
19+ colors="red:inactive_product == True or analytic_distribution_state in ('invalid');blue:inactive_product == False and analytic_distribution_state in ('valid');black:inactive_product == False and analytic_distribution_state in ('none')"
20+ editable="both">
21+ <field name="invoice_type" invisible="1"/>
22+ <field name="line_synced" invisible="1"/>
23+ <field name="from_supply" invisible="1"/>
24+ <field name="partner_type" invisible="1"/>
25+ <field name="line_number" displayon="noteditable"/>
26+ <field name="is_corrected" invisible="1"/>
27+ <button name="button_open_analytic_lines" string="Have been corrected" type="object"
28+ icon="terp-mail-" attrs="{'invisible': [('is_corrected', '=', False)]}"/>
29+ <field name="name"/>
30+ <field name="inactive_product" invisible="1"/>
31+ <field name="account_id"
32+ domain="[('journal_id', '=', parent.journal_id), ('restricted_area', '=', 'invoice_lines')]"
33+ context="{'type': parent.type, 'journal_type': parent.journal_type}"
34+ />
35+ <button name="button_analytic_distribution" string="Analytical Distribution" type="object"
36+ icon="terp-stock_symbol-selection" context="context"
37+ attrs="{'invisible': [('is_allocatable', '=', False)]}"/>
38+ <field name="analytic_distribution_state_recap" readonly="1"/>
39+ <field name="analytic_distribution_state" invisible="1"/>
40+ <field name="is_allocatable" invisible="1"/>
41+ <field name="quantity"
42+ attrs="{'readonly': ['|',
43+ '&amp;', ('invoice_type', '=', 'in_invoice'), '|', ('line_synced', '=', True), ('from_supply', '=', True),
44+ '&amp;', '&amp;', ('invoice_type', '=', 'out_invoice'), ('from_supply', '=', True), ('partner_type', 'in', ('intermission', 'section'))]}"/>
45+ <field name="product_id" invisible="1"/>
46+ <field name="uos_id" string="UoM"
47+ on_change="uos_id_change(product_id, uos_id, quantity, name, parent.type, parent.partner_id, parent.fiscal_position, price_unit, parent.address_invoice_id, parent.currency_id, {'company_id': parent.company_id})"
48+ attrs="{'readonly': ['|',
49+ '&amp;', ('invoice_type', '=', 'in_invoice'), '|', ('line_synced', '=', True), ('from_supply', '=', True),
50+ '&amp;', '&amp;', ('invoice_type', '=', 'out_invoice'), ('from_supply', '=', True), ('partner_type', 'in', ('intermission', 'section'))]}"/>
51+ <field name="price_unit"
52+ attrs="{'readonly': [('invoice_type', '=', 'in_invoice'), ('line_synced', '=', True)]}"/>
53+ <field name="price_subtotal"/>
54+ </tree>
55+ </field>
56+>>>>>>> MERGE-SOURCE
57 <group col="1" colspan="2">
58 <field name="tax_line" nolabel="1">
59 <tree editable="bottom" string="Taxes">
60
61=== modified file 'bin/addons/account_corrections/wizard/analytic_distribution_wizard.py'
62=== modified file 'bin/addons/account_override/account_invoice_view.xml'
63--- bin/addons/account_override/account_invoice_view.xml 2020-01-21 14:01:11 +0000
64+++ bin/addons/account_override/account_invoice_view.xml 2020-02-04 10:20:45 +0000
65@@ -390,9 +390,14 @@
66 <label string="" colspan="2"/>
67 <field name="check_total" required="2"/>
68 <field colspan="4" name="invoice_line" nolabel="1" widget="one2many_list">
69+<<<<<<< TREE
70 <tree string="Donation Lines" editable="both"
71 colors="red:analytic_distribution_state in ('invalid', 'invalid_small_amount') or inactive_product == True;black:analytic_distribution_state in ('none','valid') and inactive_product == False">
72 <field name="line_number" readonly="1" displayon="noteditable"/>
73+=======
74+ <tree string="Donation Lines" editable="both">
75+ <field name="line_number" readonly="1" displayon="noteditable"/>
76+>>>>>>> MERGE-SOURCE
77 <field name="is_corrected" invisible="1"/>
78 <button name="button_open_analytic_lines" string="Have been corrected" type="object" icon="terp-mail-" attrs="{'invisible': [('is_corrected', '=', False)]}"/>
79 <field name="product_id"
80@@ -533,12 +538,21 @@
81 </group>
82 <field colspan="4" name="invoice_line" nolabel="1" widget="one2many_list" context="{'is_intermission': True, 'from_inv_form': True}">
83 <tree string="Intermission Voucher Lines"
84+<<<<<<< TREE
85 editable="both"
86 colors="red:analytic_distribution_state in ('invalid', 'invalid_small_amount') or inactive_product == True;black:inactive_product == False and analytic_distribution_state in ('none', 'valid')">
87 <field name="from_supply" invisible="1"/>
88 <field name="line_synced" invisible="1"/>
89 <field name="invoice_type" invisible="1"/>
90 <field name="line_number" displayon="noteditable"/>
91+=======
92+ editable="both"
93+ colors="red:analytic_distribution_state == 'invalid' or inactive_product == True;black:inactive_product == False and analytic_distribution_state in ('none', 'valid')">
94+ <field name="from_supply" invisible="1"/>
95+ <field name="line_synced" invisible="1"/>
96+ <field name="invoice_type" invisible="1"/>
97+ <field name="line_number" displayon="noteditable"/>
98+>>>>>>> MERGE-SOURCE
99 <field name="is_corrected" invisible="1"/>
100 <button name="button_open_analytic_lines" string="Have been corrected" type="object" icon="terp-mail-" attrs="{'invisible': [('is_corrected', '=', False)]}"/>
101 <field name="name"/>
102
103=== modified file 'bin/addons/account_override/period.py'
104--- bin/addons/account_override/period.py 2018-11-08 09:39:41 +0000
105+++ bin/addons/account_override/period.py 2020-02-04 10:20:45 +0000
106@@ -103,6 +103,22 @@
107 period_id = period_id and get_next_period_id(self, cr, uid, period_id, context=context)
108 return period_id or False
109
110+
111+def get_previous_period_id(self, cr, uid, period_id, context=None):
112+ """
113+ Returns the id of the previous regular period if it exists (no special period), else returns False.
114+ For the special periods, its returns the related regular period (Period 13 to 16 N ==> Dec. N).
115+ """
116+ if context is None:
117+ context = {}
118+ period = self.browse(cr, uid, period_id, fields_to_fetch=['date_start'], context=context)
119+ previous_period_ids = self.search(cr, uid, [('date_start', '<=', period.date_start),
120+ ('special', '=', False),
121+ ('id', '!=', period_id)],
122+ order='date_start DESC', limit=1, context=context)
123+ return previous_period_ids and previous_period_ids[0] or False
124+
125+
126 def _get_middle_years(self, cr, uid, fy1, fy2, context=None):
127 """
128 Returns the list of the FY ids included between both Fiscal Years in parameter.
129@@ -189,6 +205,9 @@
130 def get_next_period_id_at_index(self, cr, uid, period_id, index, context=None):
131 return get_next_period_id_at_index(self, cr, uid, period_id, index, context)
132
133+ def get_previous_period_id(self, cr, uid, period_id, context=None):
134+ return get_previous_period_id(self, cr, uid, period_id, context)
135+
136 def get_period_range(self, cr, uid, period_from_id, period_to_id, context=None):
137 return get_period_range(self, cr, uid, period_from_id, period_to_id, context=context)
138
139
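The new account.period helper added above returns the id of the regular period immediately preceding the given one: special periods 13 to 16 of year N resolve to December N, and the first regular period of the books yields False. A minimal, hedged sketch of the intended call pattern (cr, uid, period_id and context come from the server environment, as in the OCB report further down; this is not the report code itself):

import pooler  # as used in hq_report_ocb.py below

pool = pooler.get_pool(cr.dbname)
period_obj = pool.get('account.period')
prev_id = period_obj.get_previous_period_id(cr, uid, period_id, context=context)
if not prev_id:
    # e.g. the first regular period of the books: the OCB report below
    # falls back to an empty "previous month" balances file in this case
    pass
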
140=== modified file 'bin/addons/analytic_distribution/analytic_distribution_wizard_view.xml'
141=== modified file 'bin/addons/analytic_distribution/wizard/analytic_distribution_wizard.py'
142=== modified file 'bin/addons/msf_profile/data/patches.xml'
143--- bin/addons/msf_profile/data/patches.xml 2020-01-28 10:17:39 +0000
144+++ bin/addons/msf_profile/data/patches.xml 2020-02-04 10:20:45 +0000
145@@ -477,6 +477,7 @@
146 <field name="method">us_6768_trigger_FP_sync</field>
147 </record>
148
149+<<<<<<< TREE
150 <!-- UF15.1 -->
151 <record id="us_6930_gen_unreconcile" model="patch.scripts">
152 <field name="method">us_6930_gen_unreconcile</field>
153@@ -496,5 +497,25 @@
154 <field name="method">us_6692_new_od_journals</field>
155 </record>
156
157+=======
158+ <!-- UF15.1 -->
159+ <record id="us_6930_gen_unreconcile" model="patch.scripts">
160+ <field name="method">us_6930_gen_unreconcile</field>
161+ </record>
162+
163+ <record id="us_6905_manage_bned_switch" model="patch.scripts">
164+ <field name="method">us_6905_manage_bned_switch</field>
165+ </record>
166+
167+ <record id="rec_entries_uf14_1_uf15" model="patch.scripts">
168+ <field name="method">rec_entries_uf14_1_uf15</field>
169+ </record>
170+
171+ <!-- UF16.0 -->
172+ <record id="us_6692_new_od_journals" model="patch.scripts">
173+ <field name="method">us_6692_new_od_journals</field>
174+ </record>
175+
176+>>>>>>> MERGE-SOURCE
177 </data>
178 </openerp>
179
180=== modified file 'bin/addons/msf_profile/i18n/fr_MF.po'
181--- bin/addons/msf_profile/i18n/fr_MF.po 2020-01-28 11:28:56 +0000
182+++ bin/addons/msf_profile/i18n/fr_MF.po 2020-02-04 10:20:45 +0000
183@@ -106598,6 +106598,7 @@
184 msgid "Funding Pool not found."
185 msgstr "Funding Pool non trouvé."
186
187+<<<<<<< TREE
188 #. modules: account_override, account, register_accounting, msf_instance, finance
189 #: selection:account.analytic.journal,type:0
190 #: selection:account.journal,type:0
191@@ -106732,3 +106733,106 @@
192 #: view:analytic.distribution.wizard:0
193 msgid "Make sure that there is only one Analytic Distribution line for the entries having a booking amount inferior or equal to 1."
194 msgstr "Assurez-vous de n'avoir qu'une seule ligne de Distribution Analytique pour les écritures ayant un montant d'enregistrement inférieur ou égal à 1."
195+=======
196+#. modules: account_override, account, register_accounting, msf_instance, finance
197+#: selection:account.analytic.journal,type:0
198+#: selection:account.journal,type:0
199+#: selection:account.analytic.line,journal_type:0
200+#: selection:account.move,journal_type:0
201+#: selection:account.move.line,journal_type:0
202+#: selection:cash.request.liquidity,type:0
203+#: selection:cash.request.liquidity.bank,type:0
204+#: selection:cash.request.liquidity.cash,type:0
205+#: selection:cash.request.liquidity.cheque,type:0
206+#: selection:account.analytic.journal.fake,type:0
207+#: selection:account.journal.fake,type:0
208+#: selection:wizard.register.opening.confirmation,register_type:0
209+msgid "Correction Auto"
210+msgstr "Correction Auto"
211+
212+#. modules: account_override, account, register_accounting, msf_instance, finance
213+#: selection:account.analytic.journal,type:0
214+#: selection:account.journal,type:0
215+#: selection:account.analytic.line,journal_type:0
216+#: selection:account.move,journal_type:0
217+#: selection:account.move.line,journal_type:0
218+#: selection:cash.request.liquidity,type:0
219+#: selection:cash.request.liquidity.bank,type:0
220+#: selection:cash.request.liquidity.cash,type:0
221+#: selection:cash.request.liquidity.cheque,type:0
222+#: selection:account.analytic.journal.fake,type:0
223+#: selection:account.journal.fake,type:0
224+#: selection:wizard.register.opening.confirmation,register_type:0
225+msgid "Correction Manual"
226+msgstr "Correction Manuelle"
227+
228+#. modules: account_override, account, register_accounting, msf_instance, finance
229+#: selection:account.analytic.journal,type:0
230+#: selection:account.journal,type:0
231+#: selection:account.analytic.line,journal_type:0
232+#: selection:account.move,journal_type:0
233+#: selection:account.move.line,journal_type:0
234+#: selection:cash.request.liquidity,type:0
235+#: selection:cash.request.liquidity.bank,type:0
236+#: selection:cash.request.liquidity.cash,type:0
237+#: selection:cash.request.liquidity.cheque,type:0
238+#: selection:account.analytic.journal.fake,type:0
239+#: selection:account.journal.fake,type:0
240+#: selection:wizard.register.opening.confirmation,register_type:0
241+msgid "Correction HQ"
242+msgstr "Correction HQ"
243+
244+#. module: account_journal
245+#: constraint:account.journal:0
246+msgid "The analytic journal selected must have the same type and prop. instance as this journal."
247+msgstr "Le journal analytique sélectionné doit avoir le même type et la même instance prop. que ce journal."
248+
249+#. module: account_journal
250+#: constraint:account.journal:0
251+msgid "A journal with this type already exists for this instance."
252+msgstr "Un journal de ce type existe déjà pour cette instance."
253+
254+#. module: account
255+#: constraint:account.analytic.journal:0
256+msgid "An analytic journal with this type already exists for this instance."
257+msgstr "Un journal analytique de ce type existe déjà pour cette instance."
258+
259+#. module: account_journal
260+#: constraint:account.journal:0
261+msgid "The prop. instance of the \"Correction HQ\" journal must be a coordination."
262+msgstr "L'instance prop. du journal de type \"Correction HQ\" doit être une coordination."
263+
264+#. module: account
265+#: constraint:account.analytic.journal:0
266+msgid "The prop. instance of the \"Correction HQ\" analytic journal must be a coordination."
267+msgstr "L'instance prop. du journal analytique de type \"Correction HQ\" doit être une coordination."
268+
269+#. module: account_override
270+#: code:addons/account_override/account.py:1198
271+#, python-format
272+msgid "The journal %s is forbidden in manual entries."
273+msgstr "Le journal %s est interdit dans les écritures manuelles."
274+
275+#. modules: account_hq_entries, account_corrections, analytic_distribution
276+#: code:addons/account_hq_entries/wizard/hq_entries_validation.py:269
277+#: code:addons/account_hq_entries/wizard/hq_entries_validation.py:568
278+#: code:addons/account_corrections/account_move_line.py:731
279+#: code:addons/analytic_distribution/analytic_line.py:354
280+#, python-format
281+msgid "No \"correction HQ\" journal found!"
282+msgstr "Aucun journal de type \"correction HQ\" n'a été trouvé !"
283+
284+#. modules: account_hq_entries, analytic_distribution
285+#: code:addons/account_hq_entries/wizard/hq_entries_validation.py:350
286+#: code:addons/account_hq_entries/wizard/hq_entries_validation.py:531
287+#: code:addons/analytic_distribution/analytic_line.py:359
288+#, python-format
289+msgid "No \"correction HQ\" analytic journal found!"
290+msgstr "Aucun journal analytique de type \"correction HQ\" n'a été trouvé !"
291+
292+#. module: vertical_integration
293+#: model:ir.actions.act_window,name:vertical_integration.action_hq_export_ocb_old
294+#: model:ir.ui.menu,name:vertical_integration.menu_action_hq_export_ocb_old
295+msgid "Export to HQ system (OCB) Access Interface"
296+msgstr "Interface d'accès de l'Export vers le système HQ (OCB)"
297+>>>>>>> MERGE-SOURCE
298
299=== modified file 'bin/addons/msf_profile/msf_profile.py'
300--- bin/addons/msf_profile/msf_profile.py 2020-01-28 10:17:39 +0000
301+++ bin/addons/msf_profile/msf_profile.py 2020-02-04 10:20:45 +0000
302@@ -52,154 +52,304 @@
303 'model': lambda *a: 'patch.scripts',
304 }
305
306- # UF16.0
307- def us_6692_new_od_journals(self, cr, uid, *a, **b):
308- """
309- 1. Change the type of the existing correction journals (except OD) to "Correction Manual" so they remain usable
310-
311- 2. Create:
312- - ODM journals in all existing instances
313- - ODHQ journals in existing coordo instances
314-
315- Notes:
316- - creations are done in Python as the objects created must sync normally
317- - none of these journals already exists in prod. DB.
318- """
319- user_obj = self.pool.get('res.users')
320- analytic_journal_obj = self.pool.get('account.analytic.journal')
321- journal_obj = self.pool.get('account.journal')
322- current_instance = user_obj.browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
323- if current_instance: # existing instances only
324- # existing correction journals
325- cr.execute("""
326- UPDATE account_analytic_journal
327- SET type = 'correction_manual'
328- WHERE type = 'correction'
329- AND code != 'OD';
330- """)
331- self._logger.warn('%s correction analytic journal(s) updated.' % (cr.rowcount,))
332- cr.execute("""
333- UPDATE account_journal
334- SET type = 'correction_manual'
335- WHERE type = 'correction'
336- AND code != 'OD';
337- """)
338- self._logger.warn('%s correction journal(s) updated.' % (cr.rowcount,))
339- # ODM analytic journal
340- odm_analytic_vals = {
341- # Prop. Instance: by default the current one is used
342- 'code': 'ODM',
343- 'name': 'Correction manual',
344- 'type': 'correction_manual',
345- }
346- odm_analytic_journal_id = analytic_journal_obj.create(cr, uid, odm_analytic_vals)
347- # ODM G/L journal
348- odm_vals = {
349- # Prop. Instance: by default the current one is used
350- 'code': 'ODM',
351- 'name': 'Correction manual',
352- 'type': 'correction_manual',
353- 'analytic_journal_id': odm_analytic_journal_id,
354- }
355- journal_obj.create(cr, uid, odm_vals)
356- if current_instance.level == 'coordo':
357- # ODHQ analytic journal
358- odhq_analytic_vals = {
359- # Prop. Instance: by default the current one is used
360- 'code': 'ODHQ',
361- 'name': 'Correction automatic HQ',
362- 'type': 'correction_hq',
363- }
364- odhq_analytic_journal_id = analytic_journal_obj.create(cr, uid, odhq_analytic_vals)
365- # ODHQ G/L journal
366- odhq_vals = {
367- # Prop. Instance: by default the current one is used
368- 'code': 'ODHQ',
369- 'name': 'Correction automatic HQ',
370- 'type': 'correction_hq',
371- 'analytic_journal_id': odhq_analytic_journal_id,
372- }
373- journal_obj.create(cr, uid, odhq_vals)
374- return True
375-
376- # UF15.2
377- def rec_entries_uf14_1_uf15(self, cr, uid, *a, **b):
378- current_instance = self.pool.get('res.users').browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
379- if current_instance:
380- trigger_obj = self.pool.get('sync.trigger.something.target')
381- cr.execute('''
382- select sdref, values, source from sync_client_update_received where model='account.move.reconcile' and execution_date > ( select applied from sync_client_version where name='UF15.0') and fields not like '%action_date%'
383- ''')
384-
385- for update in cr.fetchall():
386- rec_number = False
387- try:
388- rec_number = eval(update[1])
389- except:
390- self._logger.warn('Unable to parse values, sdref: %s' % update[0])
391-
392- if rec_number:
393- trigger_obj.create(cr, uid, {'name': 'trigger_rec', 'destination': update[2] , 'args': rec_number[0], 'local': True})
394-
395- return True
396-
397- # UF15.1
398- def us_6930_gen_unreconcile(self, cr, uid, *a, **b):
399- # generate updates to delete reconcile done after UF15.0
400- current_instance = self.pool.get('res.users').browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
401- if current_instance:
402- unrec_obj = self.pool.get('account.move.unreconcile')
403- cr.execute('''
404- select d.name from ir_model_data d
405- left join
406- account_move_reconcile rec on d.model='account.move.reconcile' and d.res_id = rec.id
407- where d.model='account.move.reconcile' and rec.id is null and touched like '%action_date%'
408- ''')
409- for sdref_rec in cr.fetchall():
410- unrec_obj.create(cr, uid, {'reconcile_sdref': sdref_rec[0]})
411- return True
412-
413- def us_6905_manage_bned_switch(self, cr, uid, *a, **b):
414- fake_ed = '2999-12-31'
415- fake_bn = 'TO-BE-REPLACED'
416-
417- lot_obj = self.pool.get('stock.production.lot')
418-
419- # old move with BN or ED if product is no_bn no_ed
420- # set no on bn or en moves
421- cr.execute('''
422- update stock_move set prodlot_id=NULL, expired_date=NULL, hidden_batch_management_mandatory='f', hidden_perishable_mandatory='f', old_lot_info=(select name||'#'||life_date from stock_production_lot where id=stock_move.prodlot_id)||E'\n'||COALESCE(old_lot_info, '') where id in
423- (select m.id from stock_move m, product_product p where p.id = m.product_id and p.perishable='f' and p.batch_management='f' and m.prodlot_id is not null and m.state in ('done', 'cancel'))
424- ''')
425- self._logger.warn('%d done/cancel moves set from ED or BN to no' % (cr.rowcount, ))
426-
427-
428- # set bn on no moves
429- cr.execute('''select distinct(product_id) from stock_move m, product_product p where p.id = m.product_id and p.perishable='t' and p.batch_management='t' and m.prodlot_id is null and m.state = 'done' and m.product_qty!=0 and m.location_dest_id != m.location_id''')
430- self._logger.warn('%d done/cancel moves set from NO to BN' % (cr.rowcount, ))
431- for prod_id in cr.fetchall():
432- batch_id = lot_obj._get_or_create_lot(cr, uid, name=fake_bn, expiry_date=fake_ed, product_id=prod_id)
433- cr.execute("update stock_move set hidden_batch_management_mandatory='t', hidden_perishable_mandatory='f', prodlot_id=%s, expired_date=%s, old_lot_info='US-6905 BN set'||E'\n'||COALESCE(old_lot_info, '') where product_id=%s and prodlot_id is null and state = 'done' and product_qty!=0 and location_dest_id != location_id", (batch_id, fake_ed, prod_id))
434-
435- # set ed on no moves
436- cr.execute('''select distinct(product_id) from stock_move m, product_product p where p.id = m.product_id and p.perishable='t' and p.batch_management='f' and m.prodlot_id is null and m.state = 'done' and m.product_qty!=0 and m.location_dest_id != m.location_id''')
437- self._logger.warn('%d done/cancel moves set from NO to ED' % (cr.rowcount, ))
438- for prod_id in cr.fetchall():
439- batch_id = lot_obj._get_or_create_lot(cr, uid, name=False, expiry_date=fake_ed, product_id=prod_id)
440- cr.execute("update stock_move set hidden_batch_management_mandatory='f', hidden_perishable_mandatory='t', prodlot_id=%s, expired_date=%s, old_lot_info='US-6905 EN set'||E'\n'||COALESCE(old_lot_info, '') where product_id=%s and prodlot_id is null and state = 'done' and product_qty!=0 and location_dest_id != location_id", (batch_id, fake_ed, prod_id))
441-
442- # set ed on bn moves
443- cr.execute("update stock_production_lot set name='MSFBN/'||name, type='internal' where id in (select lot.id from stock_production_lot lot, product_product p where p.id = lot.product_id and type='standard' and p.perishable='t' and p.batch_management='f') returning name")
444- for lot in cr.fetchall():
445- self._logger.warn('BN %s from standard to internal' % (lot[0], ))
446-
447- # set bn on ed moves
448- cr.execute("update stock_production_lot set type='standard', name='S'||name where id in (select lot.id from stock_production_lot lot, product_product p where p.id = lot.product_id and type='internal' and p.perishable='t' and p.batch_management='t') returning name")
449- for lot in cr.fetchall():
450- self._logger.warn('BN %s from internal to standard' % (lot[0], ))
451-
452- return True
453-
454+<<<<<<< TREE
455+ # UF16.0
456+ def us_6692_new_od_journals(self, cr, uid, *a, **b):
457+ """
458+ 1. Change the type of the existing correction journals (except OD) to "Correction Manual" so they remain usable
459+
460+ 2. Create:
461+ - ODM journals in all existing instances
462+ - ODHQ journals in existing coordo instances
463+
464+ Notes:
465+ - creations are done in Python as the objects created must sync normally
466+ - none of these journals already exists in prod. DB.
467+ """
468+ user_obj = self.pool.get('res.users')
469+ analytic_journal_obj = self.pool.get('account.analytic.journal')
470+ journal_obj = self.pool.get('account.journal')
471+ current_instance = user_obj.browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
472+ if current_instance: # existing instances only
473+ # existing correction journals
474+ cr.execute("""
475+ UPDATE account_analytic_journal
476+ SET type = 'correction_manual'
477+ WHERE type = 'correction'
478+ AND code != 'OD';
479+ """)
480+ self._logger.warn('%s correction analytic journal(s) updated.' % (cr.rowcount,))
481+ cr.execute("""
482+ UPDATE account_journal
483+ SET type = 'correction_manual'
484+ WHERE type = 'correction'
485+ AND code != 'OD';
486+ """)
487+ self._logger.warn('%s correction journal(s) updated.' % (cr.rowcount,))
488+ # ODM analytic journal
489+ odm_analytic_vals = {
490+ # Prop. Instance: by default the current one is used
491+ 'code': 'ODM',
492+ 'name': 'Correction manual',
493+ 'type': 'correction_manual',
494+ }
495+ odm_analytic_journal_id = analytic_journal_obj.create(cr, uid, odm_analytic_vals)
496+ # ODM G/L journal
497+ odm_vals = {
498+ # Prop. Instance: by default the current one is used
499+ 'code': 'ODM',
500+ 'name': 'Correction manual',
501+ 'type': 'correction_manual',
502+ 'analytic_journal_id': odm_analytic_journal_id,
503+ }
504+ journal_obj.create(cr, uid, odm_vals)
505+ if current_instance.level == 'coordo':
506+ # ODHQ analytic journal
507+ odhq_analytic_vals = {
508+ # Prop. Instance: by default the current one is used
509+ 'code': 'ODHQ',
510+ 'name': 'Correction automatic HQ',
511+ 'type': 'correction_hq',
512+ }
513+ odhq_analytic_journal_id = analytic_journal_obj.create(cr, uid, odhq_analytic_vals)
514+ # ODHQ G/L journal
515+ odhq_vals = {
516+ # Prop. Instance: by default the current one is used
517+ 'code': 'ODHQ',
518+ 'name': 'Correction automatic HQ',
519+ 'type': 'correction_hq',
520+ 'analytic_journal_id': odhq_analytic_journal_id,
521+ }
522+ journal_obj.create(cr, uid, odhq_vals)
523+ return True
524+
525+ # UF15.2
526+ def rec_entries_uf14_1_uf15(self, cr, uid, *a, **b):
527+ current_instance = self.pool.get('res.users').browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
528+ if current_instance:
529+ trigger_obj = self.pool.get('sync.trigger.something.target')
530+ cr.execute('''
531+ select sdref, values, source from sync_client_update_received where model='account.move.reconcile' and execution_date > ( select applied from sync_client_version where name='UF15.0') and fields not like '%action_date%'
532+ ''')
533+
534+ for update in cr.fetchall():
535+ rec_number = False
536+ try:
537+ rec_number = eval(update[1])
538+ except:
539+ self._logger.warn('Unable to parse values, sdref: %s' % update[0])
540+
541+ if rec_number:
542+ trigger_obj.create(cr, uid, {'name': 'trigger_rec', 'destination': update[2] , 'args': rec_number[0], 'local': True})
543+
544+ return True
545+
546+ # UF15.1
547+ def us_6930_gen_unreconcile(self, cr, uid, *a, **b):
548+ # generate updates to delete reconcile done after UF15.0
549+ current_instance = self.pool.get('res.users').browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
550+ if current_instance:
551+ unrec_obj = self.pool.get('account.move.unreconcile')
552+ cr.execute('''
553+ select d.name from ir_model_data d
554+ left join
555+ account_move_reconcile rec on d.model='account.move.reconcile' and d.res_id = rec.id
556+ where d.model='account.move.reconcile' and rec.id is null and touched like '%action_date%'
557+ ''')
558+ for sdref_rec in cr.fetchall():
559+ unrec_obj.create(cr, uid, {'reconcile_sdref': sdref_rec[0]})
560+ return True
561+
562+ def us_6905_manage_bned_switch(self, cr, uid, *a, **b):
563+ fake_ed = '2999-12-31'
564+ fake_bn = 'TO-BE-REPLACED'
565+
566+ lot_obj = self.pool.get('stock.production.lot')
567+
568+ # old move with BN or ED if product is no_bn no_ed
569+ # set no on bn or en moves
570+ cr.execute('''
571+ update stock_move set prodlot_id=NULL, expired_date=NULL, hidden_batch_management_mandatory='f', hidden_perishable_mandatory='f', old_lot_info=(select name||'#'||life_date from stock_production_lot where id=stock_move.prodlot_id)||E'\n'||COALESCE(old_lot_info, '') where id in
572+ (select m.id from stock_move m, product_product p where p.id = m.product_id and p.perishable='f' and p.batch_management='f' and m.prodlot_id is not null and m.state in ('done', 'cancel'))
573+ ''')
574+ self._logger.warn('%d done/cancel moves set from ED or BN to no' % (cr.rowcount, ))
575+
576+
577+ # set bn on no moves
578+ cr.execute('''select distinct(product_id) from stock_move m, product_product p where p.id = m.product_id and p.perishable='t' and p.batch_management='t' and m.prodlot_id is null and m.state = 'done' and m.product_qty!=0 and m.location_dest_id != m.location_id''')
579+ self._logger.warn('%d done/cancel moves set from NO to BN' % (cr.rowcount, ))
580+ for prod_id in cr.fetchall():
581+ batch_id = lot_obj._get_or_create_lot(cr, uid, name=fake_bn, expiry_date=fake_ed, product_id=prod_id)
582+ cr.execute("update stock_move set hidden_batch_management_mandatory='t', hidden_perishable_mandatory='f', prodlot_id=%s, expired_date=%s, old_lot_info='US-6905 BN set'||E'\n'||COALESCE(old_lot_info, '') where product_id=%s and prodlot_id is null and state = 'done' and product_qty!=0 and location_dest_id != location_id", (batch_id, fake_ed, prod_id))
583+
584+ # set ed on no moves
585+ cr.execute('''select distinct(product_id) from stock_move m, product_product p where p.id = m.product_id and p.perishable='t' and p.batch_management='f' and m.prodlot_id is null and m.state = 'done' and m.product_qty!=0 and m.location_dest_id != m.location_id''')
586+ self._logger.warn('%d done/cancel moves set from NO to ED' % (cr.rowcount, ))
587+ for prod_id in cr.fetchall():
588+ batch_id = lot_obj._get_or_create_lot(cr, uid, name=False, expiry_date=fake_ed, product_id=prod_id)
589+ cr.execute("update stock_move set hidden_batch_management_mandatory='f', hidden_perishable_mandatory='t', prodlot_id=%s, expired_date=%s, old_lot_info='US-6905 EN set'||E'\n'||COALESCE(old_lot_info, '') where product_id=%s and prodlot_id is null and state = 'done' and product_qty!=0 and location_dest_id != location_id", (batch_id, fake_ed, prod_id))
590+
591+ # set ed on bn moves
592+ cr.execute("update stock_production_lot set name='MSFBN/'||name, type='internal' where id in (select lot.id from stock_production_lot lot, product_product p where p.id = lot.product_id and type='standard' and p.perishable='t' and p.batch_management='f') returning name")
593+ for lot in cr.fetchall():
594+ self._logger.warn('BN %s from standard to internal' % (lot[0], ))
595+
596+ # set bn on ed moves
597+ cr.execute("update stock_production_lot set type='standard', name='S'||name where id in (select lot.id from stock_production_lot lot, product_product p where p.id = lot.product_id and type='internal' and p.perishable='t' and p.batch_management='t') returning name")
598+ for lot in cr.fetchall():
599+ self._logger.warn('BN %s from internal to standard' % (lot[0], ))
600+
601+ return True
602+
603+=======
604+ # UF16.0
605+ def us_6692_new_od_journals(self, cr, uid, *a, **b):
606+ """
607+ 1. Change the type of the existing correction journals (except OD) to "Correction Manual" so they remain usable
608+
609+ 2. Create:
610+ - ODM journals in all existing instances
611+ - ODHQ journals in existing coordo instances
612+
613+ Notes:
614+ - creations are done in Python as the objects created must sync normally
615+ - none of these journals already exists in prod. DB.
616+ """
617+ user_obj = self.pool.get('res.users')
618+ analytic_journal_obj = self.pool.get('account.analytic.journal')
619+ journal_obj = self.pool.get('account.journal')
620+ current_instance = user_obj.browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
621+ if current_instance: # existing instances only
622+ # existing correction journals
623+ cr.execute("""
624+ UPDATE account_analytic_journal
625+ SET type = 'correction_manual'
626+ WHERE type = 'correction'
627+ AND code != 'OD';
628+ """)
629+ self._logger.warn('%s correction analytic journal(s) updated.' % (cr.rowcount,))
630+ cr.execute("""
631+ UPDATE account_journal
632+ SET type = 'correction_manual'
633+ WHERE type = 'correction'
634+ AND code != 'OD';
635+ """)
636+ self._logger.warn('%s correction journal(s) updated.' % (cr.rowcount,))
637+ # ODM analytic journal
638+ odm_analytic_vals = {
639+ # Prop. Instance: by default the current one is used
640+ 'code': 'ODM',
641+ 'name': 'Correction manual',
642+ 'type': 'correction_manual',
643+ }
644+ odm_analytic_journal_id = analytic_journal_obj.create(cr, uid, odm_analytic_vals)
645+ # ODM G/L journal
646+ odm_vals = {
647+ # Prop. Instance: by default the current one is used
648+ 'code': 'ODM',
649+ 'name': 'Correction manual',
650+ 'type': 'correction_manual',
651+ 'analytic_journal_id': odm_analytic_journal_id,
652+ }
653+ journal_obj.create(cr, uid, odm_vals)
654+ if current_instance.level == 'coordo':
655+ # ODHQ analytic journal
656+ odhq_analytic_vals = {
657+ # Prop. Instance: by default the current one is used
658+ 'code': 'ODHQ',
659+ 'name': 'Correction automatic HQ',
660+ 'type': 'correction_hq',
661+ }
662+ odhq_analytic_journal_id = analytic_journal_obj.create(cr, uid, odhq_analytic_vals)
663+ # ODHQ G/L journal
664+ odhq_vals = {
665+ # Prop. Instance: by default the current one is used
666+ 'code': 'ODHQ',
667+ 'name': 'Correction automatic HQ',
668+ 'type': 'correction_hq',
669+ 'analytic_journal_id': odhq_analytic_journal_id,
670+ }
671+ journal_obj.create(cr, uid, odhq_vals)
672+ return True
673+
674+ # UF15.2
675+ def rec_entries_uf14_1_uf15(self, cr, uid, *a, **b):
676+ current_instance = self.pool.get('res.users').browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
677+ if current_instance:
678+ trigger_obj = self.pool.get('sync.trigger.something.target')
679+ cr.execute('''
680+ select sdref, values, source from sync_client_update_received where model='account.move.reconcile' and execution_date > ( select applied from sync_client_version where name='UF15.0') and fields not like '%action_date%'
681+ ''')
682+
683+ for update in cr.fetchall():
684+ rec_number = False
685+ try:
686+ rec_number = eval(update[1])
687+ except:
688+ self._logger.warn('Unable to parse values, sdref: %s' % update[0])
689+
690+ if rec_number:
691+ trigger_obj.create(cr, uid, {'name': 'trigger_rec', 'destination': update[2] , 'args': rec_number[0], 'local': True})
692+
693+ return True
694+
695+ # UF15.1
696+ def us_6930_gen_unreconcile(self, cr, uid, *a, **b):
697+ # generate updates to delete reconcile done after UF15.0
698+ current_instance = self.pool.get('res.users').browse(cr, uid, uid, fields_to_fetch=['company_id']).company_id.instance_id
699+ if current_instance:
700+ unrec_obj = self.pool.get('account.move.unreconcile')
701+ cr.execute('''
702+ select d.name from ir_model_data d
703+ left join
704+ account_move_reconcile rec on d.model='account.move.reconcile' and d.res_id = rec.id
705+ where d.model='account.move.reconcile' and rec.id is null and touched like '%action_date%'
706+ ''')
707+ for sdref_rec in cr.fetchall():
708+ unrec_obj.create(cr, uid, {'reconcile_sdref': sdref_rec[0]})
709+ return True
710+
711+ def us_6905_manage_bned_switch(self, cr, uid, *a, **b):
712+ fake_ed = '2999-12-31'
713+ fake_bn = 'TO-BE-REPLACED'
714+
715+ lot_obj = self.pool.get('stock.production.lot')
716+
717+ # old move with BN or ED if product is no_bn no_ed
718+ # set no on bn or en moves
719+ cr.execute('''
720+ update stock_move set prodlot_id=NULL, expired_date=NULL, hidden_batch_management_mandatory='f', hidden_perishable_mandatory='f', old_lot_info=(select name||'#'||life_date from stock_production_lot where id=stock_move.prodlot_id)||E'\n'||COALESCE(old_lot_info, '') where id in
721+ (select m.id from stock_move m, product_product p where p.id = m.product_id and p.perishable='f' and p.batch_management='f' and m.prodlot_id is not null and m.state in ('done', 'cancel'))
722+ ''')
723+ self._logger.warn('%d done/cancel moves set from ED or BN to no' % (cr.rowcount, ))
724+
725+
726+ # set bn on no moves
727+ cr.execute('''select distinct(product_id) from stock_move m, product_product p where p.id = m.product_id and p.perishable='t' and p.batch_management='t' and m.prodlot_id is null and m.state = 'done' and m.product_qty!=0 and m.location_dest_id != m.location_id''')
728+ self._logger.warn('%d done/cancel moves set from NO to BN' % (cr.rowcount, ))
729+ for prod_id in cr.fetchall():
730+ batch_id = lot_obj._get_or_create_lot(cr, uid, name=fake_bn, expiry_date=fake_ed, product_id=prod_id)
731+ cr.execute("update stock_move set hidden_batch_management_mandatory='t', hidden_perishable_mandatory='f', prodlot_id=%s, expired_date=%s, old_lot_info='US-6905 BN set'||E'\n'||COALESCE(old_lot_info, '') where product_id=%s and prodlot_id is null and state = 'done' and product_qty!=0 and location_dest_id != location_id", (batch_id, fake_ed, prod_id))
732+
733+ # set ed on no moves
734+ cr.execute('''select distinct(product_id) from stock_move m, product_product p where p.id = m.product_id and p.perishable='t' and p.batch_management='f' and m.prodlot_id is null and m.state = 'done' and m.product_qty!=0 and m.location_dest_id != m.location_id''')
735+ self._logger.warn('%d done/cancel moves set from NO to ED' % (cr.rowcount, ))
736+ for prod_id in cr.fetchall():
737+ batch_id = lot_obj._get_or_create_lot(cr, uid, name=False, expiry_date=fake_ed, product_id=prod_id)
738+ cr.execute("update stock_move set hidden_batch_management_mandatory='f', hidden_perishable_mandatory='t', prodlot_id=%s, expired_date=%s, old_lot_info='US-6905 EN set'||E'\n'||COALESCE(old_lot_info, '') where product_id=%s and prodlot_id is null and state = 'done' and product_qty!=0 and location_dest_id != location_id", (batch_id, fake_ed, prod_id))
739+
740+ # set ed on bn moves
741+ cr.execute("update stock_production_lot set name='MSFBN/'||name, type='internal' where id in (select lot.id from stock_production_lot lot, product_product p where p.id = lot.product_id and type='standard' and p.perishable='t' and p.batch_management='f') returning name")
742+ for lot in cr.fetchall():
743+ self._logger.warn('BN %s from standard to internal' % (lot[0], ))
744+
745+ # set bn on ed moves
746+ cr.execute("update stock_production_lot set type='standard', name='S'||name where id in (select lot.id from stock_production_lot lot, product_product p where p.id = lot.product_id and type='internal' and p.perishable='t' and p.batch_management='t') returning name")
747+ for lot in cr.fetchall():
748+ self._logger.warn('BN %s from internal to standard' % (lot[0], ))
749+ return True
750+
751+>>>>>>> MERGE-SOURCE
752 # UF15.0
753 def us_6768_trigger_FP_sync(self, cr, uid, *a, **b):
754 """
755
756=== modified file 'bin/addons/stock/physical_inventory.py'
757=== modified file 'bin/addons/stock/wizard/physical_inventory_generate_counting_sheet.py'
758=== modified file 'bin/addons/vertical_integration/report/hq_report_ocb.py'
759--- bin/addons/vertical_integration/report/hq_report_ocb.py 2019-12-13 15:12:51 +0000
760+++ bin/addons/vertical_integration/report/hq_report_ocb.py 2020-02-04 10:20:45 +0000
761@@ -397,6 +397,8 @@
762 context = {}
763 # Prepare some values
764 pool = pooler.get_pool(cr.dbname)
765+ mi_obj = pool.get('msf.instance')
766+ period_obj = pool.get('account.period')
767 excluded_journal_types = ['hq', 'migration'] # journal types that should not be used to take lines
768 # Fetch data from wizard
769 if not data.get('form', False):
770@@ -408,10 +410,12 @@
771 instance_id = form.get('instance_id', False)
772 if not fy_id or not period_id or not instance_ids or not instance_id:
773 raise osv.except_osv(_('Warning'), _('Some info are missing. Either fiscalyear or period or instance.'))
774+ instance_lvl = mi_obj.browse(cr, uid, instance_id, fields_to_fetch=['level'], context=context).level
775 fy = pool.get('account.fiscalyear').browse(cr, uid, fy_id)
776 last_fy_year = strptime(fy.date_start, '%Y-%m-%d').tm_year - 1 # Take previous year regarding given fiscalyear
777 first_day_of_last_fy = '%s-01-01' % (last_fy_year)
778- period = pool.get('account.period').browse(cr, uid, period_id)
779+ period = period_obj.browse(cr, uid, period_id, fields_to_fetch=['date_stop', 'date_start', 'number'])
780+ previous_period_id = period_obj.get_previous_period_id(cr, uid, period_id, context=context)
781 last_day_of_period = period.date_stop
782 first_day_of_period = period.date_start
783 selection = form.get('selection', False)
784@@ -436,7 +440,6 @@
785 plresult_ji_in_ids = []
786 if period.number == 12:
787 ayec_obj = pool.get("account.year.end.closing")
788- mi_obj = pool.get('msf.instance')
789 m_obj = pool.get('account.move')
790 ml_obj = pool.get('account.move.line')
791
792@@ -466,13 +469,33 @@
793 # SQLREQUESTS DICTIONNARY
794 # - key: name of the SQL request
795 # - value: the SQL request to use
796- sqlrequests = {
797- 'partner': """
798- SELECT id, name, ref, partner_type, CASE WHEN active='t' THEN 'True' WHEN active='f' THEN 'False' END AS active
799+ partner_sql = """
800+ SELECT id, name, ref, partner_type, CASE WHEN active='t' THEN 'True' WHEN active='f' THEN 'False' END AS active%s
801 FROM res_partner
802 WHERE partner_type != 'internal'
803 and name != 'To be defined';
804- """,
805+ """ % (not context.get("old_vi") and ", comment" or "")
806+ if not previous_period_id or instance_lvl == 'section':
807+ # empty report in case there is no previous period or an HQ instance is selected
808+ balance_previous_month_sql = "SELECT '' AS no_line;"
809+ else:
810+ # note: even balances with zero amount are displayed in the report
811+ balance_previous_month_sql = """
812+ SELECT acc.code, curr.name, SUM(COALESCE(aml.debit_currency,0) - COALESCE(aml.credit_currency,0))
813+ FROM account_move_line aml
814+ INNER JOIN account_journal j ON aml.journal_id = j.id
815+ INNER JOIN account_account acc ON aml.account_id = acc.id
816+ INNER JOIN res_currency curr ON aml.currency_id = curr.id
817+ INNER JOIN account_move m ON aml.move_id = m.id
818+ WHERE aml.period_id = %s
819+ AND j.type NOT IN %s
820+ AND aml.instance_id IN %s
821+ AND m.state = 'posted'
822+ GROUP BY acc.code, curr.name
823+ ORDER BY acc.code, curr.name;
824+ """
825+ sqlrequests = {
826+ 'partner': partner_sql,
827 'employee': """
828 SELECT r.name, e.identification_id, r.active, e.employee_type
829 FROM hr_employee AS e, resource_resource AS r
830@@ -634,6 +657,7 @@
831 AND m.state = 'posted'
832 ORDER BY aml.id;
833 """,
834+ 'balance_previous_month': balance_previous_month_sql,
835 }
836 if plresult_ji_in_ids:
837 # NOTE: for these entries: booking and fonctional ccy are same
838@@ -676,9 +700,12 @@
839 # + If you cannot do a SQL request to create the content of the file, do a simple request (with key) and add a postprocess function that returns the result you want
840 instance_name = 'OCB' # since US-949
841 reg_types = ('cash', 'bank', 'cheque')
842+ partner_header = ['XML_ID', 'Name', 'Reference', 'Partner type', 'Active/inactive']
843+ if not context.get('old_vi'):
844+ partner_header.append('Notes')
845 processrequests = [
846 {
847- 'headers': ['XML_ID', 'Name', 'Reference', 'Partner type', 'Active/inactive'],
848+ 'headers': partner_header,
849 'filename': instance_name + '_' + year + month + '_Partners.csv',
850 'key': 'partner',
851 'function': 'postprocess_partners',
852@@ -699,37 +726,6 @@
853 'fnct_params': ([('account.journal', 'type', 3)], context),
854 },
855 {
856- 'headers': ['Name', 'Code', 'Type', 'Status'],
857- 'filename': instance_name + '_' + year + month + '_Cost Centres.csv',
858- 'key': 'costcenter',
859- 'query_params': (last_day_of_period, last_day_of_period, tuple(instance_ids),last_day_of_period, last_day_of_period, tuple(instance_ids)),
860- 'function': 'postprocess_selection_columns',
861- 'fnct_params': [('account.analytic.account', 'type', 2)],
862- },
863- {
864- 'headers': ['CCY name', 'CCY code', 'Rate', 'Month'],
865- 'filename': instance_name + '_' + year + month + '_FX rates.csv',
866- 'key': 'fxrate',
867- 'query_params': (first_day_of_last_fy, last_day_of_period),
868- },
869- {
870- 'headers': ['Instance', 'Code', 'Name', 'Period', 'Starting balance', 'Calculated balance', 'Closing balance', 'Currency'],
871- 'filename': instance_name + '_' + year + month + '_Liquidity Balances.csv',
872- 'key': 'liquidity',
873- 'query_params': (tuple([period_yyyymm]), reg_types, first_day_of_period, reg_types, first_day_of_period,
874- last_day_of_period, reg_types, last_day_of_period, tuple(instance_ids)),
875- 'function': 'postprocess_liquidity_balances',
876- 'fnct_params': context,
877- },
878- {
879- 'headers': ['Name', 'Code', 'Donor code', 'Grant amount', 'Reporting CCY', 'State'],
880- 'filename': instance_name + '_' + year + month + '_Financing contracts.csv',
881- 'key': 'contract',
882- 'query_params': (tuple(instance_ids),),
883- 'function': 'postprocess_selection_columns',
884- 'fnct_params': [('financing.contract.contract', 'state', 5)],
885- },
886- {
887 'headers': ['DB ID', 'Instance', 'Journal', 'Entry sequence', 'Description', 'Reference', 'Document date', 'Posting date', 'G/L Account', 'Third party', 'Destination', 'Cost centre', 'Funding pool', 'Booking debit', 'Booking credit', 'Booking currency', 'Functional debit', 'Functional credit', 'Functional CCY', 'Emplid', 'Partner DB ID'],
888 'filename': instance_name + '_' + year + month + '_Monthly Export.csv',
889 'key': 'rawdata',
890@@ -758,6 +754,57 @@
891 'object': 'account.move.line',
892 },
893 ]
894+ if context.get('old_vi'):
895+ processrequests.extend([
896+ {
897+ 'headers': ['Name', 'Code', 'Type', 'Status'],
898+ 'filename': instance_name + '_' + year + month + '_Cost Centres.csv',
899+ 'key': 'costcenter',
900+ 'query_params': (
901+ last_day_of_period, last_day_of_period, tuple(instance_ids), last_day_of_period,
902+ last_day_of_period,
903+ tuple(instance_ids)),
904+ 'function': 'postprocess_selection_columns',
905+ 'fnct_params': [('account.analytic.account', 'type', 2)],
906+ },
907+ {
908+ 'headers': ['Name', 'Code', 'Donor code', 'Grant amount', 'Reporting CCY', 'State'],
909+ 'filename': instance_name + '_' + year + month + '_Financing contracts.csv',
910+ 'key': 'contract',
911+ 'query_params': (tuple(instance_ids),),
912+ 'function': 'postprocess_selection_columns',
913+ 'fnct_params': [('financing.contract.contract', 'state', 5)],
914+ },
915+ {
916+ 'headers': ['CCY name', 'CCY code', 'Rate', 'Month'],
917+ 'filename': instance_name + '_' + year + month + '_FX rates.csv',
918+ 'key': 'fxrate',
919+ 'query_params': (first_day_of_last_fy, last_day_of_period),
920+ },
921+ {
922+ 'headers': ['Instance', 'Code', 'Name', 'Period', 'Starting balance', 'Calculated balance',
923+ 'Closing balance', 'Currency'],
924+ 'filename': instance_name + '_' + year + month + '_Liquidity Balances.csv',
925+ 'key': 'liquidity',
926+ 'query_params': (
927+ tuple([period_yyyymm]), reg_types, first_day_of_period, reg_types, first_day_of_period,
928+ last_day_of_period, reg_types, last_day_of_period, tuple(instance_ids)),
929+ 'function': 'postprocess_liquidity_balances',
930+ 'fnct_params': context,
931+ },
932+ ])
933+ if not context.get('old_vi'):
934+ processrequests.extend([
935+ {
936+ 'headers': ['G/L Account', 'Booking currency', 'Balance'],
937+ 'filename': instance_name + '_' + year + month + '_Balance_previous_month.csv',
938+ 'key': 'balance_previous_month',
939+ 'query_params': (previous_period_id,
940+ # note: engagements are also excluded since there are no ENG/ENGI "G/L" journals
941+ tuple(excluded_journal_types + ['cur_adj']),
942+ tuple(instance_ids)),
943+ }
944+ ])
945 if plresult_ji_in_ids:
946 processrequests.append({
947 'filename': instance_name + '_' + year + month + '_Monthly Export.csv',
948
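A hedged sketch (not the actual report engine) of how the three query_params of the new 'balance_previous_month' entry line up with the %s placeholders of balance_previous_month_sql; it assumes a previous period exists and a non-HQ instance is selected, so the full query (not the empty 'no_line' one) is executed. The print call is only illustrative, the real builder writes the CSV from the processrequests list:

params = (previous_period_id,                            # aml.period_id = %s
          tuple(excluded_journal_types + ['cur_adj']),   # j.type NOT IN %s; engagements drop out implicitly, there are no ENG "G/L" journals
          tuple(instance_ids))                           # aml.instance_id IN %s
cr.execute(balance_previous_month_sql, params)           # psycopg2 expands the tuples into IN (...) lists
for account_code, booking_ccy, balance in cr.fetchall():
    # one row per account / booking-currency pair, matching the
    # ['G/L Account', 'Booking currency', 'Balance'] headers declared above
    print('%s, %s, %s' % (account_code, booking_ccy, balance))
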
949=== modified file 'bin/addons/vertical_integration/vertical_integration_wizard.xml'
950--- bin/addons/vertical_integration/vertical_integration_wizard.xml 2017-10-17 09:57:39 +0000
951+++ bin/addons/vertical_integration/vertical_integration_wizard.xml 2020-02-04 10:20:45 +0000
952@@ -109,6 +109,21 @@
953 action="action_hq_export_ocb"
954 id="menu_action_hq_export_ocb" sequence="30"/>
955
956+ <!-- OLD OCB VI (to be deleted once this version is dropped) -->
957+ <record id="action_hq_export_ocb_old" model="ir.actions.act_window">
958+ <field name="name">Export to HQ system (OCB) Access Interface</field>
959+ <field name="type">ir.actions.act_window</field>
960+ <field name="res_model">ocb.export.wizard</field>
961+ <field name="view_type">form</field>
962+ <field name="view_mode">form</field>
963+ <field name="view_id" ref="ocb_export_wizard"/>
964+ <field name="context">{'old_vi': True}</field>
965+ <field name="target">new</field>
966+ </record>
967+ <menuitem parent="account.menu_finance_generic_reporting"
968+ action="action_hq_export_ocb_old"
969+ id="menu_action_hq_export_ocb_old" sequence="32"/>
970+
971 <!-- OCB matching export -->
972
973 <record id="ocb_matching_export_wizard" model="ir.ui.view">
