Merge lp:~julie-w/unifield-server/US-3677 into lp:unifield-server

Proposed by jftempo
Status: Merged
Merged at revision: 5130
Proposed branch: lp:~julie-w/unifield-server/US-3677
Merge into: lp:unifield-server
Diff against target: 288 lines (+210/-0) (has conflicts)
3 files modified
bin/addons/account_override/invoice.py (+157/-0)
bin/addons/msf_profile/i18n/fr_MF.po (+43/-0)
bin/osv/orm.py (+10/-0)
Text conflict in bin/addons/msf_profile/i18n/fr_MF.po
To merge this branch: bzr merge lp:~julie-w/unifield-server/US-3677
Reviewer: UniField Reviewer Team (status: Pending)
Review via email: mp+357629@code.launchpad.net

Preview Diff

1=== modified file 'bin/addons/account_override/invoice.py'
2--- bin/addons/account_override/invoice.py 2018-08-06 13:07:33 +0000
3+++ bin/addons/account_override/invoice.py 2018-10-22 13:50:56 +0000
4@@ -27,6 +27,7 @@
5 from time import strftime
6 from tools.translate import _
7 from lxml import etree
8+from datetime import datetime
9 import re
10 import netsvc
11
12@@ -264,6 +265,141 @@
13 'is_merged_by_account': lambda *a: False,
14 }
15
16+ def import_data_web(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None,
17+ display_all_errors=False, has_header=False):
18+ """
19+ Overrides the standard import_data_web method for the account.invoice model:
20+ - based on the 3 values "cost_center_id / destination_id / funding_pool_id", creates a new AD at 100% for
21+ each invoice line and adds it to "datas"
22+ - removes these 3 values that won't be used in the SI line
23+ - adapts the "fields" list accordingly
24+ - converts the dates from the format '%d/%m/%Y' to the standard one '%Y-%m-%d' so the checks on dates are correctly made
25+ """
26+ if context is None:
27+ context = {}
28+ new_data = datas
29+ analytic_acc_obj = self.pool.get('account.analytic.account')
30+ account_obj = self.pool.get('account.account')
31+ analytic_distrib_obj = self.pool.get('analytic.distribution')
32+ cc_distrib_line_obj = self.pool.get('cost.center.distribution.line')
33+ fp_distrib_line_obj = self.pool.get('funding.pool.distribution.line')
34+ curr_obj = self.pool.get('res.currency')
35+ nb_ad_fields = 0
36+ if 'invoice_line/cost_center_id' in fields:
37+ nb_ad_fields += 1
38+ if 'invoice_line/destination_id' in fields:
39+ nb_ad_fields += 1
40+ if 'invoice_line/funding_pool_id' in fields:
41+ nb_ad_fields += 1
42+ if nb_ad_fields:
43+ if nb_ad_fields != 3:
44+ raise osv.except_osv(_('Error'),
45+ _('Either the Cost Center, the Destination, or the Funding Pool is missing.'))
46+ # note: CC, dest and FP indexes always exist at this step
47+ cc_index = fields.index('invoice_line/cost_center_id')
48+ dest_index = fields.index('invoice_line/destination_id')
49+ fp_index = fields.index('invoice_line/funding_pool_id')
50+ si_line_name_index = 'invoice_line/name' in fields and fields.index('invoice_line/name')
51+ si_journal_index = 'journal_id' in fields and fields.index('journal_id')
52+ curr_index = 'currency_id' in fields and fields.index('currency_id')
53+ account_index = 'invoice_line/account_id' in fields and fields.index('invoice_line/account_id')
54+ doc_date_index = 'document_date' in fields and fields.index('document_date')
55+ date_inv_index = 'date_invoice' in fields and fields.index('date_invoice')
56+ new_data = []
57+ curr = False
58+ for data in datas:
59+ cc_ids = []
60+ dest_ids = []
61+ fp_ids = []
62+ distrib_id = ''
63+ cc = len(data) > cc_index and data[cc_index].strip()
64+ dest = len(data) > dest_index and data[dest_index].strip()
65+ fp = len(data) > fp_index and data[fp_index].strip()
66+ # check if details for SI line are filled in (based on the required field "name")
67+ has_si_line = si_line_name_index is not False and len(data) > si_line_name_index and data[si_line_name_index].strip()
68+ # process AD only for SI lines where at least one AD field has been filled in
69+ # (otherwise no AD should be added to the line AND no error should be displayed)
70+ if has_si_line and (cc or dest or fp): # at least one AD field has been filled in
71+ if cc:
72+ cc_dom = [('category', '=', 'OC'), ('type', '=', 'normal'), '|', ('code', '=ilike', cc), ('name', '=ilike', cc)]
73+ cc_ids = analytic_acc_obj.search(cr, uid, cc_dom, order='id', limit=1, context=context)
74+ if dest:
75+ dest_dom = [('category', '=', 'DEST'), ('type', '=', 'normal'), '|', ('code', '=ilike', dest), ('name', '=ilike', dest)]
76+ dest_ids = analytic_acc_obj.search(cr, uid, dest_dom, order='id', limit=1, context=context)
77+ if fp:
78+ fp_dom = [('category', '=', 'FUNDING'), ('type', '=', 'normal'), '|', ('code', '=ilike', fp), ('name', '=ilike', fp)]
79+ fp_ids = analytic_acc_obj.search(cr, uid, fp_dom, order='id', limit=1, context=context)
80+ if not cc_ids or not dest_ids or not fp_ids:
81+ raise osv.except_osv(_('Error'), _('Either the Cost Center, the Destination, or the Funding Pool '
82+ 'was not found on the line %s.') % data)
83+ else:
84+ # create the Analytic Distribution
85+ distrib_id = analytic_distrib_obj.create(cr, uid, {}, context=context)
86+ # get the next currency to use if need be (for an SI with several lines the currency is indicated on the first one only)
87+ si_journal = si_journal_index is not False and len(data) > si_journal_index and data[si_journal_index].strip()
88+ if si_journal: # first line of the SI
89+ curr = curr_index is not False and len(data) > curr_index and data[curr_index].strip()
90+ curr_ids = []
91+ if curr: # must exist at least on the first imported line
92+ curr_ids = curr_obj.search(cr, uid, [('name', '=ilike', curr)], limit=1, context=context)
93+ if not curr_ids:
94+ raise osv.except_osv(_('Error'),
95+ _('The currency was not found for the line %s.') % data)
96+ vals = {
97+ 'analytic_id': cc_ids[0], # analytic_id = Cost Center for the CC distrib line
98+ 'percentage': 100.0,
99+ 'distribution_id': distrib_id,
100+ 'currency_id': curr_ids[0],
101+ 'destination_id': dest_ids[0],
102+ }
103+ cc_distrib_line_obj.create(cr, uid, vals, context=context)
104+ vals.update({
105+ 'analytic_id': fp_ids[0], # analytic_id = Funding Pool for the FP distrib line
106+ 'cost_center_id': cc_ids[0],
107+ })
108+ fp_distrib_line_obj.create(cr, uid, vals, context=context)
109+ account_code = account_index is not False and len(data) > account_index and data[account_index].strip()
110+ if account_code:
111+ account_ids = account_obj.search(cr, uid, [('code', '=', account_code)], context=context, limit=1)
112+ if not account_ids:
113+ raise osv.except_osv(_('Error'), _('The account %s was not found on the line %s.') % (account_code, data))
114+ parent_id = False # no distrib. at header level
115+ distrib_state = analytic_distrib_obj._get_distribution_state(cr, uid, distrib_id, parent_id,
116+ account_ids[0], context=context)
117+ if distrib_state == 'invalid':
118+ raise osv.except_osv(_('Error'), _('The analytic distribution is invalid on the line %s.') % data)
119+ # create a new list with the new distrib id and without the old AD fields
120+ # to be done also if no AD to ensure the size of each data list is always the same
121+ i = 0
122+ new_sub_list = []
123+ for d in data: # loop on each value of the file line
124+ if i not in [cc_index, dest_index, fp_index]:
125+ if doc_date_index is not False and date_inv_index is not False and i in [doc_date_index, date_inv_index]:
126+ # format the date from '%d/%m/%Y' to '%Y-%m-%d' so the checks on dates are correctly made
127+ raw_date = len(data) > i and data[i].strip()
128+ try:
129+ new_date = raw_date and datetime.strptime(raw_date, '%d/%m/%Y').strftime('%Y-%m-%d') or ''
130+ except ValueError:
131+ new_date = raw_date
132+ new_sub_list.append(new_date)
133+ else:
134+ new_sub_list.append(d)
135+ i += 1
136+ # add new field value
137+ new_sub_list.append(distrib_id)
138+ new_data.append(new_sub_list)
139+
140+ # remove old field names from fields
141+ fields.remove('invoice_line/cost_center_id')
142+ fields.remove('invoice_line/destination_id')
143+ fields.remove('invoice_line/funding_pool_id')
144+ # add new field
145+ fields.append('invoice_line/analytic_distribution_id/.id') # .id = id in the database
146+
147+ return super(account_invoice, self).import_data_web(cr, uid, fields, new_data, mode=mode, current_module=current_module,
148+ noupdate=noupdate, context=context, filename=filename,
149+ display_all_errors=display_all_errors, has_header=has_header)
150+
151 def onchange_company_id(self, cr, uid, ids, company_id, part_id, ctype, invoice_line, currency_id):
152 """
153 This is a method to redefine the journal_id domain with the current_instance taken into account
154@@ -1529,6 +1665,15 @@
155
156 return res
157
158+ def _get_fake_m2o(self, cr, uid, ids, field_name=None, arg=None, context=None):
159+ """
160+ Returns False for all ids
161+ """
162+ res = {}
163+ for i in ids:
164+ res[i] = False
165+ return res
166+
167 _columns = {
168 'line_number': fields.integer(string='Line Number'),
169 'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Account Computation')),
170@@ -1543,6 +1688,18 @@
171 'reversed_invoice_line_id': fields.many2one('account.invoice.line', string='Reversed Invoice Line',
172 help='Invoice line that has been reversed by this one through a '
173 '"refund cancel" or "refund modify"'),
174+ 'cost_center_id': fields.function(_get_fake_m2o, method=True, type='many2one', store=False,
175+ states={'draft': [('readonly', False)]}, # see def detect_data in unifield-web/addons/openerp/controllers/impex.py
176+ relation="account.analytic.account", string='Cost Center',
177+ help="Field used for import only"),
178+ 'destination_id': fields.function(_get_fake_m2o, method=True, type='many2one', store=False,
179+ relation="account.analytic.account", string='Destination',
180+ states={'draft': [('readonly', False)]},
181+ help="Field used for import only"),
182+ 'funding_pool_id': fields.function(_get_fake_m2o, method=True, type='many2one', store=False,
183+ relation="account.analytic.account", string='Funding Pool',
184+ states={'draft': [('readonly', False)]},
185+ help="Field used for import only"),
186 }
187
188 _defaults = {
189
190=== modified file 'bin/addons/msf_profile/i18n/fr_MF.po'
191--- bin/addons/msf_profile/i18n/fr_MF.po 2018-10-17 09:42:30 +0000
192+++ bin/addons/msf_profile/i18n/fr_MF.po 2018-10-22 13:50:56 +0000
193@@ -38394,6 +38394,7 @@
194 #: field:hr.analytic.reallocation,funding_pool_id:0
195 #: field:hr.employee,funding_pool_id:0
196 #: field:hr.payroll.msf,funding_pool_id:0
197+#: field:account.invoice.line,funding_pool_id:0
198 #: report:addons/account/report/free_allocation_report.mako:208
199 #, python-format
200 msgid "Funding Pool"
201@@ -81948,6 +81949,7 @@
202 #: report:addons/sale/report/fo_allocation_report.mako:231
203 #: report:sale.order.allocation.report:0
204 #: field:purchase.report,cost_center_id:0
205+#: field:account.invoice.line,cost_center_id:0
206 #: code:addons/register_accounting/wizard/wizard_register_import.py:570
207 #: report:addons/account/report/free_allocation_report.mako:205
208 #, python-format
209@@ -96141,6 +96143,7 @@
210 #: field:purchase.order.line.allocation.report,destination_id:0
211 #: code:addons/register_accounting/wizard/wizard_register_import.py:558
212 #: field:wizard.register.import.lines,destination_id:0
213+#: field:account.invoice.line,destination_id:0
214 #: report:addons/sale/report/fo_allocation_report.mako:228
215 #: report:sale.order.allocation.report:0
216 #: field:sale.order.line.cancel,partner_id:0
217@@ -101952,6 +101955,7 @@
218 #: field:account.commitment.line,commit_number:0
219 msgid "Commitment Voucher Number"
220 msgstr "Numéro du Bon d'Engagement"
221+<<<<<<< TREE
222
223 #. module: account_mcdb
224 #: code:addons/account_mcdb/account_mcdb.py:1494
225@@ -101975,3 +101979,42 @@
226 #: code:addons/account_mcdb/account_mcdb.py:1276
227 msgid "Related entries"
228 msgstr "Ecritures associées"
229+=======
230+
231+#. module: account_override
232+#: code:addons/account_override/invoice.py:339
233+#, python-format
234+msgid "The currency was not found for the line %s."
235+msgstr "La devise n'a pas été trouvée pour la ligne %s."
236+
237+#. module: account_override
238+#: code:addons/account_override/invoice.py:293
239+#, python-format
240+msgid "Either the Cost Center, the Destination, or the Funding Pool is missing."
241+msgstr "Il manque le Centre de Coût, la Destination, ou le Funding Pool."
242+
243+#. module: account_override
244+#: code:addons/account_override/invoice.py:325
245+#, python-format
246+msgid "Either the Cost Center, the Destination, or the Funding Pool was not found on the line %s."
247+msgstr "Le Centre de Coût, la Destination, ou le Funding Pool n'a pas été trouvé sur la ligne %s."
248+
249+#. module: account_override
250+#: code:addons/account_override/invoice.py:365
251+#, python-format
252+msgid "The account %s was not found on the line %s."
253+msgstr "Le compte %s n'a pas été trouvé sur la ligne %s."
254+
255+#. module: account_override
256+#: code:addons/account_override/invoice.py:370
257+#, python-format
258+msgid "The analytic distribution is invalid on the line %s."
259+msgstr "La distribution analytique est invalide sur la ligne %s."
260+
261+#. module: account_override
262+#: help:account.invoice.line,cost_center_id:0
263+#: help:account.invoice.line,destination_id:0
264+#: help:account.invoice.line,funding_pool_id:0
265+msgid "Field used for import only"
266+msgstr "Champ utilisé pour l'import uniquement"
267+>>>>>>> MERGE-SOURCE
268
269=== modified file 'bin/osv/orm.py'
270--- bin/osv/orm.py 2018-08-14 14:10:58 +0000
271+++ bin/osv/orm.py 2018-10-22 13:50:56 +0000
272@@ -1122,6 +1122,16 @@
273 self._parent_store_compute(cr)
274 return (position, 0, 0, 0)
275
276+ def import_data_web(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None,
277+ filename=None, display_all_errors=False, has_header=False):
278+ """
279+ Import method called ONLY when importing from the web (unlike the import_data method, which is also used for sync msgs).
280+ Calls import_data by default but can be overridden if needed.
281+ """
282+ return super(orm, self).import_data(cr, uid, fields, datas, mode=mode, current_module=current_module,
283+ noupdate=noupdate, context=context, filename=filename,
284+ display_all_errors=display_all_errors, has_header=has_header)
285+
286 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
287 """
288 Read records with given ids with the given fields
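To make the dispatch easier to follow, here is a minimal sketch of the hook pattern added in orm.py, written as plain Python outside the OpenERP framework (BaseModel, Invoice and the printed output are illustrative only): the base class exposes a web-only entry point that simply delegates to the generic import, and a model such as account.invoice can override it to reshape the fields and rows before calling super, without affecting imports triggered by synchronization.

    class BaseModel(object):
        def import_data(self, fields, rows):
            # generic import used both by the web client and by sync messages
            print('importing %d row(s) with fields %s' % (len(rows), fields))

        def import_data_web(self, fields, rows):
            # web-only entry point: defaults to the generic import
            return self.import_data(fields, rows)

    class Invoice(BaseModel):
        def import_data_web(self, fields, rows):
            # model-specific preprocessing for web imports only
            # (in the real override: build the AD, drop the CC/Dest/FP columns)
            fields = fields + ['invoice_line/analytic_distribution_id/.id']
            rows = [r + [False] for r in rows]
            return super(Invoice, self).import_data_web(fields, rows)

    Invoice().import_data_web(['journal_id'], [['PUR']])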
