Merge lp:~openerp-commiter/openobject-server/5.0-opw-4592-dhs into lp:openobject-server/5.0
- 5.0-opw-4592-dhs
- Merge into 5.0
Proposed by
Dhruti Shastri (OpenERP)
Status: | Merged |
---|---|
Merged at revision: | 2174 |
Proposed branch: | lp:~openerp-commiter/openobject-server/5.0-opw-4592-dhs |
Merge into: | lp:openobject-server/5.0 |
Diff against target: |
526 lines (+94/-92) 1 file modified
bin/osv/orm.py (+94/-92) |
To merge this branch: | bzr merge lp:~openerp-commiter/openobject-server/5.0-opw-4592-dhs |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
OpenERP Core Team | Pending | ||
Review via email: mp+55468@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
Jay Vora (Serpent Consulting Services) (jayvora) wrote : | # |
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/osv/orm.py' |
2 | --- bin/osv/orm.py 2011-02-04 09:32:33 +0000 |
3 | +++ bin/osv/orm.py 2011-03-30 04:57:36 +0000 |
4 | @@ -388,7 +388,7 @@ |
5 | #setting value to let the problem NOT occur next time |
6 | elif k in cols: |
7 | vals['select_level'] = cols[k]['select_level'] |
8 | - |
9 | + |
10 | if k not in cols: |
11 | cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',)) |
12 | id = cr.fetchone()[0] |
13 | @@ -468,10 +468,10 @@ |
14 | return 0.0 |
15 | elif field_type == 'integer': |
16 | return 0 |
17 | - elif field_type == 'boolean': |
18 | - return False |
19 | + elif field_type == 'boolean': |
20 | + return False |
21 | return '' |
22 | - |
23 | + |
24 | def selection_field(in_field): |
25 | col_obj = self.pool.get(in_field.keys()[0]) |
26 | if f[i] in col_obj._columns.keys(): |
27 | @@ -479,20 +479,20 @@ |
28 | elif f[i] in col_obj._inherits.keys(): |
29 | selection_field(col_obj._inherits) |
30 | else: |
31 | - return False |
32 | - |
33 | + return False |
34 | + |
35 | lines = [] |
36 | data = map(lambda x: '', range(len(fields))) |
37 | done = [] |
38 | for fpos in range(len(fields)): |
39 | - f = fields[fpos] |
40 | + f = fields[fpos] |
41 | if f: |
42 | r = row |
43 | i = 0 |
44 | while i < len(f): |
45 | if f[i] == 'db_id': |
46 | - r = r['id'] |
47 | - elif f[i] == 'id': |
48 | + r = r['id'] |
49 | + elif f[i] == 'id': |
50 | model_data = self.pool.get('ir.model.data') |
51 | data_ids = model_data.search(cr, uid, [('model','=',r._table_name),('res_id','=',r['id'])]) |
52 | if len(data_ids): |
53 | @@ -506,7 +506,7 @@ |
54 | else: |
55 | r = r[f[i]] |
56 | # To display external name of selection field when its exported |
57 | - if not context.get('import_comp',False):# Allow external name only if its not import compatible |
58 | + if not context.get('import_comp',False):# Allow external name only if its not import compatible |
59 | cols = False |
60 | if f[i] in self._columns.keys(): |
61 | cols = self._columns[f[i]] |
62 | @@ -518,11 +518,11 @@ |
63 | r = [x[1] for x in sel_list if r==x[0]] |
64 | r = r and r[0] or False |
65 | if not r: |
66 | - if f[i] in self._columns: |
67 | + if f[i] in self._columns: |
68 | r = check_type(self._columns[f[i]]._type) |
69 | elif f[i] in self._inherit_fields: |
70 | - r = check_type(self._inherit_fields[f[i]][2]._type) |
71 | - data[fpos] = r |
72 | + r = check_type(self._inherit_fields[f[i]][2]._type) |
73 | + data[fpos] = r |
74 | break |
75 | if isinstance(r, (browse_record_list, list)): |
76 | first = True |
77 | @@ -531,10 +531,10 @@ |
78 | if fields2 in done: |
79 | if [x for x in fields2 if x]: |
80 | break |
81 | - done.append(fields2) |
82 | + done.append(fields2) |
83 | for row2 in r: |
84 | lines2 = self.__export_row(cr, uid, row2, fields2, |
85 | - context) |
86 | + context) |
87 | if first: |
88 | for fpos2 in range(len(fields)): |
89 | if lines2 and lines2[0][fpos2]: |
90 | @@ -542,17 +542,19 @@ |
91 | if not data[fpos]: |
92 | dt = '' |
93 | for rr in r : |
94 | - if isinstance(rr.name, browse_record): |
95 | - rr = rr.name |
96 | - rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context) |
97 | - rr_name = rr_name and rr_name[0] and rr_name[0][1] or '' |
98 | + rr_name = '' |
99 | + if rr.name: |
100 | + if isinstance(rr.name, browse_record): |
101 | + rr = rr.name |
102 | + rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context) |
103 | + rr_name = rr_name and rr_name[0] and rr_name[0][1] or '' |
104 | dt += tools.ustr(rr_name or '') + ',' |
105 | data[fpos] = dt[:-1] |
106 | break |
107 | lines += lines2[1:] |
108 | first = False |
109 | else: |
110 | - lines += lines2 |
111 | + lines += lines2 |
112 | break |
113 | i += 1 |
114 | if i == len(f): |
115 | @@ -565,14 +567,14 @@ |
116 | def export_data(self, cr, uid, ids, fields_to_export, context=None): |
117 | if not context: |
118 | context = {} |
119 | - imp_comp = context.get('import_comp',False) |
120 | + imp_comp = context.get('import_comp',False) |
121 | cols = self._columns.copy() |
122 | for f in self._inherit_fields: |
123 | - cols.update({f: self._inherit_fields[f][2]}) |
124 | + cols.update({f: self._inherit_fields[f][2]}) |
125 | fields_to_export = map(lambda x: x.split('/'), fields_to_export) |
126 | - fields_export = fields_to_export+[] |
127 | - warning = '' |
128 | - warning_fields = [] |
129 | + fields_export = fields_to_export+[] |
130 | + warning = '' |
131 | + warning_fields = [] |
132 | for field in fields_export: |
133 | if imp_comp and len(field)>1: |
134 | warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field))) |
135 | @@ -580,11 +582,11 @@ |
136 | if imp_comp and cols.get(field and field[0],False): |
137 | if ((isinstance(cols[field[0]], fields.function) and not cols[field[0]].store) \ |
138 | or isinstance(cols[field[0]], fields.related)\ |
139 | - or isinstance(cols[field[0]], fields.one2many)): |
140 | + or isinstance(cols[field[0]], fields.one2many)): |
141 | warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field))) |
142 | datas = [] |
143 | if imp_comp and len(warning_fields): |
144 | - warning = 'Following columns cannot be exported since you select to be import compatible.\n%s' %('\n'.join(warning_fields)) |
145 | + warning = 'Following columns cannot be exported since you select to be import compatible.\n%s' %('\n'.join(warning_fields)) |
146 | cr.rollback() |
147 | return {'warning' : warning} |
148 | for row in self.browse(cr, uid, ids, context): |
149 | @@ -597,14 +599,14 @@ |
150 | fields = map(lambda x: x.split('/'), fields) |
151 | logger = netsvc.Logger() |
152 | ir_model_data_obj = self.pool.get('ir.model.data') |
153 | - |
154 | + |
155 | def _check_db_id(self, model_name, db_id): |
156 | obj_model = self.pool.get(model_name) |
157 | ids = obj_model.search(cr, uid, [('id','=',int(db_id))]) |
158 | if not len(ids): |
159 | raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, db_id)) |
160 | return True |
161 | - |
162 | + |
163 | def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0): |
164 | line = datas[position] |
165 | row = {} |
166 | @@ -624,25 +626,25 @@ |
167 | raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),)) |
168 | if not line[i]: |
169 | continue |
170 | - |
171 | + |
172 | field = fields[i] |
173 | if prefix and not prefix[0] in field: |
174 | continue |
175 | - |
176 | + |
177 | if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':db_id'): |
178 | # Database ID |
179 | res = False |
180 | if line[i]: |
181 | field_name = field[0].split(':')[0] |
182 | - model_rel = fields_def[field_name]['relation'] |
183 | - |
184 | + model_rel = fields_def[field_name]['relation'] |
185 | + |
186 | if fields_def[field[len(prefix)][:-6]]['type']=='many2many': |
187 | res_id = [] |
188 | for db_id in line[i].split(config.get('csv_internal_sep')): |
189 | try: |
190 | _check_db_id(self, model_rel, db_id) |
191 | res_id.append(db_id) |
192 | - except Exception,e: |
193 | + except Exception,e: |
194 | warning += [tools.exception_to_unicode(e)] |
195 | logger.notifyChannel("import", netsvc.LOG_ERROR, |
196 | tools.exception_to_unicode(e)) |
197 | @@ -652,10 +654,10 @@ |
198 | try: |
199 | _check_db_id(self, model_rel, line[i]) |
200 | res = line[i] |
201 | - except Exception,e: |
202 | + except Exception,e: |
203 | warning += [tools.exception_to_unicode(e)] |
204 | logger.notifyChannel("import", netsvc.LOG_ERROR, |
205 | - tools.exception_to_unicode(e)) |
206 | + tools.exception_to_unicode(e)) |
207 | row[field_name] = res or False |
208 | continue |
209 | |
210 | @@ -668,7 +670,7 @@ |
211 | if '.' in word: |
212 | module, xml_id = word.rsplit('.', 1) |
213 | else: |
214 | - module, xml_id = current_module, word |
215 | + module, xml_id = current_module, word |
216 | id = ir_model_data_obj._get_id(cr, uid, module, |
217 | xml_id) |
218 | res_id2 = ir_model_data_obj.read(cr, uid, [id], |
219 | @@ -681,7 +683,7 @@ |
220 | if '.' in line[i]: |
221 | module, xml_id = line[i].rsplit('.', 1) |
222 | else: |
223 | - module, xml_id = current_module, line[i] |
224 | + module, xml_id = current_module, line[i] |
225 | |
226 | record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id) |
227 | ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id']) |
228 | @@ -699,31 +701,31 @@ |
229 | continue |
230 | if (len(field) == len(prefix)+1) and \ |
231 | (prefix == field[0:len(prefix)]): |
232 | - if field[len(prefix)] == "id": |
233 | - # XML ID |
234 | - db_id = False |
235 | - is_xml_id = data_id = line[i] |
236 | + if field[len(prefix)] == "id": |
237 | + # XML ID |
238 | + db_id = False |
239 | + is_xml_id = data_id = line[i] |
240 | d = data_id.split('.') |
241 | module = len(d)>1 and d[0] or '' |
242 | - name = len(d)>1 and d[1] or d[0] |
243 | - data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('name','=',name)]) |
244 | + name = len(d)>1 and d[1] or d[0] |
245 | + data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('name','=',name)]) |
246 | if len(data_ids): |
247 | - d = ir_model_data_obj.read(cr, uid, data_ids, ['res_id'])[0] |
248 | - db_id = d['res_id'] |
249 | + d = ir_model_data_obj.read(cr, uid, data_ids, ['res_id'])[0] |
250 | + db_id = d['res_id'] |
251 | if is_db_id and not db_id: |
252 | - data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('res_id','=',is_db_id)]) |
253 | + data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('res_id','=',is_db_id)]) |
254 | if not len(data_ids): |
255 | - ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id}) |
256 | - db_id = is_db_id |
257 | - if is_db_id and int(db_id) != int(is_db_id): |
258 | + ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id}) |
259 | + db_id = is_db_id |
260 | + if is_db_id and int(db_id) != int(is_db_id): |
261 | warning += [_("Id is not the same than existing one: %s")%(is_db_id)] |
262 | logger.notifyChannel("import", netsvc.LOG_ERROR, |
263 | _("Id is not the same than existing one: %s")%(is_db_id)) |
264 | continue |
265 | |
266 | if field[len(prefix)] == "db_id": |
267 | - # Database ID |
268 | - try: |
269 | + # Database ID |
270 | + try: |
271 | _check_db_id(self, model_name, line[i]) |
272 | data_res_id = is_db_id = int(line[i]) |
273 | except Exception,e: |
274 | @@ -733,19 +735,19 @@ |
275 | continue |
276 | data_ids = ir_model_data_obj.search(cr, uid, [('model','=',model_name),('res_id','=',line[i])]) |
277 | if len(data_ids): |
278 | - d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0] |
279 | - data_id = d['name'] |
280 | + d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0] |
281 | + data_id = d['name'] |
282 | if d['module']: |
283 | data_id = '%s.%s'%(d['module'],d['name']) |
284 | else: |
285 | data_id = d['name'] |
286 | if is_xml_id and not data_id: |
287 | - data_id = is_xml_id |
288 | - if is_xml_id and is_xml_id!=data_id: |
289 | + data_id = is_xml_id |
290 | + if is_xml_id and is_xml_id!=data_id: |
291 | warning += [_("Id is not the same than existing one: %s")%(line[i])] |
292 | logger.notifyChannel("import", netsvc.LOG_ERROR, |
293 | _("Id is not the same than existing one: %s")%(line[i])) |
294 | - |
295 | + |
296 | continue |
297 | if fields_def[field[len(prefix)]]['type'] == 'integer': |
298 | res = line[i] and int(line[i]) |
299 | @@ -769,9 +771,9 @@ |
300 | logger.notifyChannel("import", netsvc.LOG_WARNING, |
301 | _("key '%s' not found in selection field '%s'") % \ |
302 | (line[i], field[len(prefix)])) |
303 | - |
304 | + |
305 | warning += [_("Key/value '%s' not found in selection field '%s'")%(line[i],field[len(prefix)])] |
306 | - |
307 | + |
308 | elif fields_def[field[len(prefix)]]['type']=='many2one': |
309 | res = False |
310 | if line[i]: |
311 | @@ -814,13 +816,13 @@ |
312 | relation_obj = self.pool.get(fields_def[field]['relation']) |
313 | newfd = relation_obj.fields_get( |
314 | cr, uid, context=context) |
315 | - res = process_liness(self, datas, prefix + [field], current_module, relation_obj._name, newfd, position) |
316 | - (newrow, max2, w2, translate2, data_id2, data_res_id2) = res |
317 | + res = process_liness(self, datas, prefix + [field], current_module, relation_obj._name, newfd, position) |
318 | + (newrow, max2, w2, translate2, data_id2, data_res_id2) = res |
319 | nbrmax = max(nbrmax, max2) |
320 | - warning = warning + w2 |
321 | - reduce(lambda x, y: x and y, newrow) |
322 | + warning = warning + w2 |
323 | + reduce(lambda x, y: x and y, newrow) |
324 | row[field] = newrow and (reduce(lambda x, y: x or y, newrow.values()) and \ |
325 | - [(0, 0, newrow)]) or [] |
326 | + [(0, 0, newrow)]) or [] |
327 | i = max2 |
328 | while (position+i)<len(datas): |
329 | ok = True |
330 | @@ -835,7 +837,7 @@ |
331 | self, datas, prefix+[field], current_module, relation_obj._name, newfd, position+i) |
332 | warning = warning+w2 |
333 | if newrow and reduce(lambda x, y: x or y, newrow.values()): |
334 | - row[field].append((0, 0, newrow)) |
335 | + row[field].append((0, 0, newrow)) |
336 | i += max2 |
337 | nbrmax = max(nbrmax, i) |
338 | |
339 | @@ -884,7 +886,7 @@ |
340 | return (-1, res, 'Line ' + str(counter) +' : ' + msg, '' ) |
341 | #Raising Uncaught exception |
342 | return (-1, res, 'Line ' + str(counter) +' : ' + str(e), '' ) |
343 | - |
344 | + |
345 | for lang in translate: |
346 | context2 = context.copy() |
347 | context2['lang'] = lang |
348 | @@ -1130,15 +1132,15 @@ |
349 | if user != 1: # admin user has all roles |
350 | user_roles = usersobj.read(cr, user, [user], ['roles_id'])[0]['roles_id'] |
351 | # TODO handle the case of more than one workflow for a model |
352 | - cr.execute("""SELECT DISTINCT t.role_id |
353 | - FROM wkf |
354 | - INNER JOIN wkf_activity a ON a.wkf_id = wkf.id |
355 | + cr.execute("""SELECT DISTINCT t.role_id |
356 | + FROM wkf |
357 | + INNER JOIN wkf_activity a ON a.wkf_id = wkf.id |
358 | INNER JOIN wkf_transition t ON (t.act_to = a.id) |
359 | WHERE wkf.osv = %s |
360 | AND t.signal = %s |
361 | """, (self._name, button.get('name'),)) |
362 | roles = cr.fetchall() |
363 | - |
364 | + |
365 | # draft -> valid = signal_next (role X) |
366 | # draft -> cancel = signal_cancel (no role) |
367 | # |
368 | @@ -1147,13 +1149,13 @@ |
369 | # |
370 | # running -> done = signal_next (role Z) |
371 | # running -> cancel = signal_cancel (role Z) |
372 | - # As we don't know the object state, in this scenario, |
373 | + # As we don't know the object state, in this scenario, |
374 | # the button "signal_cancel" will be always shown as there is no restriction to cancel in draft |
375 | # the button "signal_next" will be show if the user has any of the roles (X Y or Z) |
376 | # The verification will be made later in workflow process... |
377 | if roles: |
378 | can_click = any((not role) or rolesobj.check(cr, user, user_roles, role) for (role,) in roles) |
379 | - |
380 | + |
381 | button.set('readonly', str(int(not can_click))) |
382 | |
383 | arch = etree.tostring(node, encoding="utf-8").replace('\t', '') |
384 | @@ -1665,7 +1667,7 @@ |
385 | def _check_removed_columns(self, cr, log=False): |
386 | # nothing to check in memory... |
387 | pass |
388 | - |
389 | + |
390 | def exists(self, cr, uid, id, context=None): |
391 | return id in self.datas |
392 | |
393 | @@ -1804,12 +1806,12 @@ |
394 | |
395 | if isinstance(f, fields.one2many): |
396 | cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,)) |
397 | - |
398 | + |
399 | if self.pool.get(f._obj): |
400 | if f._fields_id not in self.pool.get(f._obj)._columns.keys(): |
401 | if not self.pool.get(f._obj)._inherits or (f._fields_id not in self.pool.get(f._obj)._inherit_fields.keys()): |
402 | raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id,f._obj,)) |
403 | - |
404 | + |
405 | if cr.fetchone(): |
406 | cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id)) |
407 | res = cr.fetchone()[0] |
408 | @@ -1916,7 +1918,7 @@ |
409 | field_size = (65535 * f.digits[0]) + f.digits[0] + f.digits[1] |
410 | if field_size != f_pg_size: |
411 | field_size_change = True |
412 | - |
413 | + |
414 | if f_pg_type != f_obj_type or field_size_change: |
415 | if f_pg_type != f_obj_type: |
416 | logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed type to %s." % (k, self._table, c[1])) |
417 | @@ -2004,11 +2006,11 @@ |
418 | create = not bool(cr.fetchone()) |
419 | |
420 | cr.commit() # start a new transaction |
421 | - |
422 | + |
423 | store_fncts = self.pool._store_function.get(self._name, []) |
424 | #if any field is changed from function to storable, we need to remove its entry from store_function |
425 | remove_stores = [] |
426 | - |
427 | + |
428 | for record in store_fncts: |
429 | if record[0] == self._name and (self._columns.get(record[1],False) or self._inherit_fields.get(record[1],False)): |
430 | if (not isinstance(self._columns[record[1]],fields.function)) or (record[1] in self._inherit_fields and not isinstance(self._inherit_fields[record[1]][2],fields.function)): |
431 | @@ -2018,7 +2020,7 @@ |
432 | store_fncts.remove(stores) |
433 | |
434 | self.pool._store_function[self._name] = store_fncts |
435 | - |
436 | + |
437 | for (key, con, _) in self._sql_constraints: |
438 | conname = '%s_%s' % (self._table, key) |
439 | cr.execute("SELECT conname FROM pg_constraint where conname=%s", (conname,)) |
440 | @@ -2306,7 +2308,7 @@ |
441 | res.extend(cr.dictfetchall()) |
442 | else: |
443 | res = map(lambda x: {'id': x}, ids) |
444 | - |
445 | + |
446 | # if not res: |
447 | # res = map(lambda x: {'id': x}, ids) |
448 | # for record in res: |
449 | @@ -2321,7 +2323,7 @@ |
450 | # if ftype in ('one2many', 'many2many'): |
451 | # field_val = [] |
452 | # record.update({f:field_val}) |
453 | - |
454 | + |
455 | for f in fields_pre: |
456 | if f == self.CONCURRENCY_CHECK_FIELD: |
457 | continue |
458 | @@ -2481,8 +2483,8 @@ |
459 | self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context) |
460 | |
461 | properties = self.pool.get('ir.property') |
462 | - domain = [('res_id', '=', False), |
463 | - ('value', 'in', ['%s,%s' % (self._name, i) for i in ids]), |
464 | + domain = [('res_id', '=', False), |
465 | + ('value', 'in', ['%s,%s' % (self._name, i) for i in ids]), |
466 | ] |
467 | if properties.search(cr, uid, domain, context=context): |
468 | raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property')) |
469 | @@ -2717,10 +2719,10 @@ |
470 | cr.execute('SELECT parent_left, parent_right FROM "%s" WHERE id=%%s' % (self._table,), (id,)) |
471 | pleft, pright = cr.fetchone() |
472 | distance = pright - pleft + 1 |
473 | - |
474 | + |
475 | cr.execute('SELECT parent_right, id FROM "%s" WHERE %s ORDER BY %s' % (self._table, clause, order), params) |
476 | parents = cr.fetchall() |
477 | - |
478 | + |
479 | # Find Position of the element |
480 | position = None |
481 | for (parent_pright, parent_id) in parents: |
482 | @@ -2760,7 +2762,7 @@ |
483 | done[key][id] = True |
484 | todo.append(id) |
485 | self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context) |
486 | - |
487 | + |
488 | wf_service = netsvc.LocalService("workflow") |
489 | for id in ids: |
490 | wf_service.trg_write(user, self._name, id, cr) |
491 | @@ -2843,15 +2845,15 @@ |
492 | upd0 += ','+self._inherits[table] |
493 | upd1 += ',%s' |
494 | upd2.append(record_id) |
495 | - |
496 | - #Start : Set bool fields to be False if they are not touched(to make search more powerful) |
497 | + |
498 | + #Start : Set bool fields to be False if they are not touched(to make search more powerful) |
499 | bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean'] |
500 | - |
501 | + |
502 | for bool_field in bool_fields: |
503 | if bool_field not in vals: |
504 | vals[bool_field] = False |
505 | #End |
506 | - |
507 | + |
508 | for field in vals: |
509 | if field in self._columns: |
510 | if self._columns[field]._classic_write: |
511 | @@ -2909,13 +2911,13 @@ |
512 | cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,)) |
513 | cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,)) |
514 | cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1,pleft+2,id_new)) |
515 | - |
516 | + |
517 | # default element in context must be removed when call a one2many or many2many |
518 | rel_context = context.copy() |
519 | for c in context.items(): |
520 | if c[0].startswith('default_'): |
521 | del rel_context[c[0]] |
522 | - |
523 | + |
524 | result = [] |
525 | for field in upd_todo: |
526 | result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or [] |
Dhruti,
Kindly, next time, take care of those unnecessary whitespace changes.
Thanks.