Merge lp:~openerp-commiter/openobject-server/5.0-opw-4592-dhs into lp:openobject-server/5.0
- 5.0-opw-4592-dhs
- Merge into 5.0
Proposed by
Dhruti Shastri (OpenERP)
Status: | Merged |
---|---|
Merged at revision: | 2174 |
Proposed branch: | lp:~openerp-commiter/openobject-server/5.0-opw-4592-dhs |
Merge into: | lp:openobject-server/5.0 |
Diff against target: |
526 lines (+94/-92) 1 file modified
bin/osv/orm.py (+94/-92) |
To merge this branch: | bzr merge lp:~openerp-commiter/openobject-server/5.0-opw-4592-dhs |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
OpenERP Core Team | Pending | ||
Review via email: mp+55468@code.launchpad.net |
Commit message
(none provided)
Description of the change
(none provided)
To post a comment you must log in.
Revision history for this message
Jay Vora (Serpent Consulting Services) (jayvora) wrote : | # |
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/osv/orm.py' | |||
2 | --- bin/osv/orm.py 2011-02-04 09:32:33 +0000 | |||
3 | +++ bin/osv/orm.py 2011-03-30 04:57:36 +0000 | |||
4 | @@ -388,7 +388,7 @@ | |||
5 | 388 | #setting value to let the problem NOT occur next time | 388 | #setting value to let the problem NOT occur next time |
6 | 389 | elif k in cols: | 389 | elif k in cols: |
7 | 390 | vals['select_level'] = cols[k]['select_level'] | 390 | vals['select_level'] = cols[k]['select_level'] |
9 | 391 | 391 | ||
10 | 392 | if k not in cols: | 392 | if k not in cols: |
11 | 393 | cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',)) | 393 | cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',)) |
12 | 394 | id = cr.fetchone()[0] | 394 | id = cr.fetchone()[0] |
13 | @@ -468,10 +468,10 @@ | |||
14 | 468 | return 0.0 | 468 | return 0.0 |
15 | 469 | elif field_type == 'integer': | 469 | elif field_type == 'integer': |
16 | 470 | return 0 | 470 | return 0 |
19 | 471 | elif field_type == 'boolean': | 471 | elif field_type == 'boolean': |
20 | 472 | return False | 472 | return False |
21 | 473 | return '' | 473 | return '' |
23 | 474 | 474 | ||
24 | 475 | def selection_field(in_field): | 475 | def selection_field(in_field): |
25 | 476 | col_obj = self.pool.get(in_field.keys()[0]) | 476 | col_obj = self.pool.get(in_field.keys()[0]) |
26 | 477 | if f[i] in col_obj._columns.keys(): | 477 | if f[i] in col_obj._columns.keys(): |
27 | @@ -479,20 +479,20 @@ | |||
28 | 479 | elif f[i] in col_obj._inherits.keys(): | 479 | elif f[i] in col_obj._inherits.keys(): |
29 | 480 | selection_field(col_obj._inherits) | 480 | selection_field(col_obj._inherits) |
30 | 481 | else: | 481 | else: |
33 | 482 | return False | 482 | return False |
34 | 483 | 483 | ||
35 | 484 | lines = [] | 484 | lines = [] |
36 | 485 | data = map(lambda x: '', range(len(fields))) | 485 | data = map(lambda x: '', range(len(fields))) |
37 | 486 | done = [] | 486 | done = [] |
38 | 487 | for fpos in range(len(fields)): | 487 | for fpos in range(len(fields)): |
40 | 488 | f = fields[fpos] | 488 | f = fields[fpos] |
41 | 489 | if f: | 489 | if f: |
42 | 490 | r = row | 490 | r = row |
43 | 491 | i = 0 | 491 | i = 0 |
44 | 492 | while i < len(f): | 492 | while i < len(f): |
45 | 493 | if f[i] == 'db_id': | 493 | if f[i] == 'db_id': |
48 | 494 | r = r['id'] | 494 | r = r['id'] |
49 | 495 | elif f[i] == 'id': | 495 | elif f[i] == 'id': |
50 | 496 | model_data = self.pool.get('ir.model.data') | 496 | model_data = self.pool.get('ir.model.data') |
51 | 497 | data_ids = model_data.search(cr, uid, [('model','=',r._table_name),('res_id','=',r['id'])]) | 497 | data_ids = model_data.search(cr, uid, [('model','=',r._table_name),('res_id','=',r['id'])]) |
52 | 498 | if len(data_ids): | 498 | if len(data_ids): |
53 | @@ -506,7 +506,7 @@ | |||
54 | 506 | else: | 506 | else: |
55 | 507 | r = r[f[i]] | 507 | r = r[f[i]] |
56 | 508 | # To display external name of selection field when its exported | 508 | # To display external name of selection field when its exported |
58 | 509 | if not context.get('import_comp',False):# Allow external name only if its not import compatible | 509 | if not context.get('import_comp',False):# Allow external name only if its not import compatible |
59 | 510 | cols = False | 510 | cols = False |
60 | 511 | if f[i] in self._columns.keys(): | 511 | if f[i] in self._columns.keys(): |
61 | 512 | cols = self._columns[f[i]] | 512 | cols = self._columns[f[i]] |
62 | @@ -518,11 +518,11 @@ | |||
63 | 518 | r = [x[1] for x in sel_list if r==x[0]] | 518 | r = [x[1] for x in sel_list if r==x[0]] |
64 | 519 | r = r and r[0] or False | 519 | r = r and r[0] or False |
65 | 520 | if not r: | 520 | if not r: |
67 | 521 | if f[i] in self._columns: | 521 | if f[i] in self._columns: |
68 | 522 | r = check_type(self._columns[f[i]]._type) | 522 | r = check_type(self._columns[f[i]]._type) |
69 | 523 | elif f[i] in self._inherit_fields: | 523 | elif f[i] in self._inherit_fields: |
72 | 524 | r = check_type(self._inherit_fields[f[i]][2]._type) | 524 | r = check_type(self._inherit_fields[f[i]][2]._type) |
73 | 525 | data[fpos] = r | 525 | data[fpos] = r |
74 | 526 | break | 526 | break |
75 | 527 | if isinstance(r, (browse_record_list, list)): | 527 | if isinstance(r, (browse_record_list, list)): |
76 | 528 | first = True | 528 | first = True |
77 | @@ -531,10 +531,10 @@ | |||
78 | 531 | if fields2 in done: | 531 | if fields2 in done: |
79 | 532 | if [x for x in fields2 if x]: | 532 | if [x for x in fields2 if x]: |
80 | 533 | break | 533 | break |
82 | 534 | done.append(fields2) | 534 | done.append(fields2) |
83 | 535 | for row2 in r: | 535 | for row2 in r: |
84 | 536 | lines2 = self.__export_row(cr, uid, row2, fields2, | 536 | lines2 = self.__export_row(cr, uid, row2, fields2, |
86 | 537 | context) | 537 | context) |
87 | 538 | if first: | 538 | if first: |
88 | 539 | for fpos2 in range(len(fields)): | 539 | for fpos2 in range(len(fields)): |
89 | 540 | if lines2 and lines2[0][fpos2]: | 540 | if lines2 and lines2[0][fpos2]: |
90 | @@ -542,17 +542,19 @@ | |||
91 | 542 | if not data[fpos]: | 542 | if not data[fpos]: |
92 | 543 | dt = '' | 543 | dt = '' |
93 | 544 | for rr in r : | 544 | for rr in r : |
98 | 545 | if isinstance(rr.name, browse_record): | 545 | rr_name = '' |
99 | 546 | rr = rr.name | 546 | if rr.name: |
100 | 547 | rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context) | 547 | if isinstance(rr.name, browse_record): |
101 | 548 | rr_name = rr_name and rr_name[0] and rr_name[0][1] or '' | 548 | rr = rr.name |
102 | 549 | rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context) | ||
103 | 550 | rr_name = rr_name and rr_name[0] and rr_name[0][1] or '' | ||
104 | 549 | dt += tools.ustr(rr_name or '') + ',' | 551 | dt += tools.ustr(rr_name or '') + ',' |
105 | 550 | data[fpos] = dt[:-1] | 552 | data[fpos] = dt[:-1] |
106 | 551 | break | 553 | break |
107 | 552 | lines += lines2[1:] | 554 | lines += lines2[1:] |
108 | 553 | first = False | 555 | first = False |
109 | 554 | else: | 556 | else: |
111 | 555 | lines += lines2 | 557 | lines += lines2 |
112 | 556 | break | 558 | break |
113 | 557 | i += 1 | 559 | i += 1 |
114 | 558 | if i == len(f): | 560 | if i == len(f): |
115 | @@ -565,14 +567,14 @@ | |||
116 | 565 | def export_data(self, cr, uid, ids, fields_to_export, context=None): | 567 | def export_data(self, cr, uid, ids, fields_to_export, context=None): |
117 | 566 | if not context: | 568 | if not context: |
118 | 567 | context = {} | 569 | context = {} |
120 | 568 | imp_comp = context.get('import_comp',False) | 570 | imp_comp = context.get('import_comp',False) |
121 | 569 | cols = self._columns.copy() | 571 | cols = self._columns.copy() |
122 | 570 | for f in self._inherit_fields: | 572 | for f in self._inherit_fields: |
124 | 571 | cols.update({f: self._inherit_fields[f][2]}) | 573 | cols.update({f: self._inherit_fields[f][2]}) |
125 | 572 | fields_to_export = map(lambda x: x.split('/'), fields_to_export) | 574 | fields_to_export = map(lambda x: x.split('/'), fields_to_export) |
129 | 573 | fields_export = fields_to_export+[] | 575 | fields_export = fields_to_export+[] |
130 | 574 | warning = '' | 576 | warning = '' |
131 | 575 | warning_fields = [] | 577 | warning_fields = [] |
132 | 576 | for field in fields_export: | 578 | for field in fields_export: |
133 | 577 | if imp_comp and len(field)>1: | 579 | if imp_comp and len(field)>1: |
134 | 578 | warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field))) | 580 | warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field))) |
135 | @@ -580,11 +582,11 @@ | |||
136 | 580 | if imp_comp and cols.get(field and field[0],False): | 582 | if imp_comp and cols.get(field and field[0],False): |
137 | 581 | if ((isinstance(cols[field[0]], fields.function) and not cols[field[0]].store) \ | 583 | if ((isinstance(cols[field[0]], fields.function) and not cols[field[0]].store) \ |
138 | 582 | or isinstance(cols[field[0]], fields.related)\ | 584 | or isinstance(cols[field[0]], fields.related)\ |
140 | 583 | or isinstance(cols[field[0]], fields.one2many)): | 585 | or isinstance(cols[field[0]], fields.one2many)): |
141 | 584 | warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field))) | 586 | warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field))) |
142 | 585 | datas = [] | 587 | datas = [] |
143 | 586 | if imp_comp and len(warning_fields): | 588 | if imp_comp and len(warning_fields): |
145 | 587 | warning = 'Following columns cannot be exported since you select to be import compatible.\n%s' %('\n'.join(warning_fields)) | 589 | warning = 'Following columns cannot be exported since you select to be import compatible.\n%s' %('\n'.join(warning_fields)) |
146 | 588 | cr.rollback() | 590 | cr.rollback() |
147 | 589 | return {'warning' : warning} | 591 | return {'warning' : warning} |
148 | 590 | for row in self.browse(cr, uid, ids, context): | 592 | for row in self.browse(cr, uid, ids, context): |
149 | @@ -597,14 +599,14 @@ | |||
150 | 597 | fields = map(lambda x: x.split('/'), fields) | 599 | fields = map(lambda x: x.split('/'), fields) |
151 | 598 | logger = netsvc.Logger() | 600 | logger = netsvc.Logger() |
152 | 599 | ir_model_data_obj = self.pool.get('ir.model.data') | 601 | ir_model_data_obj = self.pool.get('ir.model.data') |
154 | 600 | 602 | ||
155 | 601 | def _check_db_id(self, model_name, db_id): | 603 | def _check_db_id(self, model_name, db_id): |
156 | 602 | obj_model = self.pool.get(model_name) | 604 | obj_model = self.pool.get(model_name) |
157 | 603 | ids = obj_model.search(cr, uid, [('id','=',int(db_id))]) | 605 | ids = obj_model.search(cr, uid, [('id','=',int(db_id))]) |
158 | 604 | if not len(ids): | 606 | if not len(ids): |
159 | 605 | raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, db_id)) | 607 | raise Exception(_("Database ID doesn't exist: %s : %s") %(model_name, db_id)) |
160 | 606 | return True | 608 | return True |
162 | 607 | 609 | ||
163 | 608 | def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0): | 610 | def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0): |
164 | 609 | line = datas[position] | 611 | line = datas[position] |
165 | 610 | row = {} | 612 | row = {} |
166 | @@ -624,25 +626,25 @@ | |||
167 | 624 | raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),)) | 626 | raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),)) |
168 | 625 | if not line[i]: | 627 | if not line[i]: |
169 | 626 | continue | 628 | continue |
171 | 627 | 629 | ||
172 | 628 | field = fields[i] | 630 | field = fields[i] |
173 | 629 | if prefix and not prefix[0] in field: | 631 | if prefix and not prefix[0] in field: |
174 | 630 | continue | 632 | continue |
176 | 631 | 633 | ||
177 | 632 | if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':db_id'): | 634 | if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':db_id'): |
178 | 633 | # Database ID | 635 | # Database ID |
179 | 634 | res = False | 636 | res = False |
180 | 635 | if line[i]: | 637 | if line[i]: |
181 | 636 | field_name = field[0].split(':')[0] | 638 | field_name = field[0].split(':')[0] |
184 | 637 | model_rel = fields_def[field_name]['relation'] | 639 | model_rel = fields_def[field_name]['relation'] |
185 | 638 | 640 | ||
186 | 639 | if fields_def[field[len(prefix)][:-6]]['type']=='many2many': | 641 | if fields_def[field[len(prefix)][:-6]]['type']=='many2many': |
187 | 640 | res_id = [] | 642 | res_id = [] |
188 | 641 | for db_id in line[i].split(config.get('csv_internal_sep')): | 643 | for db_id in line[i].split(config.get('csv_internal_sep')): |
189 | 642 | try: | 644 | try: |
190 | 643 | _check_db_id(self, model_rel, db_id) | 645 | _check_db_id(self, model_rel, db_id) |
191 | 644 | res_id.append(db_id) | 646 | res_id.append(db_id) |
193 | 645 | except Exception,e: | 647 | except Exception,e: |
194 | 646 | warning += [tools.exception_to_unicode(e)] | 648 | warning += [tools.exception_to_unicode(e)] |
195 | 647 | logger.notifyChannel("import", netsvc.LOG_ERROR, | 649 | logger.notifyChannel("import", netsvc.LOG_ERROR, |
196 | 648 | tools.exception_to_unicode(e)) | 650 | tools.exception_to_unicode(e)) |
197 | @@ -652,10 +654,10 @@ | |||
198 | 652 | try: | 654 | try: |
199 | 653 | _check_db_id(self, model_rel, line[i]) | 655 | _check_db_id(self, model_rel, line[i]) |
200 | 654 | res = line[i] | 656 | res = line[i] |
202 | 655 | except Exception,e: | 657 | except Exception,e: |
203 | 656 | warning += [tools.exception_to_unicode(e)] | 658 | warning += [tools.exception_to_unicode(e)] |
204 | 657 | logger.notifyChannel("import", netsvc.LOG_ERROR, | 659 | logger.notifyChannel("import", netsvc.LOG_ERROR, |
206 | 658 | tools.exception_to_unicode(e)) | 660 | tools.exception_to_unicode(e)) |
207 | 659 | row[field_name] = res or False | 661 | row[field_name] = res or False |
208 | 660 | continue | 662 | continue |
209 | 661 | 663 | ||
210 | @@ -668,7 +670,7 @@ | |||
211 | 668 | if '.' in word: | 670 | if '.' in word: |
212 | 669 | module, xml_id = word.rsplit('.', 1) | 671 | module, xml_id = word.rsplit('.', 1) |
213 | 670 | else: | 672 | else: |
215 | 671 | module, xml_id = current_module, word | 673 | module, xml_id = current_module, word |
216 | 672 | id = ir_model_data_obj._get_id(cr, uid, module, | 674 | id = ir_model_data_obj._get_id(cr, uid, module, |
217 | 673 | xml_id) | 675 | xml_id) |
218 | 674 | res_id2 = ir_model_data_obj.read(cr, uid, [id], | 676 | res_id2 = ir_model_data_obj.read(cr, uid, [id], |
219 | @@ -681,7 +683,7 @@ | |||
220 | 681 | if '.' in line[i]: | 683 | if '.' in line[i]: |
221 | 682 | module, xml_id = line[i].rsplit('.', 1) | 684 | module, xml_id = line[i].rsplit('.', 1) |
222 | 683 | else: | 685 | else: |
224 | 684 | module, xml_id = current_module, line[i] | 686 | module, xml_id = current_module, line[i] |
225 | 685 | 687 | ||
226 | 686 | record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id) | 688 | record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id) |
227 | 687 | ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id']) | 689 | ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id']) |
228 | @@ -699,31 +701,31 @@ | |||
229 | 699 | continue | 701 | continue |
230 | 700 | if (len(field) == len(prefix)+1) and \ | 702 | if (len(field) == len(prefix)+1) and \ |
231 | 701 | (prefix == field[0:len(prefix)]): | 703 | (prefix == field[0:len(prefix)]): |
236 | 702 | if field[len(prefix)] == "id": | 704 | if field[len(prefix)] == "id": |
237 | 703 | # XML ID | 705 | # XML ID |
238 | 704 | db_id = False | 706 | db_id = False |
239 | 705 | is_xml_id = data_id = line[i] | 707 | is_xml_id = data_id = line[i] |
240 | 706 | d = data_id.split('.') | 708 | d = data_id.split('.') |
241 | 707 | module = len(d)>1 and d[0] or '' | 709 | module = len(d)>1 and d[0] or '' |
244 | 708 | name = len(d)>1 and d[1] or d[0] | 710 | name = len(d)>1 and d[1] or d[0] |
245 | 709 | data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('name','=',name)]) | 711 | data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('name','=',name)]) |
246 | 710 | if len(data_ids): | 712 | if len(data_ids): |
249 | 711 | d = ir_model_data_obj.read(cr, uid, data_ids, ['res_id'])[0] | 713 | d = ir_model_data_obj.read(cr, uid, data_ids, ['res_id'])[0] |
250 | 712 | db_id = d['res_id'] | 714 | db_id = d['res_id'] |
251 | 713 | if is_db_id and not db_id: | 715 | if is_db_id and not db_id: |
253 | 714 | data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('res_id','=',is_db_id)]) | 716 | data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('res_id','=',is_db_id)]) |
254 | 715 | if not len(data_ids): | 717 | if not len(data_ids): |
258 | 716 | ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id}) | 718 | ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id}) |
259 | 717 | db_id = is_db_id | 719 | db_id = is_db_id |
260 | 718 | if is_db_id and int(db_id) != int(is_db_id): | 720 | if is_db_id and int(db_id) != int(is_db_id): |
261 | 719 | warning += [_("Id is not the same than existing one: %s")%(is_db_id)] | 721 | warning += [_("Id is not the same than existing one: %s")%(is_db_id)] |
262 | 720 | logger.notifyChannel("import", netsvc.LOG_ERROR, | 722 | logger.notifyChannel("import", netsvc.LOG_ERROR, |
263 | 721 | _("Id is not the same than existing one: %s")%(is_db_id)) | 723 | _("Id is not the same than existing one: %s")%(is_db_id)) |
264 | 722 | continue | 724 | continue |
265 | 723 | 725 | ||
266 | 724 | if field[len(prefix)] == "db_id": | 726 | if field[len(prefix)] == "db_id": |
269 | 725 | # Database ID | 727 | # Database ID |
270 | 726 | try: | 728 | try: |
271 | 727 | _check_db_id(self, model_name, line[i]) | 729 | _check_db_id(self, model_name, line[i]) |
272 | 728 | data_res_id = is_db_id = int(line[i]) | 730 | data_res_id = is_db_id = int(line[i]) |
273 | 729 | except Exception,e: | 731 | except Exception,e: |
274 | @@ -733,19 +735,19 @@ | |||
275 | 733 | continue | 735 | continue |
276 | 734 | data_ids = ir_model_data_obj.search(cr, uid, [('model','=',model_name),('res_id','=',line[i])]) | 736 | data_ids = ir_model_data_obj.search(cr, uid, [('model','=',model_name),('res_id','=',line[i])]) |
277 | 735 | if len(data_ids): | 737 | if len(data_ids): |
280 | 736 | d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0] | 738 | d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0] |
281 | 737 | data_id = d['name'] | 739 | data_id = d['name'] |
282 | 738 | if d['module']: | 740 | if d['module']: |
283 | 739 | data_id = '%s.%s'%(d['module'],d['name']) | 741 | data_id = '%s.%s'%(d['module'],d['name']) |
284 | 740 | else: | 742 | else: |
285 | 741 | data_id = d['name'] | 743 | data_id = d['name'] |
286 | 742 | if is_xml_id and not data_id: | 744 | if is_xml_id and not data_id: |
289 | 743 | data_id = is_xml_id | 745 | data_id = is_xml_id |
290 | 744 | if is_xml_id and is_xml_id!=data_id: | 746 | if is_xml_id and is_xml_id!=data_id: |
291 | 745 | warning += [_("Id is not the same than existing one: %s")%(line[i])] | 747 | warning += [_("Id is not the same than existing one: %s")%(line[i])] |
292 | 746 | logger.notifyChannel("import", netsvc.LOG_ERROR, | 748 | logger.notifyChannel("import", netsvc.LOG_ERROR, |
293 | 747 | _("Id is not the same than existing one: %s")%(line[i])) | 749 | _("Id is not the same than existing one: %s")%(line[i])) |
295 | 748 | 750 | ||
296 | 749 | continue | 751 | continue |
297 | 750 | if fields_def[field[len(prefix)]]['type'] == 'integer': | 752 | if fields_def[field[len(prefix)]]['type'] == 'integer': |
298 | 751 | res = line[i] and int(line[i]) | 753 | res = line[i] and int(line[i]) |
299 | @@ -769,9 +771,9 @@ | |||
300 | 769 | logger.notifyChannel("import", netsvc.LOG_WARNING, | 771 | logger.notifyChannel("import", netsvc.LOG_WARNING, |
301 | 770 | _("key '%s' not found in selection field '%s'") % \ | 772 | _("key '%s' not found in selection field '%s'") % \ |
302 | 771 | (line[i], field[len(prefix)])) | 773 | (line[i], field[len(prefix)])) |
304 | 772 | 774 | ||
305 | 773 | warning += [_("Key/value '%s' not found in selection field '%s'")%(line[i],field[len(prefix)])] | 775 | warning += [_("Key/value '%s' not found in selection field '%s'")%(line[i],field[len(prefix)])] |
307 | 774 | 776 | ||
308 | 775 | elif fields_def[field[len(prefix)]]['type']=='many2one': | 777 | elif fields_def[field[len(prefix)]]['type']=='many2one': |
309 | 776 | res = False | 778 | res = False |
310 | 777 | if line[i]: | 779 | if line[i]: |
311 | @@ -814,13 +816,13 @@ | |||
312 | 814 | relation_obj = self.pool.get(fields_def[field]['relation']) | 816 | relation_obj = self.pool.get(fields_def[field]['relation']) |
313 | 815 | newfd = relation_obj.fields_get( | 817 | newfd = relation_obj.fields_get( |
314 | 816 | cr, uid, context=context) | 818 | cr, uid, context=context) |
317 | 817 | res = process_liness(self, datas, prefix + [field], current_module, relation_obj._name, newfd, position) | 819 | res = process_liness(self, datas, prefix + [field], current_module, relation_obj._name, newfd, position) |
318 | 818 | (newrow, max2, w2, translate2, data_id2, data_res_id2) = res | 820 | (newrow, max2, w2, translate2, data_id2, data_res_id2) = res |
319 | 819 | nbrmax = max(nbrmax, max2) | 821 | nbrmax = max(nbrmax, max2) |
322 | 820 | warning = warning + w2 | 822 | warning = warning + w2 |
323 | 821 | reduce(lambda x, y: x and y, newrow) | 823 | reduce(lambda x, y: x and y, newrow) |
324 | 822 | row[field] = newrow and (reduce(lambda x, y: x or y, newrow.values()) and \ | 824 | row[field] = newrow and (reduce(lambda x, y: x or y, newrow.values()) and \ |
326 | 823 | [(0, 0, newrow)]) or [] | 825 | [(0, 0, newrow)]) or [] |
327 | 824 | i = max2 | 826 | i = max2 |
328 | 825 | while (position+i)<len(datas): | 827 | while (position+i)<len(datas): |
329 | 826 | ok = True | 828 | ok = True |
330 | @@ -835,7 +837,7 @@ | |||
331 | 835 | self, datas, prefix+[field], current_module, relation_obj._name, newfd, position+i) | 837 | self, datas, prefix+[field], current_module, relation_obj._name, newfd, position+i) |
332 | 836 | warning = warning+w2 | 838 | warning = warning+w2 |
333 | 837 | if newrow and reduce(lambda x, y: x or y, newrow.values()): | 839 | if newrow and reduce(lambda x, y: x or y, newrow.values()): |
335 | 838 | row[field].append((0, 0, newrow)) | 840 | row[field].append((0, 0, newrow)) |
336 | 839 | i += max2 | 841 | i += max2 |
337 | 840 | nbrmax = max(nbrmax, i) | 842 | nbrmax = max(nbrmax, i) |
338 | 841 | 843 | ||
339 | @@ -884,7 +886,7 @@ | |||
340 | 884 | return (-1, res, 'Line ' + str(counter) +' : ' + msg, '' ) | 886 | return (-1, res, 'Line ' + str(counter) +' : ' + msg, '' ) |
341 | 885 | #Raising Uncaught exception | 887 | #Raising Uncaught exception |
342 | 886 | return (-1, res, 'Line ' + str(counter) +' : ' + str(e), '' ) | 888 | return (-1, res, 'Line ' + str(counter) +' : ' + str(e), '' ) |
344 | 887 | 889 | ||
345 | 888 | for lang in translate: | 890 | for lang in translate: |
346 | 889 | context2 = context.copy() | 891 | context2 = context.copy() |
347 | 890 | context2['lang'] = lang | 892 | context2['lang'] = lang |
348 | @@ -1130,15 +1132,15 @@ | |||
349 | 1130 | if user != 1: # admin user has all roles | 1132 | if user != 1: # admin user has all roles |
350 | 1131 | user_roles = usersobj.read(cr, user, [user], ['roles_id'])[0]['roles_id'] | 1133 | user_roles = usersobj.read(cr, user, [user], ['roles_id'])[0]['roles_id'] |
351 | 1132 | # TODO handle the case of more than one workflow for a model | 1134 | # TODO handle the case of more than one workflow for a model |
355 | 1133 | cr.execute("""SELECT DISTINCT t.role_id | 1135 | cr.execute("""SELECT DISTINCT t.role_id |
356 | 1134 | FROM wkf | 1136 | FROM wkf |
357 | 1135 | INNER JOIN wkf_activity a ON a.wkf_id = wkf.id | 1137 | INNER JOIN wkf_activity a ON a.wkf_id = wkf.id |
358 | 1136 | INNER JOIN wkf_transition t ON (t.act_to = a.id) | 1138 | INNER JOIN wkf_transition t ON (t.act_to = a.id) |
359 | 1137 | WHERE wkf.osv = %s | 1139 | WHERE wkf.osv = %s |
360 | 1138 | AND t.signal = %s | 1140 | AND t.signal = %s |
361 | 1139 | """, (self._name, button.get('name'),)) | 1141 | """, (self._name, button.get('name'),)) |
362 | 1140 | roles = cr.fetchall() | 1142 | roles = cr.fetchall() |
364 | 1141 | 1143 | ||
365 | 1142 | # draft -> valid = signal_next (role X) | 1144 | # draft -> valid = signal_next (role X) |
366 | 1143 | # draft -> cancel = signal_cancel (no role) | 1145 | # draft -> cancel = signal_cancel (no role) |
367 | 1144 | # | 1146 | # |
368 | @@ -1147,13 +1149,13 @@ | |||
369 | 1147 | # | 1149 | # |
370 | 1148 | # running -> done = signal_next (role Z) | 1150 | # running -> done = signal_next (role Z) |
371 | 1149 | # running -> cancel = signal_cancel (role Z) | 1151 | # running -> cancel = signal_cancel (role Z) |
373 | 1150 | # As we don't know the object state, in this scenario, | 1152 | # As we don't know the object state, in this scenario, |
374 | 1151 | # the button "signal_cancel" will be always shown as there is no restriction to cancel in draft | 1153 | # the button "signal_cancel" will be always shown as there is no restriction to cancel in draft |
375 | 1152 | # the button "signal_next" will be show if the user has any of the roles (X Y or Z) | 1154 | # the button "signal_next" will be show if the user has any of the roles (X Y or Z) |
376 | 1153 | # The verification will be made later in workflow process... | 1155 | # The verification will be made later in workflow process... |
377 | 1154 | if roles: | 1156 | if roles: |
378 | 1155 | can_click = any((not role) or rolesobj.check(cr, user, user_roles, role) for (role,) in roles) | 1157 | can_click = any((not role) or rolesobj.check(cr, user, user_roles, role) for (role,) in roles) |
380 | 1156 | 1158 | ||
381 | 1157 | button.set('readonly', str(int(not can_click))) | 1159 | button.set('readonly', str(int(not can_click))) |
382 | 1158 | 1160 | ||
383 | 1159 | arch = etree.tostring(node, encoding="utf-8").replace('\t', '') | 1161 | arch = etree.tostring(node, encoding="utf-8").replace('\t', '') |
384 | @@ -1665,7 +1667,7 @@ | |||
385 | 1665 | def _check_removed_columns(self, cr, log=False): | 1667 | def _check_removed_columns(self, cr, log=False): |
386 | 1666 | # nothing to check in memory... | 1668 | # nothing to check in memory... |
387 | 1667 | pass | 1669 | pass |
389 | 1668 | 1670 | ||
390 | 1669 | def exists(self, cr, uid, id, context=None): | 1671 | def exists(self, cr, uid, id, context=None): |
391 | 1670 | return id in self.datas | 1672 | return id in self.datas |
392 | 1671 | 1673 | ||
393 | @@ -1804,12 +1806,12 @@ | |||
394 | 1804 | 1806 | ||
395 | 1805 | if isinstance(f, fields.one2many): | 1807 | if isinstance(f, fields.one2many): |
396 | 1806 | cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,)) | 1808 | cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,)) |
398 | 1807 | 1809 | ||
399 | 1808 | if self.pool.get(f._obj): | 1810 | if self.pool.get(f._obj): |
400 | 1809 | if f._fields_id not in self.pool.get(f._obj)._columns.keys(): | 1811 | if f._fields_id not in self.pool.get(f._obj)._columns.keys(): |
401 | 1810 | if not self.pool.get(f._obj)._inherits or (f._fields_id not in self.pool.get(f._obj)._inherit_fields.keys()): | 1812 | if not self.pool.get(f._obj)._inherits or (f._fields_id not in self.pool.get(f._obj)._inherit_fields.keys()): |
402 | 1811 | raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id,f._obj,)) | 1813 | raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id,f._obj,)) |
404 | 1812 | 1814 | ||
405 | 1813 | if cr.fetchone(): | 1815 | if cr.fetchone(): |
406 | 1814 | cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id)) | 1816 | cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id)) |
407 | 1815 | res = cr.fetchone()[0] | 1817 | res = cr.fetchone()[0] |
408 | @@ -1916,7 +1918,7 @@ | |||
409 | 1916 | field_size = (65535 * f.digits[0]) + f.digits[0] + f.digits[1] | 1918 | field_size = (65535 * f.digits[0]) + f.digits[0] + f.digits[1] |
410 | 1917 | if field_size != f_pg_size: | 1919 | if field_size != f_pg_size: |
411 | 1918 | field_size_change = True | 1920 | field_size_change = True |
413 | 1919 | 1921 | ||
414 | 1920 | if f_pg_type != f_obj_type or field_size_change: | 1922 | if f_pg_type != f_obj_type or field_size_change: |
415 | 1921 | if f_pg_type != f_obj_type: | 1923 | if f_pg_type != f_obj_type: |
416 | 1922 | logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed type to %s." % (k, self._table, c[1])) | 1924 | logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed type to %s." % (k, self._table, c[1])) |
417 | @@ -2004,11 +2006,11 @@ | |||
418 | 2004 | create = not bool(cr.fetchone()) | 2006 | create = not bool(cr.fetchone()) |
419 | 2005 | 2007 | ||
420 | 2006 | cr.commit() # start a new transaction | 2008 | cr.commit() # start a new transaction |
422 | 2007 | 2009 | ||
423 | 2008 | store_fncts = self.pool._store_function.get(self._name, []) | 2010 | store_fncts = self.pool._store_function.get(self._name, []) |
424 | 2009 | #if any field is changed from function to storable, we need to remove its entry from store_function | 2011 | #if any field is changed from function to storable, we need to remove its entry from store_function |
425 | 2010 | remove_stores = [] | 2012 | remove_stores = [] |
427 | 2011 | 2013 | ||
428 | 2012 | for record in store_fncts: | 2014 | for record in store_fncts: |
429 | 2013 | if record[0] == self._name and (self._columns.get(record[1],False) or self._inherit_fields.get(record[1],False)): | 2015 | if record[0] == self._name and (self._columns.get(record[1],False) or self._inherit_fields.get(record[1],False)): |
430 | 2014 | if (not isinstance(self._columns[record[1]],fields.function)) or (record[1] in self._inherit_fields and not isinstance(self._inherit_fields[record[1]][2],fields.function)): | 2016 | if (not isinstance(self._columns[record[1]],fields.function)) or (record[1] in self._inherit_fields and not isinstance(self._inherit_fields[record[1]][2],fields.function)): |
431 | @@ -2018,7 +2020,7 @@ | |||
432 | 2018 | store_fncts.remove(stores) | 2020 | store_fncts.remove(stores) |
433 | 2019 | 2021 | ||
434 | 2020 | self.pool._store_function[self._name] = store_fncts | 2022 | self.pool._store_function[self._name] = store_fncts |
436 | 2021 | 2023 | ||
437 | 2022 | for (key, con, _) in self._sql_constraints: | 2024 | for (key, con, _) in self._sql_constraints: |
438 | 2023 | conname = '%s_%s' % (self._table, key) | 2025 | conname = '%s_%s' % (self._table, key) |
439 | 2024 | cr.execute("SELECT conname FROM pg_constraint where conname=%s", (conname,)) | 2026 | cr.execute("SELECT conname FROM pg_constraint where conname=%s", (conname,)) |
440 | @@ -2306,7 +2308,7 @@ | |||
441 | 2306 | res.extend(cr.dictfetchall()) | 2308 | res.extend(cr.dictfetchall()) |
442 | 2307 | else: | 2309 | else: |
443 | 2308 | res = map(lambda x: {'id': x}, ids) | 2310 | res = map(lambda x: {'id': x}, ids) |
445 | 2309 | 2311 | ||
446 | 2310 | # if not res: | 2312 | # if not res: |
447 | 2311 | # res = map(lambda x: {'id': x}, ids) | 2313 | # res = map(lambda x: {'id': x}, ids) |
448 | 2312 | # for record in res: | 2314 | # for record in res: |
449 | @@ -2321,7 +2323,7 @@ | |||
450 | 2321 | # if ftype in ('one2many', 'many2many'): | 2323 | # if ftype in ('one2many', 'many2many'): |
451 | 2322 | # field_val = [] | 2324 | # field_val = [] |
452 | 2323 | # record.update({f:field_val}) | 2325 | # record.update({f:field_val}) |
454 | 2324 | 2326 | ||
455 | 2325 | for f in fields_pre: | 2327 | for f in fields_pre: |
456 | 2326 | if f == self.CONCURRENCY_CHECK_FIELD: | 2328 | if f == self.CONCURRENCY_CHECK_FIELD: |
457 | 2327 | continue | 2329 | continue |
458 | @@ -2481,8 +2483,8 @@ | |||
459 | 2481 | self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context) | 2483 | self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context) |
460 | 2482 | 2484 | ||
461 | 2483 | properties = self.pool.get('ir.property') | 2485 | properties = self.pool.get('ir.property') |
464 | 2484 | domain = [('res_id', '=', False), | 2486 | domain = [('res_id', '=', False), |
465 | 2485 | ('value', 'in', ['%s,%s' % (self._name, i) for i in ids]), | 2487 | ('value', 'in', ['%s,%s' % (self._name, i) for i in ids]), |
466 | 2486 | ] | 2488 | ] |
467 | 2487 | if properties.search(cr, uid, domain, context=context): | 2489 | if properties.search(cr, uid, domain, context=context): |
468 | 2488 | raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property')) | 2490 | raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property')) |
469 | @@ -2717,10 +2719,10 @@ | |||
470 | 2717 | cr.execute('SELECT parent_left, parent_right FROM "%s" WHERE id=%%s' % (self._table,), (id,)) | 2719 | cr.execute('SELECT parent_left, parent_right FROM "%s" WHERE id=%%s' % (self._table,), (id,)) |
471 | 2718 | pleft, pright = cr.fetchone() | 2720 | pleft, pright = cr.fetchone() |
472 | 2719 | distance = pright - pleft + 1 | 2721 | distance = pright - pleft + 1 |
474 | 2720 | 2722 | ||
475 | 2721 | cr.execute('SELECT parent_right, id FROM "%s" WHERE %s ORDER BY %s' % (self._table, clause, order), params) | 2723 | cr.execute('SELECT parent_right, id FROM "%s" WHERE %s ORDER BY %s' % (self._table, clause, order), params) |
476 | 2722 | parents = cr.fetchall() | 2724 | parents = cr.fetchall() |
478 | 2723 | 2725 | ||
479 | 2724 | # Find Position of the element | 2726 | # Find Position of the element |
480 | 2725 | position = None | 2727 | position = None |
481 | 2726 | for (parent_pright, parent_id) in parents: | 2728 | for (parent_pright, parent_id) in parents: |
482 | @@ -2760,7 +2762,7 @@ | |||
483 | 2760 | done[key][id] = True | 2762 | done[key][id] = True |
484 | 2761 | todo.append(id) | 2763 | todo.append(id) |
485 | 2762 | self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context) | 2764 | self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context) |
487 | 2763 | 2765 | ||
488 | 2764 | wf_service = netsvc.LocalService("workflow") | 2766 | wf_service = netsvc.LocalService("workflow") |
489 | 2765 | for id in ids: | 2767 | for id in ids: |
490 | 2766 | wf_service.trg_write(user, self._name, id, cr) | 2768 | wf_service.trg_write(user, self._name, id, cr) |
491 | @@ -2843,15 +2845,15 @@ | |||
492 | 2843 | upd0 += ','+self._inherits[table] | 2845 | upd0 += ','+self._inherits[table] |
493 | 2844 | upd1 += ',%s' | 2846 | upd1 += ',%s' |
494 | 2845 | upd2.append(record_id) | 2847 | upd2.append(record_id) |
497 | 2846 | 2848 | ||
498 | 2847 | #Start : Set bool fields to be False if they are not touched(to make search more powerful) | 2849 | #Start : Set bool fields to be False if they are not touched(to make search more powerful) |
499 | 2848 | bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean'] | 2850 | bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean'] |
501 | 2849 | 2851 | ||
502 | 2850 | for bool_field in bool_fields: | 2852 | for bool_field in bool_fields: |
503 | 2851 | if bool_field not in vals: | 2853 | if bool_field not in vals: |
504 | 2852 | vals[bool_field] = False | 2854 | vals[bool_field] = False |
505 | 2853 | #End | 2855 | #End |
507 | 2854 | 2856 | ||
508 | 2855 | for field in vals: | 2857 | for field in vals: |
509 | 2856 | if field in self._columns: | 2858 | if field in self._columns: |
510 | 2857 | if self._columns[field]._classic_write: | 2859 | if self._columns[field]._classic_write: |
511 | @@ -2909,13 +2911,13 @@ | |||
512 | 2909 | cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,)) | 2911 | cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,)) |
513 | 2910 | cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,)) | 2912 | cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,)) |
514 | 2911 | cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1,pleft+2,id_new)) | 2913 | cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1,pleft+2,id_new)) |
516 | 2912 | 2914 | ||
517 | 2913 | # default element in context must be removed when call a one2many or many2many | 2915 | # default element in context must be removed when call a one2many or many2many |
518 | 2914 | rel_context = context.copy() | 2916 | rel_context = context.copy() |
519 | 2915 | for c in context.items(): | 2917 | for c in context.items(): |
520 | 2916 | if c[0].startswith('default_'): | 2918 | if c[0].startswith('default_'): |
521 | 2917 | del rel_context[c[0]] | 2919 | del rel_context[c[0]] |
523 | 2918 | 2920 | ||
524 | 2919 | result = [] | 2921 | result = [] |
525 | 2920 | for field in upd_todo: | 2922 | for field in upd_todo: |
526 | 2921 | result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or [] | 2923 | result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or [] |
Dhruti,
Kindly, next time, take care of those unnecessary whitespace changes.
Thanks.