Merge lp:~fabien-morin/unifield-server/fm-us-3386 into lp:unifield-server
- fm-us-3386
- Merge into trunk
Status: | Merged |
---|---|
Merged at revision: | 4830 |
Proposed branch: | lp:~fabien-morin/unifield-server/fm-us-3386 |
Merge into: | lp:unifield-server |
Diff against target: |
4799 lines (+689/-2000) 90 files modified
bin/addons/account/account.py (+28/-135) bin/addons/account/account_move_line.py (+6/-7) bin/addons/account/partner.py (+21/-20) bin/addons/account/res_currency.py (+3/-3) bin/addons/account/sequence.py (+2/-2) bin/addons/account/wizard/account_automatic_reconcile.py (+7/-7) bin/addons/account/wizard/account_fiscalyear_close.py (+40/-36) bin/addons/account_corrections/wizard/analytic_distribution_wizard.py (+1/-1) bin/addons/account_hq_entries/wizard/hq_entries_validation.py (+1/-1) bin/addons/account_override/account.py (+6/-9) bin/addons/account_override/account_move_line.py (+5/-5) bin/addons/account_override/finance_export.py (+1/-1) bin/addons/account_override/res_partner.py (+2/-2) bin/addons/account_payment/account_move_line.py (+2/-2) bin/addons/account_payment/account_payment.py (+0/-28) bin/addons/account_period_closing_level/account_period.py (+4/-4) bin/addons/account_reconciliation/account_move_line.py (+2/-2) bin/addons/analytic/analytic.py (+2/-3) bin/addons/analytic_distribution/destination_tools.py (+14/-15) bin/addons/analytic_override/analytic_account.py (+5/-7) bin/addons/analytic_override/analytic_line.py (+1/-1) bin/addons/base/ir/ir_attachment.py (+3/-2) bin/addons/base/ir/ir_cron.py (+1/-1) bin/addons/base/ir/ir_model.py (+28/-29) bin/addons/base/ir/ir_rule.py (+3/-3) bin/addons/base/ir/ir_sequence.py (+7/-7) bin/addons/base/ir/ir_values.py (+1/-1) bin/addons/base/ir/workflow/print_instance.py (+3/-3) bin/addons/base/res/res_log.py (+1/-2) bin/addons/consumption_calculation/consumption_calculation.py (+2/-2) bin/addons/data_finance/import_analytic_lines.py (+1/-1) bin/addons/finance/wizard/account_report_partner_balance_tree.py (+26/-27) bin/addons/financing_contract/contract.py (+1/-1) bin/addons/financing_contract/financing_contract_account_quadruplet.py (+4/-4) bin/addons/mission_stock/mission_stock.py (+4/-5) bin/addons/msf_budget/msf_budget_line.py (+5/-5) bin/addons/msf_cross_docking/cross_docking.py (+1/-1) 
bin/addons/msf_outgoing/msf_outgoing.py (+5/-16) bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py (+1/-1) bin/addons/msf_profile/msf_profile.py (+11/-13) bin/addons/msf_profile/user_access_configurator.py (+3/-3) bin/addons/msf_tools/msf_tools.py (+4/-5) bin/addons/object_query/query.py (+1/-1) bin/addons/product/pricelist.py (+3/-3) bin/addons/product_attributes/product_attributes.py (+6/-6) bin/addons/product_expiry/product_expiry.py (+6/-6) bin/addons/register_accounting/account_analytic_line.py (+1/-1) bin/addons/register_accounting/register_tools.py (+2/-2) bin/addons/register_accounting/report/report_open_advances.py (+3/-3) bin/addons/register_accounting/wizard/wizard_liquidity_position.py (+2/-2) bin/addons/res_currency_functional/account_move_compute_currency.py (+1/-1) bin/addons/sale/sale.py (+1/-1) bin/addons/sale_override/sale.py (+3/-3) bin/addons/specific_rules/specific_rules.py (+2/-2) bin/addons/specific_rules/unconsistent_stock_report.py (+3/-3) bin/addons/stock/product.py (+16/-18) bin/addons/stock/stock.py (+13/-10) bin/addons/supplier_catalogue/supplier_catalogue.py (+6/-5) bin/addons/sync_client/ir_model_data.py (+3/-4) bin/addons/sync_client/log_sale_purchase.py (+2/-3) bin/addons/sync_client/orm.py (+5/-8) bin/addons/sync_client/special_handling.py (+1/-1) bin/addons/sync_client/sync_client.py (+4/-4) bin/addons/sync_common/migration_scripts.py (+5/-5) bin/addons/sync_remote_warehouse/orm.py (+3/-3) bin/addons/sync_remote_warehouse/wizard/setup_remote_warehouse.py (+4/-4) bin/addons/sync_server/sync_server.py (+1/-1) bin/addons/sync_server/update.py (+6/-8) bin/addons/sync_so/so_po_common.py (+1/-1) bin/addons/sync_so/specific_xml_id.py (+4/-4) bin/addons/tender_flow/tender_flow.py (+1/-2) bin/addons/update_client/version.py (+1/-2) bin/osv/expression.py (+7/-8) bin/osv/fields.py (+39/-38) bin/osv/orm.py (+62/-65) bin/service/web_services.py (+29/-5) bin/tools/sql.py (+1/-1) doc/migrate/3.3.0-3.4.0/README (+0/-35) 
doc/migrate/3.3.0-3.4.0/post.py (+0/-146) doc/migrate/3.3.0-3.4.0/pre.py (+0/-112) doc/migrate/3.4.0-4.0.0/README (+0/-29) doc/migrate/3.4.0-4.0.0/post-tiny.py (+0/-87) doc/migrate/3.4.0-4.0.0/post.py (+0/-116) doc/migrate/3.4.0-4.0.0/pre-tiny.py (+0/-145) doc/migrate/3.4.0-4.0.0/pre.py (+0/-127) doc/migrate/4.0.0-4.2.0/pre.py (+0/-247) doc/migrate/4.0.0-4.2.0/tiny/README (+0/-1) doc/migrate/4.0.0-4.2.0/tiny/pre-tiny.py (+0/-188) doc/migrate/4.2.0-4.4.0/pre.py (+0/-110) tools/find_sql_injection.py (+173/-0) |
To merge this branch: | bzr merge lp:~fabien-morin/unifield-server/fm-us-3386 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
UniField Reviewer Team | Pending | ||
Review via email: mp+331867@code.launchpad.net |
Commit message
Description of the change
jftempo (jfb-tempo-consulting) wrote : | # |
Fabien MORIN (fabien-morin) wrote : | # |
On vendredi 6 octobre 2017 09:15:36 CEST you wrote:
> to fix
>
> Diff comments:
>
> > === modified file 'bin/addons/
> > --- bin/addons/
> > +++ bin/addons/
> > @@ -1189,13 +1080,18 @@
> > if isinstance(
> > if cond[1] in ['in','not in']:
> > amount = tuple(cond[2])
> > + for cur_amount in amount:
> > + if not isinstance(
> > + raise osv.except_
> > else:
> > continue
> > else:
> > + if not isinstance(amount, (int, long, float)):
> > + raise osv.except_
>
> amount is already protected and not string concatenated in cr.execute
> Do we need this check ?
I made some attempts and was not able to pass anything other than a number to this method, so you are right, it is already well protected; I will remove this extra protection.
Sometimes there is no need to have string concatenation in a cr.execute to get an SQL injection flaw; I can show you how, in some currently in-use forms, it is possible to filter by an SQL injection that will reset the admin password (or drop a table, ...)
But in this precise case, the risk is already filtered by the web client. (I thought it was possible to bypass the web check by changing the HTML with an inspector, but I was not able to.)
>
> > if cond[1] in ['=like', 'like', 'not like', 'ilike', 'not ilike', 'in', 'not in', 'child_of']:
> > continue
> >
> > - cr.execute("select move_id from account_move_line group by move_id having sum(debit) %s %%s" % (cond[1]
> > + cr.execute("select move_id from account_move_line group by move_id having sum(debit) %s %%s" % (cond[1]
> > res_ids = set(id[0] for id in cr.fetchall())
> > ids = ids and (ids & res_ids) or res_ids
> > if ids:
> > @@ -1704,25 +1600,27 @@
> > def _sum(self, cr, uid, ids, name, args, context, where ='', where_params=()):
> > parent_ids = tuple(self.
> > if context.
> > - cr.execute('SELECT line.tax_code_id, sum(line.
> > - FROM account_move_line AS line, \
> > - account_move AS move \
> > - LEFT JOIN account_invoice invoice ON \
> > - (invoice.move_id = move.id) \
> > - WHERE line.tax_code_id IN %s '+where+' \
> > - AND move.id = line.move_id \
> > - AND ((invoice.state = \'paid\') \
> > - OR (invoice.id IS NULL)) \
> > - GROUP BY line.tax_code_id',
> > - (parent_ids,) + where_params)
> > + ...
Preview Diff
1 | === modified file 'bin/addons/account/account.py' |
2 | --- bin/addons/account/account.py 2017-09-18 12:35:40 +0000 |
3 | +++ bin/addons/account/account.py 2017-10-06 09:59:17 +0000 |
4 | @@ -39,10 +39,11 @@ |
5 | """ |
6 | level = 100 |
7 | while len(ids): |
8 | - cr.execute('SELECT DISTINCT parent_id '\ |
9 | - 'FROM '+self._table+' '\ |
10 | - 'WHERE id IN %s '\ |
11 | - 'AND parent_id IS NOT NULL',(tuple(ids),)) |
12 | + cr.execute(''' |
13 | + SELECT DISTINCT parent_id |
14 | + FROM %s |
15 | + WHERE id IN %%s |
16 | + AND parent_id IS NOT NULL''' % self._table, (tuple(ids),)) # not_a_user_entry |
17 | ids = map(itemgetter(0), cr.fetchall()) |
18 | if not level: |
19 | return False |
20 | @@ -242,101 +243,6 @@ |
21 | ids3 = self._get_children_and_consol(cr, uid, ids3, context=context) |
22 | return ids2 + ids3 |
23 | |
24 | - |
25 | - def __compute(self, cr, uid, ids, field_names, arg=None, context=None, |
26 | - query='', query_params=()): |
27 | - """ compute the balance, debit and/or credit for the provided |
28 | - account ids |
29 | - Arguments: |
30 | - `ids`: account ids |
31 | - `field_names`: the fields to compute (a list of any of |
32 | - 'balance', 'debit' and 'credit') |
33 | - `arg`: unused fields.function stuff |
34 | - `query`: additional query filter (as a string) |
35 | - `query_params`: parameters for the provided query string |
36 | - (__compute will handle their escaping) as a |
37 | - tuple |
38 | - """ |
39 | - if context is None: |
40 | - context = {} |
41 | - mapping = { |
42 | - 'balance': "COALESCE(SUM(l.debit),0) " \ |
43 | - "- COALESCE(SUM(l.credit), 0) as balance", |
44 | - 'debit': "COALESCE(SUM(l.debit), 0) as debit", |
45 | - 'credit': "COALESCE(SUM(l.credit), 0) as credit" |
46 | - } |
47 | - #get all the necessary accounts |
48 | - children_and_consolidated = self._get_children_and_consol(cr, uid, ids, |
49 | - context=context) |
50 | - #compute for each account the balance/debit/credit from the move lines |
51 | - accounts = {} |
52 | - sums = {} |
53 | - if children_and_consolidated: |
54 | - aml_query = self.pool.get('account.move.line')._query_get(cr, uid, context=context) |
55 | - |
56 | - wheres = [""] |
57 | - if query.strip(): |
58 | - wheres.append(query.strip()) |
59 | - if aml_query.strip(): |
60 | - wheres.append(aml_query.strip()) |
61 | - filters = " AND ".join(wheres) |
62 | - self.logger.notifyChannel('addons.'+self._name, netsvc.LOG_DEBUG, |
63 | - 'Filters: %s'%filters) |
64 | - # IN might not work ideally in case there are too many |
65 | - # children_and_consolidated, in that case join on a |
66 | - # values() e.g.: |
67 | - # SELECT l.account_id as id FROM account_move_line l |
68 | - # INNER JOIN (VALUES (id1), (id2), (id3), ...) AS tmp (id) |
69 | - # ON l.account_id = tmp.id |
70 | - # or make _get_children_and_consol return a query and join on that |
71 | - request = ("SELECT l.account_id as id, " +\ |
72 | - ', '.join(map(mapping.__getitem__, field_names)) + |
73 | - " FROM account_move_line l" \ |
74 | - " WHERE l.account_id IN %s " \ |
75 | - + filters + |
76 | - " GROUP BY l.account_id") |
77 | - params = (tuple(children_and_consolidated),) + query_params |
78 | - cr.execute(request, params) |
79 | - self.logger.notifyChannel('addons.'+self._name, netsvc.LOG_DEBUG, |
80 | - 'Status: %s'%cr.statusmessage) |
81 | - |
82 | - for res in cr.dictfetchall(): |
83 | - accounts[res['id']] = res |
84 | - |
85 | - # consolidate accounts with direct children |
86 | - children_and_consolidated.reverse() |
87 | - brs = list(self.browse(cr, uid, children_and_consolidated, context=context)) |
88 | - currency_obj = self.pool.get('res.currency') |
89 | - display_only_checked_account = context.get('display_only_checked_account', False) |
90 | - while brs: |
91 | - current = brs[0] |
92 | -# can_compute = True |
93 | -# for child in current.child_id: |
94 | -# if child.id not in sums: |
95 | -# can_compute = False |
96 | -# try: |
97 | -# brs.insert(0, brs.pop(brs.index(child))) |
98 | -# except ValueError: |
99 | -# brs.insert(0, child) |
100 | -# if can_compute: |
101 | - brs.pop(0) |
102 | - for fn in field_names: |
103 | - sums.setdefault(current.id, {})[fn] = accounts.get(current.id, {}).get(fn, 0.0) |
104 | - for child in current.child_id: |
105 | - # in context of report, if the current account is not |
106 | - # displayed, it should no impact the total amount |
107 | - if display_only_checked_account and not child.display_in_reports: |
108 | - continue |
109 | - if child.company_id.currency_id.id == current.company_id.currency_id.id: |
110 | - sums[current.id][fn] += sums[child.id][fn] |
111 | - else: |
112 | - sums[current.id][fn] += currency_obj.compute(cr, uid, child.company_id.currency_id.id, current.company_id.currency_id.id, sums[child.id][fn], context=context) |
113 | - res = {} |
114 | - null_result = dict((fn, 0.0) for fn in field_names) |
115 | - for id in ids: |
116 | - res[id] = sums.get(id, null_result) |
117 | - return res |
118 | - |
119 | def _get_company_currency(self, cr, uid, ids, field_name, arg, context=None): |
120 | result = {} |
121 | for rec in self.browse(cr, uid, ids, context=context): |
122 | @@ -409,9 +315,6 @@ |
123 | 'child_parent_ids': fields.one2many('account.account','parent_id','Children'), |
124 | 'child_consol_ids': fields.many2many('account.account', 'account_account_consol_rel', 'child_id', 'parent_id', 'Consolidated Children'), |
125 | 'child_id': fields.function(_get_child_ids, method=True, type='many2many', relation="account.account", string="Child Accounts"), |
126 | - 'balance': fields.function(__compute, digits_compute=dp.get_precision('Account'), method=True, string='Balance', multi='balance'), |
127 | - 'credit': fields.function(__compute, digits_compute=dp.get_precision('Account'), method=True, string='Credit', multi='balance'), |
128 | - 'debit': fields.function(__compute, digits_compute=dp.get_precision('Account'), method=True, string='Debit', multi='balance'), |
129 | 'reconcile': fields.boolean('Reconcile', help="Check this if the user is allowed to reconcile entries in this account."), |
130 | 'shortcut': fields.char('Shortcut', size=12), |
131 | 'tax_ids': fields.many2many('account.tax', 'account_account_tax_default_rel', |
132 | @@ -510,18 +413,6 @@ |
133 | ids = self.search(cr, user, args, context=context, limit=limit) |
134 | return self.name_get(cr, user, ids, context=context) |
135 | |
136 | - def name_get(self, cr, uid, ids, context=None): |
137 | - if not ids: |
138 | - return [] |
139 | - reads = self.read(cr, uid, ids, ['name', 'code'], context=context) |
140 | - res = [] |
141 | - for record in reads: |
142 | - name = record['name'] |
143 | - if record['code']: |
144 | - name = record['code'] + ' '+name |
145 | - res.append((record['id'], name)) |
146 | - return res |
147 | - |
148 | def copy(self, cr, uid, id, default={}, context=None, done_list=[], local=False): |
149 | account = self.browse(cr, uid, id, context=context) |
150 | new_child_ids = [] |
151 | @@ -1195,7 +1086,7 @@ |
152 | if cond[1] in ['=like', 'like', 'not like', 'ilike', 'not ilike', 'in', 'not in', 'child_of']: |
153 | continue |
154 | |
155 | - cr.execute("select move_id from account_move_line group by move_id having sum(debit) %s %%s" % (cond[1]),(amount,)) |
156 | + cr.execute("select move_id from account_move_line group by move_id having sum(debit) %s %%s" % (cond[1]),(amount,)) # ignore_sql_check |
157 | res_ids = set(id[0] for id in cr.fetchall()) |
158 | ids = ids and (ids & res_ids) or res_ids |
159 | if ids: |
160 | @@ -1455,9 +1346,9 @@ |
161 | else: |
162 | line_id2 = 0 |
163 | |
164 | - cr.execute('SELECT SUM(%s) FROM account_move_line WHERE move_id=%%s AND id!=%%s' % (mode,), (move.id, line_id2)) |
165 | + cr.execute('SELECT SUM(%s) FROM account_move_line WHERE move_id=%%s AND id!=%%s' % (mode,), (move.id, line_id2)) # not_a_user_entry |
166 | result = cr.fetchone()[0] or 0.0 |
167 | - cr.execute('update account_move_line set '+mode2+'=%s where id=%s', (result, line_id)) |
168 | + cr.execute('update account_move_line set '+mode2+'=%s where id=%s', (result, line_id)) # not_a_user_entry |
169 | |
170 | #adjust also the amount in currency if needed |
171 | cr.execute("select currency_id, sum(amount_currency) as amount_currency from account_move_line where move_id = %s and currency_id is not null group by currency_id", (move.id,)) |
172 | @@ -1704,25 +1595,27 @@ |
173 | def _sum(self, cr, uid, ids, name, args, context, where ='', where_params=()): |
174 | parent_ids = tuple(self.search(cr, uid, [('parent_id', 'child_of', ids)])) |
175 | if context.get('based_on', 'invoices') == 'payments': |
176 | - cr.execute('SELECT line.tax_code_id, sum(line.tax_amount) \ |
177 | - FROM account_move_line AS line, \ |
178 | - account_move AS move \ |
179 | - LEFT JOIN account_invoice invoice ON \ |
180 | - (invoice.move_id = move.id) \ |
181 | - WHERE line.tax_code_id IN %s '+where+' \ |
182 | - AND move.id = line.move_id \ |
183 | - AND ((invoice.state = \'paid\') \ |
184 | - OR (invoice.id IS NULL)) \ |
185 | - GROUP BY line.tax_code_id', |
186 | - (parent_ids,) + where_params) |
187 | + cr.execute(''' |
188 | + SELECT line.tax_code_id, sum(line.tax_amount) |
189 | + FROM account_move_line AS line, |
190 | + account_move AS move |
191 | + LEFT JOIN account_invoice invoice ON |
192 | + (invoice.move_id = move.id) |
193 | + WHERE line.tax_code_id IN %%s %s |
194 | + AND move.id = line.move_id |
195 | + AND ((invoice.state = 'paid') |
196 | + OR (invoice.id IS NULL)) |
197 | + GROUP BY line.tax_code_id''' % where, # not_a_user_entry |
198 | + (parent_ids,) + where_params) |
199 | else: |
200 | - cr.execute('SELECT line.tax_code_id, sum(line.tax_amount) \ |
201 | - FROM account_move_line AS line, \ |
202 | - account_move AS move \ |
203 | - WHERE line.tax_code_id IN %s '+where+' \ |
204 | - AND move.id = line.move_id \ |
205 | - GROUP BY line.tax_code_id', |
206 | - (parent_ids,) + where_params) |
207 | + cr.execute(''' |
208 | + SELECT line.tax_code_id, sum(line.tax_amount) |
209 | + FROM account_move_line AS line, |
210 | + account_move AS move |
211 | + WHERE line.tax_code_id IN %%s %s |
212 | + AND move.id = line.move_id |
213 | + GROUP BY line.tax_code_id''' % where, # not_a_user_entry |
214 | + (parent_ids,) + where_params) |
215 | res = dict(cr.fetchall()) |
216 | res2 = {} |
217 | obj_precision = self.pool.get('decimal.precision') |
218 | |
219 | === modified file 'bin/addons/account/account_move_line.py' |
220 | --- bin/addons/account/account_move_line.py 2017-07-19 13:23:04 +0000 |
221 | +++ bin/addons/account/account_move_line.py 2017-10-06 09:59:17 +0000 |
222 | @@ -415,9 +415,7 @@ |
223 | FROM account_move_line l1, account_move_line l2 |
224 | WHERE l2.account_id = l1.account_id |
225 | AND l1.id <= l2.id |
226 | - AND l2.id IN %s AND """ + \ |
227 | - self._query_get(cr, uid, obj='l1', context=c) + \ |
228 | - " GROUP BY l2.id" |
229 | + AND l2.id IN %%s AND %s GROUP BY l2.id""" % self._query_get(cr, uid, obj='l1', context=c) # ignore_sql_check |
230 | |
231 | cr.execute(sql, [tuple(ids)]) |
232 | result = dict(cr.fetchall()) |
233 | @@ -465,7 +463,7 @@ |
234 | return [] |
235 | where = ' AND '.join(map(lambda x: '(abs(sum(debit-credit))'+x[1]+str(x[2])+')',args)) |
236 | cursor.execute('SELECT id, SUM(debit-credit) FROM account_move_line \ |
237 | - GROUP BY id, debit, credit having '+where) |
238 | + GROUP BY id, debit, credit having '+where) # not_a_user_entry |
239 | res = cursor.fetchall() |
240 | if not res: |
241 | return [('id', '=', '0')] |
242 | @@ -508,9 +506,10 @@ |
243 | qu1 = ' AND' + ' AND'.join(qu1) |
244 | else: |
245 | qu1 = '' |
246 | - cursor.execute('SELECT l.id ' \ |
247 | - 'FROM account_move_line l, account_invoice i ' \ |
248 | - 'WHERE l.move_id = i.move_id ' + qu1, qu2) |
249 | + cursor.execute(''' |
250 | + SELECT l.id |
251 | + FROM account_move_line l, account_invoice i |
252 | + WHERE l.move_id = i.move_id ''' + qu1, qu2) # not_a_user_entry |
253 | res = cursor.fetchall() |
254 | if not res: |
255 | return [('id', '=', '0')] |
256 | |
257 | === modified file 'bin/addons/account/partner.py' |
258 | --- bin/addons/account/partner.py 2011-01-14 00:11:01 +0000 |
259 | +++ bin/addons/account/partner.py 2017-10-06 09:59:17 +0000 |
260 | @@ -93,16 +93,16 @@ |
261 | |
262 | def _credit_debit_get(self, cr, uid, ids, field_names, arg, context=None): |
263 | query = self.pool.get('account.move.line')._query_get(cr, uid, context=context) |
264 | - cr.execute("""SELECT l.partner_id, a.type, SUM(l.debit-l.credit) |
265 | - FROM account_move_line l |
266 | - LEFT JOIN account_account a ON (l.account_id=a.id) |
267 | - WHERE a.type IN ('receivable','payable') |
268 | - AND l.partner_id IN %s |
269 | - AND l.reconcile_id IS NULL |
270 | - AND """ + query + """ |
271 | - GROUP BY l.partner_id, a.type |
272 | - """, |
273 | - (tuple(ids),)) |
274 | + cr.execute(""" |
275 | + SELECT l.partner_id, a.type, SUM(l.debit-l.credit) |
276 | + FROM account_move_line l |
277 | + LEFT JOIN account_account a ON (l.account_id=a.id) |
278 | + WHERE a.type IN ('receivable','payable') |
279 | + AND l.partner_id IN %%s |
280 | + AND l.reconcile_id IS NULL |
281 | + AND %s |
282 | + GROUP BY l.partner_id, a.type""" % query, # not_a_user_entry |
283 | + (tuple(ids),)) |
284 | maps = {'receivable':'credit', 'payable':'debit' } |
285 | res = {} |
286 | for id in ids: |
287 | @@ -112,7 +112,7 @@ |
288 | res[pid][maps[type]] = (type=='receivable') and val or -val |
289 | return res |
290 | |
291 | - def _asset_difference_search(self, cr, uid, obj, name, type, args, context=None): |
292 | + def _asset_difference_search(self, cr, uid, obj, name, line_type, args, context=None): |
293 | if not args: |
294 | return [] |
295 | having_values = tuple(map(itemgetter(2), args)) |
296 | @@ -120,15 +120,16 @@ |
297 | map(lambda x: '(SUM(debit-credit) %(operator)s %%s)' % { |
298 | 'operator':x[1]},args)) |
299 | query = self.pool.get('account.move.line')._query_get(cr, uid, context=context) |
300 | - cr.execute(('SELECT partner_id FROM account_move_line l '\ |
301 | - 'WHERE account_id IN '\ |
302 | - '(SELECT id FROM account_account '\ |
303 | - 'WHERE type=%s AND active) '\ |
304 | - 'AND reconcile_id IS NULL '\ |
305 | - 'AND '+query+' '\ |
306 | - 'AND partner_id IS NOT NULL '\ |
307 | - 'GROUP BY partner_id HAVING '+where), |
308 | - (type,) + having_values) |
309 | + cr.execute((''' |
310 | + SELECT partner_id FROM account_move_line l |
311 | + WHERE account_id IN |
312 | + (SELECT id FROM account_account |
313 | + WHERE type=%%s AND active) |
314 | + AND reconcile_id IS NULL |
315 | + AND %s |
316 | + AND partner_id IS NOT NULL |
317 | + GROUP BY partner_id HAVING %s''' % (query, where)), # not_a_user_entry |
318 | + (line_type,) + having_values) |
319 | res = cr.fetchall() |
320 | if not res: |
321 | return [('id','=','0')] |
322 | |
323 | === modified file 'bin/addons/account/res_currency.py' |
324 | --- bin/addons/account/res_currency.py 2011-01-14 00:11:01 +0000 |
325 | +++ bin/addons/account/res_currency.py 2017-10-06 09:59:17 +0000 |
326 | @@ -33,8 +33,8 @@ |
327 | account_invert = context.get('res.currency.compute.account_invert') |
328 | if account and account.currency_mode == 'average' and account.currency_id: |
329 | query = self.pool.get('account.move.line')._query_get(cr, uid, context=context) |
330 | - cr.execute('select sum(debit-credit),sum(amount_currency) from account_move_line l ' \ |
331 | - 'where l.currency_id=%s and l.account_id=%s and '+query, (account.currency_id.id,account.id,)) |
332 | + cr.execute('''select sum(debit-credit),sum(amount_currency) from account_move_line l |
333 | + where l.currency_id=%%s and l.account_id=%%s and %s''' % query, (account.currency_id.id,account.id,)) # not_a_user_entry |
334 | tot1,tot2 = cr.fetchone() |
335 | if tot2 and not account_invert: |
336 | rate = float(tot1)/float(tot2) |
337 | @@ -44,4 +44,4 @@ |
338 | |
339 | res_currency_account() |
340 | |
341 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
342 | \ No newline at end of file |
343 | +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
344 | |
345 | === modified file 'bin/addons/account/sequence.py' |
346 | --- bin/addons/account/sequence.py 2013-11-25 16:23:07 +0000 |
347 | +++ bin/addons/account/sequence.py 2017-10-06 09:59:17 +0000 |
348 | @@ -49,8 +49,8 @@ |
349 | def get_id(self, cr, uid, sequence_id, code_or_id='id', context=None): |
350 | if context is None: |
351 | context = {} |
352 | - cr.execute('select id from ir_sequence where ' |
353 | - + code_or_id + '=%s and active=%s', (sequence_id, True,)) |
354 | + cr.execute('''select id from ir_sequence |
355 | + where %s=%%s and active=%%s''' % code_or_id, (sequence_id, True,)) # not_a_user_entry |
356 | res = cr.dictfetchone() |
357 | if res: |
358 | for line in self.browse(cr, uid, res['id'], |
359 | |
360 | === modified file 'bin/addons/account/wizard/account_automatic_reconcile.py' |
361 | --- bin/addons/account/wizard/account_automatic_reconcile.py 2011-01-14 00:11:01 +0000 |
362 | +++ bin/addons/account/wizard/account_automatic_reconcile.py 2017-10-06 09:59:17 +0000 |
363 | @@ -228,12 +228,12 @@ |
364 | # add the number of transactions for partners who have only one |
365 | # unreconciled transactions to the unreconciled count |
366 | partner_filter = partner_ids and 'AND partner_id not in (%s)' % ','.join(map(str, filter(None, partner_ids))) or '' |
367 | - cr.execute( |
368 | - "SELECT count(*) " \ |
369 | - "FROM account_move_line " \ |
370 | - "WHERE account_id=%s " \ |
371 | - "AND reconcile_id IS NULL " \ |
372 | - "AND state <> 'draft' " + partner_filter, |
373 | + cr.execute(""" |
374 | + SELECT count(*) |
375 | + FROM account_move_line |
376 | + WHERE account_id=%s |
377 | + AND reconcile_id IS NULL |
378 | + AND state <> 'draft' """ + partner_filter, # not_a_user_entry |
379 | (account_id,)) |
380 | additional_unrec = cr.fetchone()[0] |
381 | unreconciled = unreconciled + additional_unrec |
382 | @@ -252,4 +252,4 @@ |
383 | |
384 | account_automatic_reconcile() |
385 | |
386 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
387 | \ No newline at end of file |
388 | +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
389 | |
390 | === modified file 'bin/addons/account/wizard/account_fiscalyear_close.py' |
391 | --- bin/addons/account/wizard/account_fiscalyear_close.py 2011-06-22 20:05:58 +0000 |
392 | +++ bin/addons/account/wizard/account_fiscalyear_close.py 2017-10-06 09:59:17 +0000 |
393 | @@ -63,9 +63,9 @@ |
394 | fy_id = data[0]['fy_id'] |
395 | |
396 | cr.execute("SELECT id FROM account_period WHERE date_stop < (SELECT date_start FROM account_fiscalyear WHERE id = %s)", (str(data[0]['fy2_id']),)) |
397 | - fy_period_set = ','.join(map(lambda id: str(id[0]), cr.fetchall())) |
398 | + fy_period_list = [x[0] for x in cr.fetchall()] |
399 | cr.execute("SELECT id FROM account_period WHERE date_start > (SELECT date_stop FROM account_fiscalyear WHERE id = %s)", (str(fy_id),)) |
400 | - fy2_period_set = ','.join(map(lambda id: str(id[0]), cr.fetchall())) |
401 | + fy2_period_list = [x[0] for x in cr.fetchall()] |
402 | |
403 | period = obj_acc_period.browse(cr, uid, data[0]['period_id'], context=context) |
404 | new_fyear = obj_acc_fiscalyear.browse(cr, uid, data[0]['fy2_id'], context=context) |
405 | @@ -103,15 +103,16 @@ |
406 | if accnt_type_data.close_method=='none' or account.type == 'view': |
407 | continue |
408 | if accnt_type_data.close_method=='balance': |
409 | - |
410 | + |
411 | balance_in_currency = 0.0 |
412 | if account.currency_id: |
413 | - cr.execute('SELECT sum(amount_currency) as balance_in_currency FROM account_move_line ' \ |
414 | - 'WHERE account_id = %s ' \ |
415 | - 'AND ' + query_line + ' ' \ |
416 | - 'AND currency_id = %s', (account.id, account.currency_id.id)) |
417 | + cr.execute(''' |
418 | + SELECT sum(amount_currency) as balance_in_currency FROM account_move_line |
419 | + WHERE account_id = %%s |
420 | + AND %s |
421 | + AND currency_id = %%s''' % query_line, (account.id, account.currency_id.id)) # not_a_user_entry |
422 | balance_in_currency = cr.dictfetchone()['balance_in_currency'] |
423 | - |
424 | + |
425 | if abs(account.balance)>0.0001: |
426 | obj_acc_move_line.create(cr, uid, { |
427 | 'debit': account.balance>0 and account.balance, |
428 | @@ -128,15 +129,16 @@ |
429 | offset = 0 |
430 | limit = 100 |
431 | while True: |
432 | - cr.execute('SELECT id, name, quantity, debit, credit, account_id, ref, ' \ |
433 | - 'amount_currency, currency_id, blocked, partner_id, ' \ |
434 | - 'date_maturity, date_created ' \ |
435 | - 'FROM account_move_line ' \ |
436 | - 'WHERE account_id = %s ' \ |
437 | - 'AND ' + query_line + ' ' \ |
438 | - 'AND reconcile_id is NULL ' \ |
439 | - 'ORDER BY id ' \ |
440 | - 'LIMIT %s OFFSET %s', (account.id, limit, offset)) |
441 | + cr.execute(''' |
442 | + SELECT id, name, quantity, debit, credit, account_id, ref, |
443 | + amount_currency, currency_id, blocked, partner_id, |
444 | + date_maturity, date_created |
445 | + FROM account_move_line |
446 | + WHERE account_id = %%s |
447 | + AND %s |
448 | + AND reconcile_id is NULL |
449 | + ORDER BY id |
450 | + LIMIT %%s OFFSET %%s''' % query_line, (account.id, limit, offset)) # not_a_user_entry |
451 | result = cr.dictfetchall() |
452 | if not result: |
453 | break |
454 | @@ -158,17 +160,18 @@ |
455 | offset = 0 |
456 | limit = 100 |
457 | while True: |
458 | - cr.execute('SELECT DISTINCT b.id, b.name, b.quantity, b.debit, b.credit, b.account_id, b.ref, ' \ |
459 | - 'b.amount_currency, b.currency_id, b.blocked, b.partner_id, ' \ |
460 | - 'b.date_maturity, b.date_created ' \ |
461 | - 'FROM account_move_line a, account_move_line b ' \ |
462 | - 'WHERE b.account_id = %s ' \ |
463 | - 'AND b.reconcile_id is NOT NULL ' \ |
464 | - 'AND a.reconcile_id = b.reconcile_id ' \ |
465 | - 'AND b.period_id IN ('+fy_period_set+') ' \ |
466 | - 'AND a.period_id IN ('+fy2_period_set+') ' \ |
467 | - 'ORDER BY id ' \ |
468 | - 'LIMIT %s OFFSET %s', (account.id, limit, offset)) |
469 | + cr.execute(''' |
470 | + SELECT DISTINCT b.id, b.name, b.quantity, b.debit, b.credit, b.account_id, b.ref, |
471 | + b.amount_currency, b.currency_id, b.blocked, b.partner_id, |
472 | + b.date_maturity, b.date_created |
473 | + FROM account_move_line a, account_move_line b |
474 | + WHERE b.account_id = %s |
475 | + AND b.reconcile_id is NOT NULL |
476 | + AND a.reconcile_id = b.reconcile_id |
477 | + AND b.period_id IN %s |
478 | + AND a.period_id IN %s |
479 | + ORDER BY id |
480 | + LIMIT %s OFFSET %s''', (account.id, fy_period_list, fy2_period_list, limit, offset)) |
481 | result = cr.dictfetchall() |
482 | if not result: |
483 | break |
484 | @@ -188,14 +191,15 @@ |
485 | offset = 0 |
486 | limit = 100 |
487 | while True: |
488 | - cr.execute('SELECT id, name, quantity, debit, credit, account_id, ref, ' \ |
489 | - 'amount_currency, currency_id, blocked, partner_id, ' \ |
490 | - 'date_maturity, date_created ' \ |
491 | - 'FROM account_move_line ' \ |
492 | - 'WHERE account_id = %s ' \ |
493 | - 'AND ' + query_line + ' ' \ |
494 | - 'ORDER BY id ' \ |
495 | - 'LIMIT %s OFFSET %s', (account.id, limit, offset)) |
496 | + cr.execute(''' |
497 | + SELECT id, name, quantity, debit, credit, account_id, ref, |
498 | + amount_currency, currency_id, blocked, partner_id, |
499 | + date_maturity, date_created |
500 | + FROM account_move_line |
501 | + WHERE account_id = %%s |
502 | + AND %s |
503 | + ORDER BY id |
504 | + LIMIT %%s OFFSET %%s''' % query_line, (account.id, limit, offset)) # not_a_user_entry |
505 | |
506 | result = cr.dictfetchall() |
507 | if not result: |
508 | |
509 | === modified file 'bin/addons/account_corrections/wizard/analytic_distribution_wizard.py' |
510 | --- bin/addons/account_corrections/wizard/analytic_distribution_wizard.py 2017-09-08 13:56:55 +0000 |
511 | +++ bin/addons/account_corrections/wizard/analytic_distribution_wizard.py 2017-10-06 09:59:17 +0000 |
512 | @@ -348,7 +348,7 @@ |
513 | sql_to_cor += ['entry_sequence=%s', 'last_corrected_id=%s', 'ref=%s'] |
514 | sql_data += [keep_seq_and_corrected[0], keep_seq_and_corrected[1], keep_seq_and_corrected[3] or ''] |
515 | sql_data += [created_analytic_line_ids[new_distrib_line]] |
516 | - cr.execute('update account_analytic_line set '+','.join(sql_to_cor)+' where id = %s', |
517 | + cr.execute('update account_analytic_line set '+','.join(sql_to_cor)+' where id = %s', # not_a_user_entry |
518 | sql_data) |
519 | have_been_created.append(created_analytic_line_ids[new_distrib_line]) |
520 | if created_analytic_line_ids and greater_amount['gap_amount'] and greater_amount['wl'] and greater_amount['wl'].id == line.id: |
521 | |
522 | === modified file 'bin/addons/account_hq_entries/wizard/hq_entries_validation.py' |
523 | --- bin/addons/account_hq_entries/wizard/hq_entries_validation.py 2016-11-16 17:25:09 +0000 |
524 | +++ bin/addons/account_hq_entries/wizard/hq_entries_validation.py 2017-10-06 09:59:17 +0000 |
525 | @@ -349,7 +349,7 @@ |
526 | ana_line_obj.write(cr, uid, aal.id, {'last_corrected_id': original_aal_ids[0],'name': cor_name, 'ref': cor_ref}) |
527 | # also write the OD entry_sequence to the REV aal |
528 | # ana_line_obj.write(cr, uid, res_reverse, {'journal_id': acor_journal_id, 'entry_sequence': aal.entry_sequence}) |
529 | - cr.execute('''update account_analytic_line set entry_sequence = '%s' where id = %s''' % (aal.entry_sequence, res_reverse[0])) |
530 | + cr.execute('''UPDATE account_analytic_line SET entry_sequence=%s WHERE id=%s''', (aal.entry_sequence, res_reverse[0])) |
531 | |
532 | # US-1333/1 - BKLG-12 pure AD correction flag marker for splitted lines |
533 | # (do this bypassing model write) |
534 | |
535 | === modified file 'bin/addons/account_override/account.py' |
536 | --- bin/addons/account_override/account.py 2017-08-18 13:59:37 +0000 |
537 | +++ bin/addons/account_override/account.py 2017-10-06 09:59:17 +0000 |
538 | @@ -76,7 +76,6 @@ |
539 | arg.append(('inactivation_date', '<=', cmp_date)) |
540 | return arg |
541 | |
542 | - #@@@override account.account_account.__compute |
543 | def __compute(self, cr, uid, ids, field_names, arg=None, context=None, |
544 | query='', query_params=()): |
545 | """ compute the balance, debit and/or credit for the provided |
546 | @@ -136,7 +135,8 @@ |
547 | # target_move from chart of account wizard |
548 | filters = filters.replace("AND l.state <> 'draft'", '') |
549 | prefilters = " " |
550 | - if context.get('move_state', False): |
551 | + possible_states = [x[0] for x in self.pool.get('account.move')._columns['state'].selection] |
552 | + if context.get('move_state', False) and context['move_state'] in possible_states: |
553 | prefilters += "AND l.move_id = m.id AND m.state = '%s'" % context.get('move_state') |
554 | else: |
555 | prefilters += "AND l.move_id = m.id AND m.state in ('posted', 'draft')" |
556 | @@ -150,12 +150,10 @@ |
557 | # INNER JOIN (VALUES (id1), (id2), (id3), ...) AS tmp (id) |
558 | # ON l.account_id = tmp.id |
559 | # or make _get_children_and_consol return a query and join on that |
560 | - request = ("SELECT l.account_id as id, " +\ |
561 | - ', '.join(map(mapping.__getitem__, field_names)) + |
562 | - " FROM account_move_line l, account_move m" +\ |
563 | - " WHERE l.account_id IN %s " \ |
564 | - + prefilters + filters + |
565 | - " GROUP BY l.account_id") |
566 | + request = """SELECT l.account_id as id, %s |
567 | + FROM account_move_line l, account_move m |
568 | + WHERE l.account_id IN %%s %s |
569 | + GROUP BY l.account_id""" % (', '.join(map(mapping.__getitem__, field_names)), prefilters + filters) # not_a_user_entry |
570 | params = [tuple(children_and_consolidated)] |
571 | if query_params: |
572 | for qp in query_params: |
573 | @@ -197,7 +195,6 @@ |
574 | new_amount = currency_obj.compute(cr, uid, context.get('output_currency_id'), company_currency, res[i].get(f_name), context=context) |
575 | res[i][f_name] = new_amount |
576 | return res |
577 | - #@@@end |
578 | |
579 | def _get_restricted_area(self, cr, uid, ids, field_name, args, context=None): |
580 | """ |
581 | |
582 | === modified file 'bin/addons/account_override/account_move_line.py' |
583 | --- bin/addons/account_override/account_move_line.py 2017-08-21 10:17:48 +0000 |
584 | +++ bin/addons/account_override/account_move_line.py 2017-10-06 09:59:17 +0000 |
585 | @@ -101,7 +101,7 @@ |
586 | Just used to not break default OpenERP behaviour |
587 | """ |
588 | if name and value: |
589 | - sql = "UPDATE "+ self._table + " SET " + name + " = %s WHERE id = %s" |
590 | + sql = "UPDATE "+ self._table + " SET " + name + " = %s WHERE id = %s" # not_a_user_entry |
591 | cr.execute(sql, (value, aml_id)) |
592 | return True |
593 | |
594 | @@ -178,9 +178,9 @@ |
595 | FROM account_move_line l1, account_move_line l2 |
596 | WHERE l2.account_id = l1.account_id |
597 | AND l1.id <= l2.id |
598 | - AND l2.id IN %s AND """ + \ |
599 | - self._query_get(cr, uid, obj='l1', context=c) + \ |
600 | - " GROUP BY l2.id" |
601 | + AND l2.id IN %%s AND %s |
602 | + GROUP BY l2.id |
603 | + """ % self._query_get(cr, uid, obj='l1', context=c) # not_a_user_entry |
604 | |
605 | cr.execute(sql, [tuple(ids)]) |
606 | result = dict(cr.fetchall()) |
607 | @@ -196,7 +196,7 @@ |
608 | return [] |
609 | where = ' AND '.join(map(lambda x: '(abs(sum(debit_currency-credit_currency))'+x[1]+str(x[2])+')',args)) |
610 | cursor.execute('SELECT id, SUM(debit_currency-credit_currency) FROM account_move_line \ |
611 | - GROUP BY id, debit_currency, credit_currency having '+where) |
612 | + GROUP BY id, debit_currency, credit_currency having '+where) # not_a_user_entry |
613 | res = cursor.fetchall() |
614 | if not res: |
615 | return [('id', '=', '0')] |
616 | |
617 | === modified file 'bin/addons/account_override/finance_export.py' |
618 | --- bin/addons/account_override/finance_export.py 2017-03-17 13:33:26 +0000 |
619 | +++ bin/addons/account_override/finance_export.py 2017-10-06 09:59:17 +0000 |
620 | @@ -234,7 +234,7 @@ |
621 | ids = [x and x[key_column_number] or 0 for x in sqlres] |
622 | # mark lines as exported |
623 | if ids: |
624 | - update_request = 'UPDATE ' + tablename + ' SET exported=\'t\' WHERE id in %s' |
625 | + update_request = 'UPDATE ' + tablename + ' SET exported=\'t\' WHERE id in %s' # not_a_user_entry |
626 | try: |
627 | cr.execute(update_request, (tuple(ids),)) |
628 | except Exception, e: |
629 | |
630 | === modified file 'bin/addons/account_override/res_partner.py' |
631 | --- bin/addons/account_override/res_partner.py 2016-08-18 08:32:03 +0000 |
632 | +++ bin/addons/account_override/res_partner.py 2017-10-06 09:59:17 +0000 |
633 | @@ -42,9 +42,9 @@ |
634 | res = False |
635 | sql = "select ml.id from account_move_line ml" \ |
636 | " left join account_move m on m.id=ml.move_id" \ |
637 | - " where m.state='posted' and ml.partner_id=%d limit 1" % (partner_id, ) |
638 | + " where m.state='posted' and ml.partner_id=%s limit 1" |
639 | |
640 | - cr.execute(sql) |
641 | + cr.execute(sql, (partner_id,)) |
642 | res = cr.fetchone() |
643 | return res and res[0] > 0 or False |
644 | |
645 | |
646 | === modified file 'bin/addons/account_payment/account_move_line.py' |
647 | --- bin/addons/account_payment/account_move_line.py 2011-06-24 11:32:40 +0000 |
648 | +++ bin/addons/account_payment/account_move_line.py 2017-10-06 09:59:17 +0000 |
649 | @@ -71,7 +71,7 @@ |
650 | WHERE type=%s AND active) |
651 | AND reconcile_id IS null |
652 | AND credit > 0 |
653 | - AND ''' + where + ' and ' + query), ('payable',)+sql_args ) |
654 | + AND ''' + where + ' and ' + query), ('payable',)+sql_args ) # not_a_user_entry |
655 | |
656 | res = cr.fetchall() |
657 | if not res: |
658 | @@ -117,4 +117,4 @@ |
659 | |
660 | account_move_line() |
661 | |
662 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
663 | \ No newline at end of file |
664 | +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
665 | |
666 | === modified file 'bin/addons/account_payment/account_payment.py' |
667 | --- bin/addons/account_payment/account_payment.py 2016-08-18 08:32:03 +0000 |
668 | +++ bin/addons/account_payment/account_payment.py 2017-10-06 09:59:17 +0000 |
669 | @@ -225,34 +225,6 @@ |
670 | break |
671 | return result |
672 | |
673 | - def select_by_name(self, cr, uid, ids, name, args, context=None): |
674 | - if not ids: return {} |
675 | - partner_obj = self.pool.get('res.partner') |
676 | - |
677 | - cr.execute("""SELECT pl.id, ml.%s |
678 | - FROM account_move_line ml |
679 | - INNER JOIN payment_line pl |
680 | - ON (ml.id = pl.move_line_id) |
681 | - WHERE pl.id IN %%s"""% self.translate(name), |
682 | - (tuple(ids),)) |
683 | - res = dict(cr.fetchall()) |
684 | - |
685 | - if name == 'partner_id': |
686 | - partner_name = {} |
687 | - for p_id, p_name in partner_obj.name_get(cr, uid, |
688 | - filter(lambda x:x and x != 0,res.values()), context=context): |
689 | - partner_name[p_id] = p_name |
690 | - |
691 | - for id in ids: |
692 | - if id in res and partner_name: |
693 | - res[id] = (res[id],partner_name[res[id]]) |
694 | - else: |
695 | - res[id] = (False,False) |
696 | - else: |
697 | - for id in ids: |
698 | - res.setdefault(id, (False, "")) |
699 | - return res |
700 | - |
701 | def _amount(self, cursor, user, ids, name, args, context=None): |
702 | if not ids: |
703 | return {} |
704 | |
705 | === modified file 'bin/addons/account_period_closing_level/account_period.py' |
706 | --- bin/addons/account_period_closing_level/account_period.py 2017-08-28 14:06:27 +0000 |
707 | +++ bin/addons/account_period_closing_level/account_period.py 2017-10-06 09:59:17 +0000 |
708 | @@ -47,8 +47,8 @@ |
709 | """ |
710 | Check that no oustanding unposted entries remain |
711 | """ |
712 | - sql = """SELECT COUNT(id) FROM account_move WHERE period_id = %s AND state != 'posted'""" % period_id |
713 | - cr.execute(sql) |
714 | + sql = """SELECT COUNT(id) FROM account_move WHERE period_id = %s AND state != 'posted'""" |
715 | + cr.execute(sql, (period_id,)) |
716 | sql_res = cr.fetchall() |
717 | count_moves = sql_res and sql_res[0] and sql_res[0][0] or 0 |
718 | if count_moves > 0: |
719 | @@ -226,8 +226,8 @@ |
720 | # retrieve currencies for this period (in account_move_lines) |
721 | sql = """SELECT DISTINCT currency_id |
722 | FROM account_move_line |
723 | - WHERE period_id = %s""" % period.id |
724 | - cr.execute(sql) |
725 | + WHERE period_id = %s""" |
726 | + cr.execute(sql, (period.id,)) |
727 | res = [x[0] for x in cr.fetchall()] |
728 | comp_curr_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.currency_id.id |
729 | # for each currency do a verification about fx rate |
730 | |
731 | === modified file 'bin/addons/account_reconciliation/account_move_line.py' |
732 | --- bin/addons/account_reconciliation/account_move_line.py 2017-08-18 09:42:01 +0000 |
733 | +++ bin/addons/account_reconciliation/account_move_line.py 2017-10-06 09:59:17 +0000 |
734 | @@ -509,7 +509,7 @@ |
735 | if d and d[0] and d[0][1]: |
736 | name = d[0][1] |
737 | if p or t: |
738 | - sql = "UPDATE " + self.pool.get('account.move.line')._table + " SET reconcile_txt = %s WHERE id in %s" |
739 | + sql = "UPDATE " + self.pool.get('account.move.line')._table + " SET reconcile_txt = %s WHERE id in %s" # not_a_user_entry |
740 | cr.execute(sql, (name, tuple(p+t))) |
741 | return res |
742 | |
743 | @@ -533,7 +533,7 @@ |
744 | if d and d[0] and d[0][1]: |
745 | name = d[0][1] |
746 | if p or t: |
747 | - sql = "UPDATE " + self.pool.get('account.move.line')._table + " SET reconcile_txt = %s WHERE id in %s" |
748 | + sql = "UPDATE " + self.pool.get('account.move.line')._table + " SET reconcile_txt = %s WHERE id in %s" # not_a_user_entry |
749 | cr.execute(sql, (name, tuple(p+t))) |
750 | return res |
751 | |
752 | |
753 | === modified file 'bin/addons/analytic/analytic.py' |
754 | --- bin/addons/analytic/analytic.py 2017-03-10 15:57:47 +0000 |
755 | +++ bin/addons/analytic/analytic.py 2017-10-06 09:59:17 +0000 |
756 | @@ -86,9 +86,8 @@ |
757 | COALESCE(SUM(l.unit_amount),0) AS quantity |
758 | FROM account_analytic_account a |
759 | LEFT JOIN account_analytic_line l ON (a.id = l.account_id) |
760 | - WHERE a.id IN %s |
761 | - """ + where_date + """ |
762 | - GROUP BY a.id""", where_clause_args) |
763 | + WHERE a.id IN %%s %s |
764 | + GROUP BY a.id""" % where_date, where_clause_args) # not_a_user_entry |
765 | for ac_id, debit, credit, balance, quantity in cr.fetchall(): |
766 | res[ac_id] = {'debit': debit, 'credit': credit, 'balance': balance, 'quantity': quantity} |
767 | return self._compute_level_tree(cr, uid, ids, child_ids, res, ['debit', 'credit', 'balance', 'quantity'], context) |
768 | |
769 | === modified file 'bin/addons/analytic_distribution/destination_tools.py' |
770 | --- bin/addons/analytic_distribution/destination_tools.py 2017-02-10 08:51:30 +0000 |
771 | +++ bin/addons/analytic_distribution/destination_tools.py 2017-10-06 09:59:17 +0000 |
772 | @@ -48,15 +48,14 @@ |
773 | |
774 | if self._rel == 'account_destination_link' and not context.get('display_disabled'): |
775 | where_c = " %s AND disabled='f' " % (where_c, ) |
776 | - query = 'SELECT %(rel)s.%(id2)s, %(rel)s.%(id1)s \ |
777 | - FROM %(rel)s, %(from_c)s \ |
778 | - WHERE %(rel)s.%(id1)s IN %%s \ |
779 | - AND %(rel)s.%(id2)s = %(tbl)s.id \ |
780 | - %(where_c)s \ |
781 | - %(order_by)s \ |
782 | - %(limit)s \ |
783 | - OFFSET %(offset)d' \ |
784 | - % {'rel': self._rel, |
785 | + query = '''SELECT %(rel)s.%(id2)s, %(rel)s.%(id1)s |
786 | + FROM %(rel)s, %(from_c)s |
787 | + WHERE %(rel)s.%(id1)s IN %%s |
788 | + AND %(rel)s.%(id2)s = %(tbl)s.id |
789 | + %(where_c)s |
790 | + %(order_by)s |
791 | + %(limit)s |
792 | + OFFSET %(offset)d''' % {'rel': self._rel, # not_a_user_entry |
793 | 'from_c': from_c, |
794 | 'tbl': obj._table, |
795 | 'id1': self._id1, |
796 | @@ -103,15 +102,15 @@ |
797 | |
798 | # JIRA UTP-334 |
799 | if self._rel == 'account_destination_link': |
800 | - cr.execute('select id from '+self._rel+' where '+self._id1+'=%s AND '+self._id2+' IN (SELECT '+self._rel+'.'+self._id2+' FROM '+self._rel+', '+','.join(tables)+' WHERE '+self._rel+'.'+self._id1+'=%s AND '+self._rel+'.'+self._id2+' = '+obj._table+'.id '+ d1 +' and '+self._rel+'.'+self._id2+' not in %s)', args) |
801 | + cr.execute('select id from '+self._rel+' where '+self._id1+'=%s AND '+self._id2+' IN (SELECT '+self._rel+'.'+self._id2+' FROM '+self._rel+', '+','.join(tables)+' WHERE '+self._rel+'.'+self._id1+'=%s AND '+self._rel+'.'+self._id2+' = '+obj._table+'.id '+ d1 +' and '+self._rel+'.'+self._id2+' not in %s)', args) # not_a_user_entry |
802 | unlink_obj = pooler.get_pool(cr.dbname).get('account.destination.link') |
803 | for unlinked_id in cr.fetchall(): |
804 | unlink_obj.unlink(cr, user, unlinked_id[0]) |
805 | else: |
806 | - cr.execute('delete from '+self._rel+' where '+self._id1+'=%s AND '+self._id2+' IN (SELECT '+self._rel+'.'+self._id2+' FROM '+self._rel+', '+','.join(tables)+' WHERE '+self._rel+'.'+self._id1+'=%s AND '+self._rel+'.'+self._id2+' = '+obj._table+'.id '+ d1 +' and '+self._rel+'.'+self._id2+' not in %s)', args) |
807 | - |
808 | - |
809 | - cr.execute('select '+self._id2+' from '+self._rel+' where '+self._id1+'=%s', [m_id, ]) |
810 | + cr.execute('delete from '+self._rel+' where '+self._id1+'=%s AND '+self._id2+' IN (SELECT '+self._rel+'.'+self._id2+' FROM '+self._rel+', '+','.join(tables)+' WHERE '+self._rel+'.'+self._id1+'=%s AND '+self._rel+'.'+self._id2+' = '+obj._table+'.id '+ d1 +' and '+self._rel+'.'+self._id2+' not in %s)', args) # not_a_user_entry |
811 | + |
812 | + |
813 | + cr.execute('select '+self._id2+' from '+self._rel+' where '+self._id1+'=%s', [m_id, ]) # not_a_user_entry |
814 | existing = [x[0] for x in cr.fetchall()] |
815 | |
816 | for act_nbr in act[2]: |
817 | @@ -120,7 +119,7 @@ |
818 | link_obj = pooler.get_pool(cr.dbname).get('account.destination.link') |
819 | link_obj.create(cr, user, {self._id1: m_id, self._id2: act_nbr}) |
820 | else: |
821 | - cr.execute('insert into '+self._rel+' ('+self._id1+','+self._id2+') values (%s, %s)', (m_id, act_nbr)) |
822 | + cr.execute('insert into '+self._rel+' ('+self._id1+','+self._id2+') values (%s, %s)', (m_id, act_nbr)) # not_a_user_entry |
823 | |
824 | else: |
825 | newargs.append(act) |
826 | |
827 | === modified file 'bin/addons/analytic_override/analytic_account.py' |
828 | --- bin/addons/analytic_override/analytic_account.py 2017-07-28 12:30:15 +0000 |
829 | +++ bin/addons/analytic_override/analytic_account.py 2017-10-06 09:59:17 +0000 |
830 | @@ -156,10 +156,8 @@ |
831 | COALESCE(SUM(l.amount),0) AS balance, |
832 | COALESCE(SUM(l.unit_amount),0) AS quantity |
833 | FROM account_analytic_account a |
834 | - LEFT JOIN account_analytic_line l ON (a.id = l.""" + default_field + """) |
835 | - WHERE a.id IN %s |
836 | - """ + where_date + """ |
837 | - GROUP BY a.id""", where_clause_args) |
838 | + LEFT JOIN account_analytic_line l ON (a.id = l.%s) |
839 | + WHERE a.id IN %%s %s GROUP BY a.id""" % (default_field, where_date), where_clause_args) # ignore_sql_check |
840 | for ac_id, debit, credit, balance, quantity in cr.fetchall(): |
841 | res[ac_id] = {'debit': debit, 'credit': credit, 'balance': balance, 'quantity': quantity} |
842 | tmp_res = self._compute_level_tree(cr, uid, ids, child_ids, res, name, context) |
843 | @@ -277,11 +275,11 @@ |
844 | ids = [ids] |
845 | if not ids: |
846 | return True |
847 | - cr.execute('''select a.code, a.name, d.name from |
848 | - '''+self._table+''' d |
849 | + cr.execute(''' |
850 | + select a.code, a.name, d.name from %s d |
851 | left join account_account a on a.default_destination_id = d.id |
852 | left join account_destination_link l on l.destination_id = d.id and l.account_id = a.id and l.disabled='f' |
853 | - where a.default_destination_id is not null and l.destination_id is null and d.id in %s ''', (tuple(ids),) |
854 | + where a.default_destination_id is not null and l.destination_id is null and d.id in %%s''' % self._table, (tuple(ids),) # not_a_user_entry |
855 | ) |
856 | error = [] |
857 | for x in cr.fetchall(): |
858 | |
859 | === modified file 'bin/addons/analytic_override/analytic_line.py' |
860 | --- bin/addons/analytic_override/analytic_line.py 2016-08-22 13:55:25 +0000 |
861 | +++ bin/addons/analytic_override/analytic_line.py 2017-10-06 09:59:17 +0000 |
862 | @@ -242,7 +242,7 @@ |
863 | br = self.browse(cr, uid, res,context) |
864 | if entry_sequence_sync is not None: |
865 | if entry_sequence_sync != br.entry_sequence: |
866 | - cr.execute('''update account_analytic_line set entry_sequence = '%s' where id = %s''' % (entry_sequence_sync,res)) |
867 | + cr.execute('''UPDATE account_analytic_line SET entry_sequence=%s WHERE id=%s''', (entry_sequence_sync, res)) |
868 | return res |
869 | |
870 | def write(self, cr, uid, ids, vals, context=None): |
871 | |
872 | === modified file 'bin/addons/base/ir/ir_attachment.py' |
873 | --- bin/addons/base/ir/ir_attachment.py 2017-02-08 16:42:57 +0000 |
874 | +++ bin/addons/base/ir/ir_attachment.py 2017-10-06 09:59:17 +0000 |
875 | @@ -61,10 +61,11 @@ |
876 | for model, mids in res_ids.items(): |
877 | # ignore attachments that are not attached to a resource anymore when checking access rights |
878 | # (resource was deleted but attachment was not) |
879 | - cr.execute('select id from '+self.pool.get(model)._table+' where id in %s', (tuple(mids),)) |
880 | + model_obj = self.pool.get(model) |
881 | + cr.execute('select id from '+model_obj._table+' where id in %s', (tuple(mids),)) # not_a_user_entry |
882 | mids = [x[0] for x in cr.fetchall()] |
883 | ima.check(cr, uid, model, mode, context=context) |
884 | - self.pool.get(model).check_access_rule(cr, uid, mids, mode, context=context) |
885 | + model_obj.check_access_rule(cr, uid, mids, mode, context=context) |
886 | |
887 | def search(self, cr, uid, args, offset=0, limit=None, order=None, |
888 | context=None, count=False): |
889 | |
890 | === modified file 'bin/addons/base/ir/ir_cron.py' |
891 | --- bin/addons/base/ir/ir_cron.py 2017-08-21 09:41:05 +0000 |
892 | +++ bin/addons/base/ir/ir_cron.py 2017-10-06 09:59:17 +0000 |
893 | @@ -133,7 +133,7 @@ |
894 | addsql = '' |
895 | if not numbercall: |
896 | addsql = ', active=False' |
897 | - cr.execute("update ir_cron set nextcall=%s, numbercall=%s"+addsql+" where id=%s", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id'])) |
898 | + cr.execute("update ir_cron set nextcall=%s, numbercall=%s"+addsql+" where id=%s", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id'])) # not_a_user_entry |
899 | cr.commit() |
900 | |
901 | |
902 | |
903 | === modified file 'bin/addons/base/ir/ir_model.py' |
904 | --- bin/addons/base/ir/ir_model.py 2017-05-12 15:07:08 +0000 |
905 | +++ bin/addons/base/ir/ir_model.py 2017-10-06 09:59:17 +0000 |
906 | @@ -392,7 +392,7 @@ |
907 | res = super(ir_model_fields,self).write(cr, user, ids, vals, context=context) |
908 | |
909 | if column_rename: |
910 | - cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % column_rename[1]) |
911 | + cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % column_rename[1]) # not_a_user_entry |
912 | # This is VERY risky, but let us have this feature: |
913 | # we want to change the key of column in obj._columns dict |
914 | col = column_rename[0]._columns.pop(column_rename[1][1]) # take object out, w/o copy |
915 | @@ -498,17 +498,17 @@ |
916 | if isinstance(group_ids, (int, long)): |
917 | group_ids = [group_ids] |
918 | for group_id in group_ids: |
919 | - cr.execute("SELECT perm_" + mode + " " |
920 | - " FROM ir_model_access a " |
921 | - " JOIN ir_model m ON (m.id = a.model_id) " |
922 | - " WHERE m.model = %s AND a.group_id = %s", (model_name, group_id) |
923 | + cr.execute("""SELECT perm_%s |
924 | + FROM ir_model_access a |
925 | + JOIN ir_model m ON (m.id = a.model_id) |
926 | + WHERE m.model = %%s AND a.group_id = %%s""" % mode, (model_name, group_id) # not_a_user_entry |
927 | ) |
928 | r = cr.fetchone() |
929 | if r is None: |
930 | - cr.execute("SELECT perm_" + mode + " " |
931 | - " FROM ir_model_access a " |
932 | - " JOIN ir_model m ON (m.id = a.model_id) " |
933 | - " WHERE m.model = %s AND a.group_id IS NULL", (model_name, ) |
934 | + cr.execute("""SELECT perm_%s |
935 | + FROM ir_model_access a |
936 | + JOIN ir_model m ON (m.id = a.model_id) |
937 | + WHERE m.model = %%s AND a.group_id IS NULL""" % mode, (model_name, ) # not_a_user_entry |
938 | ) |
939 | r = cr.fetchone() |
940 | |
941 | @@ -539,23 +539,23 @@ |
942 | return True |
943 | |
944 | # We check if a specific rule exists |
945 | - cr.execute('SELECT MAX(CASE WHEN perm_' + mode + ' THEN 1 ELSE 0 END) ' |
946 | - ' FROM ir_model_access a ' |
947 | - ' JOIN ir_model m ON (m.id = a.model_id) ' |
948 | - ' JOIN res_groups_users_rel gu ON (gu.gid = a.group_id) ' |
949 | - ' WHERE m.model = %s ' |
950 | - ' AND gu.uid = %s ' |
951 | + cr.execute('''SELECT MAX(CASE WHEN perm_%s THEN 1 ELSE 0 END) |
952 | + FROM ir_model_access a |
953 | + JOIN ir_model m ON (m.id = a.model_id) |
954 | + JOIN res_groups_users_rel gu ON (gu.gid = a.group_id) |
955 | + WHERE m.model = %%s |
956 | + AND gu.uid = %%s''' % mode # not_a_user_entry |
957 | , (model_name, uid,) |
958 | ) |
959 | r = cr.fetchone()[0] |
960 | |
961 | if r is None: |
962 | # there is no specific rule. We check the generic rule |
963 | - cr.execute('SELECT MAX(CASE WHEN perm_' + mode + ' THEN 1 ELSE 0 END) ' |
964 | - ' FROM ir_model_access a ' |
965 | - ' JOIN ir_model m ON (m.id = a.model_id) ' |
966 | - ' WHERE a.group_id IS NULL ' |
967 | - ' AND m.model = %s ' |
968 | + cr.execute('''SELECT MAX(CASE WHEN perm_%s THEN 1 ELSE 0 END) |
969 | + FROM ir_model_access a |
970 | + JOIN ir_model m ON (m.id = a.model_id) |
971 | + WHERE a.group_id IS NULL |
972 | + AND m.model = %%s ''' % mode # not_a_user_entry |
973 | , (model_name,) |
974 | ) |
975 | r = cr.fetchone()[0] |
976 | @@ -564,12 +564,12 @@ |
977 | cr.execute('''select |
978 | g.name |
979 | from |
980 | - ir_model_access a |
981 | - left join ir_model m on (a.model_id=m.id) |
982 | + ir_model_access a |
983 | + left join ir_model m on (a.model_id=m.id) |
984 | left join res_groups g on (a.group_id=g.id) |
985 | where |
986 | - m.model=%s and |
987 | - a.group_id is not null and perm_''' + mode, (model_name, )) |
988 | + m.model=%%s and |
989 | + a.group_id is not null and perm_%s''' % mode, (model_name, )) # not_a_user_entry |
990 | groups = ', '.join(map(lambda x: x[0], cr.fetchall())) or '/' |
991 | msgs = { |
992 | 'read': _("You can not read this document (%s) ! Be sure your user belongs to one of these groups: %s."), |
993 | @@ -730,7 +730,7 @@ |
994 | if xml_id: |
995 | cr.execute('''SELECT imd.id, imd.res_id, md.id |
996 | FROM ir_model_data imd LEFT JOIN %s md ON (imd.res_id = md.id) |
997 | - WHERE imd.module=%%s AND imd.name=%%s''' % model_obj._table, |
998 | + WHERE imd.module=%%s AND imd.name=%%s''' % model_obj._table, # not_a_user_entry |
999 | (module, xml_id)) |
1000 | results = cr.fetchall() |
1001 | for imd_id2,res_id2,real_id2 in results: |
1002 | @@ -831,20 +831,19 @@ |
1003 | else: |
1004 | where += ' and (key2 is null)' |
1005 | |
1006 | - cr.execute('select * from ir_values where model=%s and key=%s and name=%s'+where,(model, key, name)) |
1007 | + cr.execute('select * from ir_values where model=%s and key=%s and name=%s'+where,(model, key, name)) # not_a_user_entry |
1008 | res = cr.fetchone() |
1009 | if not res: |
1010 | res = ir.ir_set(cr, uid, key, key2, name, models, value, replace, isobject, meta) |
1011 | elif xml_id: |
1012 | - cr.execute('UPDATE ir_values set value=%s WHERE model=%s and key=%s and name=%s'+where,(value, model, key, name)) |
1013 | + cr.execute('UPDATE ir_values set value=%s WHERE model=%s and key=%s and name=%s'+where,(value, model, key, name)) # not_a_user_entry |
1014 | return True |
1015 | |
1016 | def _process_end(self, cr, uid, modules): |
1017 | if not modules: |
1018 | return True |
1019 | modules = list(modules) |
1020 | - module_in = ",".join(["%s"] * len(modules)) |
1021 | - cr.execute('select id,name,model,res_id,module from ir_model_data where module IN (' + module_in + ') and noupdate=%s', modules + [False]) |
1022 | + cr.execute('select id,name,model,res_id,module from ir_model_data where module IN %s and noupdate=%s', (tuple(modules), False)) |
1023 | wkf_todo = [] |
1024 | for (id, name, model, res_id,module) in cr.fetchall(): |
1025 | if (module,name) not in self.loads: |
1026 | |
1027 | === modified file 'bin/addons/base/ir/ir_rule.py' |
1028 | --- bin/addons/base/ir/ir_rule.py 2017-05-12 14:01:09 +0000 |
1029 | +++ bin/addons/base/ir/ir_rule.py 2017-10-06 09:59:17 +0000 |
1030 | @@ -106,11 +106,11 @@ |
1031 | cr.execute("""SELECT r.id |
1032 | FROM ir_rule r |
1033 | JOIN ir_model m ON (r.model_id = m.id) |
1034 | - WHERE m.model = %s |
1035 | - AND r.perm_""" + mode + """ |
1036 | + WHERE m.model = %%s |
1037 | + AND r.perm_%s |
1038 | AND (r.id IN (SELECT rule_group_id FROM rule_group_rel g_rel |
1039 | JOIN res_groups_users_rel u_rel ON (g_rel.group_id = u_rel.gid) |
1040 | - WHERE u_rel.uid = %s) OR r.global)""", (model_name, uid)) |
1041 | + WHERE u_rel.uid = %%s) OR r.global)""" % mode, (model_name, uid)) # not_a_user_entry |
1042 | ids = map(lambda x: x[0], cr.fetchall()) |
1043 | if ids: |
1044 | for rule in self.browse(cr, uid, ids): |
1045 | |
1046 | === modified file 'bin/addons/base/ir/ir_sequence.py' |
1047 | --- bin/addons/base/ir/ir_sequence.py 2017-06-14 13:37:10 +0000 |
1048 | +++ bin/addons/base/ir/ir_sequence.py 2017-10-06 09:59:17 +0000 |
1049 | @@ -46,7 +46,7 @@ |
1050 | else: |
1051 | # currval can't be used as it returns the value |
1052 | # most recently obtained by nextval for this sequence in the current session |
1053 | - cr.execute("select last_value, is_called, increment_by from ir_sequence_%03d" % seq['id']) |
1054 | + cr.execute("select last_value, is_called, increment_by from ir_sequence_%03d" % seq['id']) # not_a_user_entry |
1055 | data = cr.fetchone() |
1056 | ret[seq['id']] = data[0] |
1057 | if data[1]: |
1058 | @@ -104,7 +104,7 @@ |
1059 | if number_increment == 0: |
1060 | raise osv.except_osv(_('Warning!'),_("Increment number must not be zero.")) |
1061 | assert isinstance(id, (int, long)) |
1062 | - sql = "CREATE SEQUENCE ir_sequence_%03d INCREMENT BY %%s START WITH %%s" % id |
1063 | + sql = "CREATE SEQUENCE ir_sequence_%03d INCREMENT BY %%s START WITH %%s" % id # not_a_user_entry |
1064 | cr.execute(sql, (number_increment, number_next)) |
1065 | |
1066 | def _drop_sequence(self, cr, ids): |
1067 | @@ -117,7 +117,7 @@ |
1068 | |
1069 | # RESTRICT is the default; it prevents dropping the sequence if an |
1070 | # object depends on it. |
1071 | - cr.execute("DROP SEQUENCE IF EXISTS %s RESTRICT " % names) |
1072 | + cr.execute("DROP SEQUENCE IF EXISTS %s RESTRICT " % names) # not_a_user_entry |
1073 | |
1074 | def _alter_sequence(self, cr, id, number_increment): |
1075 | """ Alter a PostreSQL sequence. |
1076 | @@ -129,7 +129,7 @@ |
1077 | assert isinstance(id, (int, long)) |
1078 | cr.execute(""" |
1079 | ALTER SEQUENCE ir_sequence_%03d INCREMENT BY %%s |
1080 | - """ % id, (number_increment,)) |
1081 | + """ % id, (number_increment,)) # not_a_user_entry |
1082 | |
1083 | def create(self, cr, uid, values, context=None): |
1084 | values = self._add_missing_default_values(cr, uid, values, context) |
1085 | @@ -162,7 +162,7 @@ |
1086 | self._alter_sequence(cr, row['id'], i) |
1087 | else: |
1088 | self._drop_sequence(cr, row['id']) |
1089 | - cr.execute('update %s set number_next=%%s where id=%%s ' % (self._table, ), (n, row['id'])) |
1090 | + cr.execute('update %s set number_next=%%s where id=%%s ' % (self._table, ), (n, row['id'])) # not_a_user_entry |
1091 | else: |
1092 | if new_implementation in ('no_gap', None): |
1093 | pass |
1094 | @@ -201,7 +201,7 @@ |
1095 | if context is None: |
1096 | context = {} |
1097 | if seq['implementation'] == 'psql': |
1098 | - cr.execute("SELECT nextval('ir_sequence_%03d')" % seq['id']) |
1099 | + cr.execute("SELECT nextval('ir_sequence_%03d')" % seq['id']) # not_a_user_entry |
1100 | seq['number_next'] = cr.fetchone() |
1101 | else: |
1102 | cr.execute("SELECT number_next FROM ir_sequence WHERE id=%s FOR UPDATE NOWAIT", (seq['id'],)) |
1103 | @@ -229,7 +229,7 @@ |
1104 | WHERE %s=%%s |
1105 | AND active=true |
1106 | AND (company_id in %%s or company_id is NULL) |
1107 | - ORDER BY company_id, id''' % code_or_id, (sequence_code_or_id, tuple(company_ids))) |
1108 | + ORDER BY company_id, id''' % code_or_id, (sequence_code_or_id, tuple(company_ids))) # not_a_user_entry |
1109 | res = cr.dictfetchone() |
1110 | return self._next(cr, uid, res, context=context) |
1111 | |
1112 | |
1113 | === modified file 'bin/addons/base/ir/ir_values.py' |
1114 | --- bin/addons/base/ir/ir_values.py 2017-04-06 09:08:50 +0000 |
1115 | +++ bin/addons/base/ir/ir_values.py 2017-10-06 09:59:17 +0000 |
1116 | @@ -213,7 +213,7 @@ |
1117 | where.append('(user_id=%s or (user_id IS NULL)) order by sequence,id') |
1118 | params.append(uid) |
1119 | clause = ' and '.join(where) |
1120 | - cr.execute('select id,name,value,object,meta, key from ir_values where ' + clause, params) |
1121 | + cr.execute('select id,name,value,object,meta, key from ir_values where ' + clause, params) # not_a_user_entry |
1122 | result = cr.fetchall() |
1123 | if result: |
1124 | break |
1125 | |
1126 | === modified file 'bin/addons/base/ir/workflow/print_instance.py' |
1127 | --- bin/addons/base/ir/workflow/print_instance.py 2011-03-03 12:55:06 +0000 |
1128 | +++ bin/addons/base/ir/workflow/print_instance.py 2017-10-06 09:59:17 +0000 |
1129 | @@ -27,7 +27,7 @@ |
1130 | |
1131 | def graph_get(cr, graph, wkf_ids, nested, workitem, processed_subflows): |
1132 | import pydot |
1133 | - cr.execute('select * from wkf_activity where wkf_id in ('+','.join(['%s']*len(wkf_ids))+')', wkf_ids) |
1134 | + cr.execute('select * from wkf_activity where wkf_id in %s', (tuple(wkf_ids),)) |
1135 | nodes = cr.dictfetchall() |
1136 | activities = {} |
1137 | actfrom = {} |
1138 | @@ -92,9 +92,9 @@ |
1139 | activity_to = actto[t['act_to']][1].get(t['signal'], actto[t['act_to']][0]) |
1140 | graph.add_edge(pydot.Edge( str(activity_from) ,str(activity_to), fontsize='10', **args)) |
1141 | nodes = cr.dictfetchall() |
1142 | - cr.execute('select * from wkf_activity where flow_start=True and wkf_id in ('+','.join(['%s']*len(wkf_ids))+')', wkf_ids) |
1143 | + cr.execute('select * from wkf_activity where flow_start=True and wkf_id in %s', (tuple(wkf_ids),)) |
1144 | start = cr.fetchone()[0] |
1145 | - cr.execute("select 'subflow.'||name,id from wkf_activity where flow_stop=True and wkf_id in ("+','.join(['%s']*len(wkf_ids))+')', wkf_ids) |
1146 | + cr.execute("select 'subflow.'||name,id from wkf_activity where flow_stop=True and wkf_id in %s", (tuple(wkf_ids),)) |
1147 | stop = cr.fetchall() |
1148 | if (stop): |
1149 | stop = (stop[0][1], dict(stop)) |
1150 | |
1151 | === modified file 'bin/addons/base/res/res_log.py' |
1152 | --- bin/addons/base/res/res_log.py 2012-04-03 15:17:47 +0000 |
1153 | +++ bin/addons/base/res/res_log.py 2017-10-06 09:59:17 +0000 |
1154 | @@ -48,8 +48,7 @@ |
1155 | cr.execute('SELECT 1 FROM pg_indexes WHERE indexname=%s', |
1156 | (self._index_name,)) |
1157 | if not cr.fetchone(): |
1158 | - cr.execute('CREATE INDEX %s ON res_log (user_id, read)' % |
1159 | - self._index_name) |
1160 | + cr.execute('CREATE INDEX %s ON res_log (user_id, read)' % self._index_name) # not_a_user_entry |
1161 | |
1162 | def create(self, cr, uid, vals, context=None): |
1163 | create_context = context and dict(context) or {} |
1164 | |
1165 | === modified file 'bin/addons/consumption_calculation/consumption_calculation.py' |
1166 | --- bin/addons/consumption_calculation/consumption_calculation.py 2017-05-24 16:07:06 +0000 |
1167 | +++ bin/addons/consumption_calculation/consumption_calculation.py 2017-10-06 09:59:17 +0000 |
1168 | @@ -270,7 +270,7 @@ |
1169 | LEFT JOIN product_product p ON l.product_id = p.id |
1170 | LEFT JOIN product_template t ON p.product_tmpl_id = t.id |
1171 | LEFT JOIN real_average_consumption rac ON l.rac_id = rac.id |
1172 | - WHERE (t.type != 'service_recep' %s) AND rac.id in %%s LIMIT 1''' % transport_cat, |
1173 | + WHERE (t.type != 'service_recep' %s) AND rac.id in %%s LIMIT 1''' % transport_cat, # not_a_user_entry |
1174 | (tuple(ids),)) |
1175 | res = cr.fetchall() |
1176 | |
1177 | @@ -802,7 +802,7 @@ |
1178 | # uf-1344 "quantity NOT in stock with this ED => line should be in red, no batch picked up" |
1179 | prodlot_id = None |
1180 | #recursion: can't use write |
1181 | - cr.execute('UPDATE '+self._table+' SET product_qty=%s, batch_mandatory=%s, date_mandatory=%s, asset_mandatory=%s, prodlot_id=%s, expiry_date=%s, asset_id=%s where id=%s', (product_qty, batch_mandatory, date_mandatory, asset_mandatory, prodlot_id, expiry_date, asset_id, obj.id)) |
1182 | + cr.execute('UPDATE '+self._table+' SET product_qty=%s, batch_mandatory=%s, date_mandatory=%s, asset_mandatory=%s, prodlot_id=%s, expiry_date=%s, asset_id=%s where id=%s', (product_qty, batch_mandatory, date_mandatory, asset_mandatory, prodlot_id, expiry_date, asset_id, obj.id)) # not_a_user_entry |
1183 | |
1184 | return True |
1185 | |
1186 | |
1187 | === modified file 'bin/addons/data_finance/import_analytic_lines.py' |
1188 | --- bin/addons/data_finance/import_analytic_lines.py 2015-11-03 12:55:51 +0000 |
1189 | +++ bin/addons/data_finance/import_analytic_lines.py 2017-10-06 09:59:17 +0000 |
1190 | @@ -106,7 +106,7 @@ |
1191 | ('type', '=', 'bank'), |
1192 | ('is_current_instance', '=', True)]) |
1193 | if not jids: |
1194 | - cr.execute('SELECT max(code) from account_account where parent_id=%s'%(parent_account,)) |
1195 | + cr.execute('SELECT max(code) from account_account where parent_id=%s', (parent_account,)) |
1196 | |
1197 | code = int(cr.fetchone()[0])+1 |
1198 | acc_id = account_obj.create(cr, uid, { |
1199 | |
1200 | === modified file 'bin/addons/finance/wizard/account_report_partner_balance_tree.py' |
1201 | --- bin/addons/finance/wizard/account_report_partner_balance_tree.py 2017-06-12 09:56:20 +0000 |
1202 | +++ bin/addons/finance/wizard/account_report_partner_balance_tree.py 2017-10-06 09:59:17 +0000 |
1203 | @@ -76,7 +76,7 @@ |
1204 | " " + self.PARTNER_REQUEST + " " |
1205 | " " + self.TAX_REQUEST + " " |
1206 | " " + self.IB_JOURNAL_REQUEST + " " |
1207 | - " " + self.IB_DATE_TO + " " |
1208 | + " " + self.IB_DATE_TO + " " # not_a_user_entry |
1209 | "GROUP BY ac.type, p.id, p.ref, p.name, ac.name, ac.code " |
1210 | "ORDER BY ac.type, p.name ", |
1211 | (tuple(self.ib_move_state_list),)) |
1212 | @@ -216,25 +216,23 @@ |
1213 | # inspired from account_report_balance.py report query |
1214 | # but group only per 'account type'/'partner' |
1215 | where = where and 'AND %s' % where or '' |
1216 | - query = "SELECT ac.type as account_type," \ |
1217 | - " p.id as partner_id, p.ref as partner_ref, p.name as partner_name," \ |
1218 | - " COALESCE(sum(debit),0) AS debit, COALESCE(sum(credit), 0) AS credit," \ |
1219 | - " CASE WHEN sum(debit) > sum(credit) THEN sum(debit) - sum(credit) ELSE 0 END AS sdebit," \ |
1220 | - " CASE WHEN sum(debit) < sum(credit) THEN sum(credit) - sum(debit) ELSE 0 END AS scredit" \ |
1221 | - " FROM account_move_line l INNER JOIN res_partner p ON (l.partner_id=p.id)" \ |
1222 | - " JOIN account_account ac ON (l.account_id = ac.id)" \ |
1223 | - " JOIN account_move am ON (am.id = l.move_id)" \ |
1224 | - " JOIN account_account_type at ON (ac.user_type = at.id)" \ |
1225 | - " WHERE ac.type IN " + self.account_type + "" \ |
1226 | - " AND am.state IN " + move_state + "" \ |
1227 | - " " + where + "" \ |
1228 | - " " + self.INSTANCE_REQUEST + " " \ |
1229 | - " " + self.TAX_REQUEST + " " \ |
1230 | - " " + self.PARTNER_REQUEST + " " \ |
1231 | - " " + self.ACCOUNT_REQUEST + " " \ |
1232 | - " " + self.RECONCILE_REQUEST + " " \ |
1233 | - " GROUP BY ac.type,p.id,p.ref,p.name" \ |
1234 | - " ORDER BY ac.type,p.name" |
1235 | + query = """SELECT ac.type as account_type, |
1236 | + p.id as partner_id, p.ref as partner_ref, p.name as partner_name, |
1237 | + COALESCE(sum(debit),0) AS debit, COALESCE(sum(credit), 0) AS credit, |
1238 | + CASE WHEN sum(debit) > sum(credit) THEN sum(debit) - sum(credit) ELSE 0 END AS sdebit, |
1239 | + CASE WHEN sum(debit) < sum(credit) THEN sum(credit) - sum(debit) ELSE 0 END AS scredit |
1240 | + FROM account_move_line l INNER JOIN res_partner p ON (l.partner_id=p.id) |
1241 | + JOIN account_account ac ON (l.account_id = ac.id) |
1242 | + JOIN account_move am ON (am.id = l.move_id) |
1243 | + JOIN account_account_type at ON (ac.user_type = at.id) |
1244 | + WHERE ac.type IN %s |
1245 | + AND am.state IN %s |
1246 | + %s %s %s %s %s %s |
1247 | + GROUP BY ac.type,p.id,p.ref,p.name |
1248 | + ORDER BY ac.type,p.name""" % (self.account_type, move_state, # not_a_user_entry |
1249 | + where, self.INSTANCE_REQUEST, self.TAX_REQUEST, |
1250 | + self.PARTNER_REQUEST, self.ACCOUNT_REQUEST, |
1251 | + self.RECONCILE_REQUEST) |
1252 | cr.execute(query) |
1253 | res = cr.dictfetchall() |
1254 | |
1255 | @@ -460,13 +458,14 @@ |
1256 | # recalculate the result only if the criteria have changed |
1257 | if not self.total_debit_credit_balance or account_type != self.total_debit_credit_balance['account_type'] \ |
1258 | or data != self.total_debit_credit_balance['data']: |
1259 | - query = "SELECT" \ |
1260 | - " sum(debit) AS debit, sum(credit) AS credit, sum(balance) as balance" \ |
1261 | - " FROM account_partner_balance_tree" \ |
1262 | - " WHERE account_type IN ('" + account_type + "')" \ |
1263 | - " AND uid = " + str(uid) + "" \ |
1264 | - " AND build_ts='" + data['build_ts'] + "'" |
1265 | - cr.execute(query) |
1266 | + query = """SELECT |
1267 | + sum(debit) AS debit, sum(credit) AS credit, sum(balance) as balance |
1268 | + FROM account_partner_balance_tree |
1269 | + WHERE account_type IN ('%s') |
1270 | + AND uid = %%s |
1271 | + AND build_ts=%%s |
1272 | + """ % account_type # not_a_user_entry |
1273 | + cr.execute(query, (uid, data['build_ts'])) |
1274 | res = cr.dictfetchall() |
1275 | self.total_debit_credit_balance['account_type'] = account_type |
1276 | self.total_debit_credit_balance['data'] = data |
1277 | |
1278 | === modified file 'bin/addons/financing_contract/contract.py' |
1279 | --- bin/addons/financing_contract/contract.py 2017-03-06 08:36:33 +0000 |
1280 | +++ bin/addons/financing_contract/contract.py 2017-10-06 09:59:17 +0000 |
1281 | @@ -656,7 +656,7 @@ |
1282 | |
1283 | res = super(financing_contract_contract, self).write(cr, uid, ids, vals, context=context) |
1284 | if fp_added_flag: # if the previous save has been recovered thanks to the flag set to True, then reset it back to False |
1285 | - cr.execute('''update financing_contract_contract set fp_added_flag = 'f' where id = %s''' % (ids[0])) |
1286 | + cr.execute('''update financing_contract_contract set fp_added_flag = 'f' where id = %s''', (ids[0],)) |
1287 | |
1288 | # uf-2342 delete any assigned quads that are no longer valid due to changes in the contract |
1289 | # get list of all valid ids for this contract |
1290 | |
1291 | === modified file 'bin/addons/financing_contract/financing_contract_account_quadruplet.py' |
1292 | --- bin/addons/financing_contract/financing_contract_account_quadruplet.py 2017-02-10 08:51:30 +0000 |
1293 | +++ bin/addons/financing_contract/financing_contract_account_quadruplet.py 2017-10-06 09:59:17 +0000 |
1294 | @@ -68,7 +68,7 @@ |
1295 | cr.execute('''select account_quadruplet_id |
1296 | from financing_contract_actual_account_quadruplets |
1297 | where actual_line_id in (select id from financing_contract_format_line |
1298 | - where format_id = %s and is_quadruplet is true)''' % (contract.format_id.id)) |
1299 | + where format_id = %s and is_quadruplet is true)''', (contract.format_id.id,)) |
1300 | rows = cr.fetchall() |
1301 | for id in [x[0] for x in rows]: |
1302 | exclude[id] = True |
1303 | @@ -111,7 +111,7 @@ |
1304 | from financing_contract_cost_center cc |
1305 | where cc.contract_id = %s |
1306 | and cc.cost_center_id = |
1307 | - financing_contract_account_quadruplet.cost_center_id)''' % (contract.format_id.id,contract.format_id.id)) |
1308 | + financing_contract_account_quadruplet.cost_center_id)''', (contract.format_id.id,contract.format_id.id)) |
1309 | for id in [x[0] for x in cr.fetchall()]: |
1310 | exclude[id] = True |
1311 | for id in ids: |
1312 | @@ -140,7 +140,7 @@ |
1313 | from financing_contract_cost_center cc |
1314 | where cc.contract_id = %s |
1315 | and cc.cost_center_id = |
1316 | - financing_contract_account_quadruplet.cost_center_id)''' % (contract.format_id.id,contract.format_id.id)) |
1317 | + financing_contract_account_quadruplet.cost_center_id)''', (contract.format_id.id,contract.format_id.id)) |
1318 | someids = [] |
1319 | someids += [x[0] for x in cr.fetchall()] |
1320 | return [('id','in',someids)] |
1321 | @@ -170,7 +170,7 @@ |
1322 | financing_contract_format_line l |
1323 | where c.id = %s |
1324 | and f.id = c.format_id |
1325 | - and l.format_id = f.id)''' % (contract.format_id.id)) |
1326 | + and l.format_id = f.id)''', (contract.format_id.id,)) |
1327 | exclude += [x[0] for x in cr.fetchall()] |
1328 | for account_quadruplet in line.account_quadruplet_ids: |
1329 | exclude.append(account_quadruplet.id) |
1330 | |
1331 | === modified file 'bin/addons/mission_stock/mission_stock.py' |
1332 | --- bin/addons/mission_stock/mission_stock.py 2017-06-01 07:31:06 +0000 |
1333 | +++ bin/addons/mission_stock/mission_stock.py 2017-10-06 09:59:17 +0000 |
1334 | @@ -579,7 +579,7 @@ |
1335 | FROM |
1336 | stock_mission_report_line smrl WHERE mission_report_id = %s |
1337 | AND p.id = smrl.product_id) |
1338 | - ''' % report['id']) |
1339 | + ''', (report['id'],)) |
1340 | for product, prod_state, prod_active, prod_state_ud, prod_creator in cr.fetchall(): |
1341 | line_obj.create(cr, uid, { |
1342 | 'product_id': product, |
1343 | @@ -725,11 +725,10 @@ |
1344 | FROM stock_move |
1345 | WHERE state = 'done' |
1346 | AND id not in (SELECT move_id FROM mission_move_rel WHERE mission_id = %s) |
1347 | - ''' % (report_id)) |
1348 | + ''', (report_id,)) |
1349 | res = cr.fetchall() |
1350 | for move in res: |
1351 | - cr.execute('INSERT INTO mission_move_rel VALUES (%s, %s)' % |
1352 | - (report_id, move[0])) |
1353 | + cr.execute('INSERT INTO mission_move_rel VALUES (%s, %s)', (report_id, move[0])) |
1354 | product = product_obj.browse(cr, uid, move[1], |
1355 | fields_to_fetch=['uom_id', 'standard_price']) |
1356 | line_id = line_obj.search(cr, uid, [('product_id', '=', move[1]), |
1357 | @@ -1155,7 +1154,7 @@ |
1358 | central_qty=%s, cross_qty=%s, secondary_qty=%s, |
1359 | cu_qty=%s, in_pipe_qty=%s, in_pipe_coor_qty=%s, |
1360 | wh_qty=%s |
1361 | - WHERE id=%s""" % (line[1] or 0.00, line[2] or 0.00, |
1362 | + WHERE id=%s""" % (line[1] or 0.00, line[2] or 0.00, # not_a_user_entry |
1363 | line[3] or 0.00,line[4] or 0.00, line[5] or 0.00,line[6] or 0.00,line[7] or 0.00,line[8] or 0.00, (line[2] or 0.00) + (line[3] or 0.00), line_id)) |
1364 | return True |
1365 | |
1366 | |
1367 | === modified file 'bin/addons/msf_budget/msf_budget_line.py' |
1368 | --- bin/addons/msf_budget/msf_budget_line.py 2017-02-20 09:54:19 +0000 |
1369 | +++ bin/addons/msf_budget/msf_budget_line.py 2017-10-06 09:59:17 +0000 |
1370 | @@ -319,10 +319,10 @@ |
1371 | if budget_ok: |
1372 | month_names = self._get_month_names(month_number) |
1373 | sql = """ |
1374 | - SELECT id, COALESCE(""" + '+'.join(month_names) + """, 0.0) |
1375 | + SELECT id, COALESCE(%s, 0.0) |
1376 | FROM msf_budget_line |
1377 | - WHERE id IN %s; |
1378 | - """ |
1379 | + WHERE id IN %%s; |
1380 | + """ % '+'.join(month_names) # not_a_user_entry |
1381 | cr.execute(sql, (tuple(ids),)) |
1382 | tmp_res = cr.fetchall() |
1383 | if tmp_res: |
1384 | @@ -366,9 +366,9 @@ |
1385 | # Prepare some values |
1386 | res = {} |
1387 | sql = """ |
1388 | - SELECT id, COALESCE(""" + '+'.join(month_names) + """, 0.0) |
1389 | + SELECT id, COALESCE(%s, 0.0) |
1390 | FROM msf_budget_line |
1391 | - WHERE id IN %s""" |
1392 | + WHERE id IN %%s""" % '+'.join(month_names) # not_a_user_entry |
1393 | cr.execute(sql, (tuple(ids),)) |
1394 | tmp_res = cr.fetchall() |
1395 | if tmp_res: |
1396 | |
1397 | === modified file 'bin/addons/msf_cross_docking/cross_docking.py' |
1398 | --- bin/addons/msf_cross_docking/cross_docking.py 2016-08-18 08:32:03 +0000 |
1399 | +++ bin/addons/msf_cross_docking/cross_docking.py 2017-10-06 09:59:17 +0000 |
1400 | @@ -200,7 +200,7 @@ |
1401 | LEFT JOIN product_product p ON l.product_id = p.id |
1402 | LEFT JOIN product_template t ON p.product_tmpl_id = t.id |
1403 | LEFT JOIN purchase_order po ON l.order_id = po.id |
1404 | - WHERE (t.type != 'service_recep' %s) AND po.id in %%s LIMIT 1''' % transport_cat, |
1405 | + WHERE (t.type != 'service_recep' %s) AND po.id in %%s LIMIT 1''' % transport_cat, # not_a_user_entry |
1406 | (tuple(ids),)) |
1407 | res = cr.fetchall() |
1408 | |
1409 | |
1410 | === modified file 'bin/addons/msf_outgoing/msf_outgoing.py' |
1411 | --- bin/addons/msf_outgoing/msf_outgoing.py 2017-09-03 15:15:05 +0000 |
1412 | +++ bin/addons/msf_outgoing/msf_outgoing.py 2017-10-06 09:59:17 +0000 |
1413 | @@ -267,7 +267,7 @@ |
1414 | ) t |
1415 | group by t.id |
1416 | having sum(case when t.tp != 0 then t.tp - t.fp + 1 else 0 end) %s %s |
1417 | -''' % (args[0][1], args[0][2])) |
1418 | +''' % (args[0][1], args[0][2])) # not_a_user_entry |
1419 | return [('id', 'in', [x[0] for x in cr.fetchall()])] |
1420 | |
1421 | def _get_is_company(self, cr, uid, ids, field_name, args, context=None): |
1422 | @@ -1661,7 +1661,7 @@ |
1423 | pick_obj._hook_create_sync_messages(cr, uid, packing.id, context) # UF-1617: Create the sync message for batch and asset before shipping |
1424 | |
1425 | # UF-1617: set the flag to this packing object to indicate that the SHIP has been done, for synchronisation purpose |
1426 | - cr.execute('update stock_picking set already_shipped=\'t\' where id=%s' % packing.id) |
1427 | + cr.execute('update stock_picking set already_shipped=\'t\' where id=%s', (packing.id,)) |
1428 | |
1429 | |
1430 | # Create automatically the invoice |
1431 | @@ -2681,26 +2681,15 @@ |
1432 | return created_ids |
1433 | |
1434 | def get_current_pick_sequence_for_rw(self, cr, uid, picking_type, context=None): |
1435 | - cr.execute('SELECT id FROM ir_sequence WHERE code=\'' + picking_type + '\' and active=true ORDER BY id') |
1436 | + cr.execute('SELECT id FROM ir_sequence WHERE code=%s and active=true ORDER BY id', (picking_type,)) |
1437 | res = cr.dictfetchone() |
1438 | if res and res['id']: |
1439 | - cr.execute("SELECT last_value from ir_sequence_%03d" % res['id']) |
1440 | + cr.execute("SELECT last_value from ir_sequence_%03d" % res['id']) # not_a_user_entry |
1441 | res = cr.dictfetchone() |
1442 | if res and res['last_value']: |
1443 | return res['last_value'] |
1444 | return False |
1445 | |
1446 | - def alter_sequence_for_rw_pick(self, cr, uid, picking_type, value_to_force, context=None): |
1447 | - if not self._get_usb_entity_type(cr, uid, context): |
1448 | - return |
1449 | - |
1450 | - cr.execute('SELECT id FROM ir_sequence WHERE code=\'' + picking_type + '\' and active=true ORDER BY id') |
1451 | - res = cr.dictfetchone() |
1452 | - if res and res['id']: |
1453 | - seq = 'ir_sequence_%03d' % res['id'] |
1454 | - cr.execute("ALTER SEQUENCE " + seq +" RESTART WITH " + str(value_to_force)) |
1455 | - return |
1456 | - |
1457 | def create(self, cr, uid, vals, context=None): |
1458 | ''' |
1459 | creation of a stock.picking of subtype 'packing' triggers |
1460 | @@ -5212,7 +5201,7 @@ |
1461 | if isinstance(ids, (int, long)): |
1462 | ids = [ids] |
1463 | |
1464 | - cr.execute('select id, move_lines from ' + self._table + ' where id in %s', (tuple(ids),)) |
1465 | + cr.execute('select id, move_lines from ' + self._table + ' where id in %s', (tuple(ids),)) # not_a_user_entry |
1466 | for q_result in cr.fetchall(): |
1467 | result[q_result[0]]['move_lines'] = q_result[1] or [] |
1468 | return result |
1469 | |
1470 | === modified file 'bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py' |
1471 | --- bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2017-01-23 16:14:11 +0000 |
1472 | +++ bin/addons/msf_outgoing/wizard/incoming_shipment_processor.py 2017-10-06 09:59:17 +0000 |
1473 | @@ -504,7 +504,7 @@ |
1474 | Just used to not break default OpenERP behaviour |
1475 | """ |
1476 | if name and value: |
1477 | - sql = "UPDATE "+ self._table + " SET " + name + " = %s WHERE id = %s" |
1478 | + sql = "UPDATE "+ self._table + " SET " + name + " = %s WHERE id = %s" # not_a_user_entry |
1479 | cr.execute(sql, (value, ml_id)) |
1480 | return True |
1481 | |
1482 | |
1483 | === modified file 'bin/addons/msf_profile/msf_profile.py' |
1484 | --- bin/addons/msf_profile/msf_profile.py 2017-09-08 15:51:54 +0000 |
1485 | +++ bin/addons/msf_profile/msf_profile.py 2017-10-06 09:59:17 +0000 |
1486 | @@ -287,7 +287,7 @@ |
1487 | self._logger.warn('The objects linked to the model(s) %s will be removed from ir_model_data.' % model_to_remove_pp) |
1488 | |
1489 | for model in model_to_remove: |
1490 | - cr.execute("DELETE FROM ir_model_data WHERE model='%s' AND module='sd'" % model) |
1491 | + cr.execute("DELETE FROM ir_model_data WHERE model=%s AND module='sd'", (model,)) |
1492 | current_count = cr.rowcount |
1493 | removed_obj += current_count |
1494 | self._logger.warn('ir.model.data, model=%s, %s objects deleted.' % (model, current_count)) |
1495 | @@ -366,7 +366,7 @@ |
1496 | and d.model='res.partner' |
1497 | and name not in ('msf_doc_import_supplier_tbd', 'order_types_res_partner_local_market') |
1498 | and name not like '%s%%' |
1499 | - ) """ % (identifier, )) |
1500 | + ) """ % (identifier, )) # not_a_user_entry |
1501 | self._logger.warn('%s non local partners updated' % (cr.rowcount,)) |
1502 | return True |
1503 | |
1504 | @@ -1277,22 +1277,22 @@ |
1505 | cr.execute("""update ir_model_data set last_modification=NOW() where module='sd' and model='ir.translation' and res_id in ( |
1506 | select id from ir_translation t where t.lang in ('en_MF', 'fr_MF') and name='product.template,name' and res_id in |
1507 | (select t.id from product_template t, product_product p where p.product_tmpl_id = t.id and international_status=6) |
1508 | - and name like '"""+instance_id+"""%' |
1509 | - )""") |
1510 | + and name like '%s%%' |
1511 | + )""" % instance_id) # not_a_user_entry |
1512 | cr.execute("""delete from ir_translation t |
1513 | where t.lang in ('en_MF', 'fr_MF') and name='product.template,name' and res_id in |
1514 | (select t.id from product_template t, product_product p where p.product_tmpl_id = t.id and international_status=6) |
1515 | and id in |
1516 | - (select d.res_id from ir_model_data d where d.module='sd' and d.model='ir.translation' and name like '"""+instance_id+"""%') |
1517 | - """) |
1518 | + (select d.res_id from ir_model_data d where d.module='sd' and d.model='ir.translation' and name like '%s%%') |
1519 | + """ % instance_id) # not_a_user_entry |
1520 | if coordo_id and instance_name in ('OCBHT118', 'OCBHT143'): |
1521 | # also remove old UniData trans sent by coordo |
1522 | cr.execute("""delete from ir_translation t |
1523 | where t.lang in ('en_MF', 'fr_MF') and name='product.template,name' and res_id in |
1524 | (select t.id from product_template t, product_product p where p.product_tmpl_id = t.id and international_status=6) |
1525 | and id in |
1526 | - (select d.res_id from ir_model_data d where d.module='sd' and d.model='ir.translation' and name like '"""+coordo_id+"""%') |
1527 | - """) |
1528 | + (select d.res_id from ir_model_data d where d.module='sd' and d.model='ir.translation' and name like '%s%%') |
1529 | + """ % coordo_id) # not_a_user_entry |
1530 | |
1531 | self._logger.warn('%s local translation for UniData products deleted' % (cr.rowcount,)) |
1532 | |
1533 | @@ -1423,7 +1423,7 @@ |
1534 | SET touched = '[''state_ud'', ''product_active'', ''international_status_code'']', last_modification = now() |
1535 | WHERE model = 'stock.mission.report.line' AND res_id IN ( |
1536 | SELECT id FROM stock_mission_report_line WHERE mission_report_id IN %s) |
1537 | - ''' % (tuple(smr_ids),)) |
1538 | + ''', (tuple(smr_ids),)) |
1539 | |
1540 | return True |
1541 | |
1542 | @@ -1711,10 +1711,8 @@ |
1543 | |
1544 | def set_config(self, cr): |
1545 | data = ['smtp_server', 'email_from', 'smtp_port', 'smtp_ssl', 'smtp_user', 'smtp_password'] |
1546 | - cr.execute("""select """+','.join(data)+""" |
1547 | - from email_configuration |
1548 | - limit 1 |
1549 | - """) |
1550 | + cr.execute("""select %s from email_configuration |
1551 | + limit 1""" % ','.join(data)) # not_a_user_entry |
1552 | res = cr.fetchone() |
1553 | if res: |
1554 | for i, key in enumerate(data): |
1555 | |
1556 | === modified file 'bin/addons/msf_profile/user_access_configurator.py' |
1557 | --- bin/addons/msf_profile/user_access_configurator.py 2017-02-20 14:38:29 +0000 |
1558 | +++ bin/addons/msf_profile/user_access_configurator.py 2017-10-06 09:59:17 +0000 |
1559 | @@ -686,10 +686,10 @@ |
1560 | models_to_clean = ['ir.model.access', 'ir.rule'] |
1561 | for model in models_to_clean: |
1562 | m_obj = self.pool.get(model) |
1563 | - cr.execute('''select m.id from '''+ m_obj._table+''' m |
1564 | - left join ir_model_data d on d.res_id = m.id and d.model = %s |
1565 | + cr.execute('''select m.id from %s m |
1566 | + left join ir_model_data d on d.res_id = m.id and d.model = %%s |
1567 | where module not in ('sd', 'sync_client', 'sync_server', 'sync_common', 'sync_so', 'update_client', 'update_server', '') |
1568 | - ''', (model,)) |
1569 | + ''' % m_obj._table, (model,)) # not_a_user_entry |
1570 | ids_to_del = [x[0] for x in cr.fetchall()] |
1571 | if ids_to_del: |
1572 | m_obj.unlink(cr, 1, ids_to_del) |
1573 | |
1574 | === modified file 'bin/addons/msf_tools/msf_tools.py' |
1575 | --- bin/addons/msf_tools/msf_tools.py 2017-06-08 07:14:54 +0000 |
1576 | +++ bin/addons/msf_tools/msf_tools.py 2017-10-06 09:59:17 +0000 |
1577 | @@ -221,7 +221,7 @@ |
1578 | res_record = cr.fetchone() |
1579 | if res_record and res_record[0]: |
1580 | # drop existing constraint |
1581 | - tpl_drop_const = "alter table %s drop constraint %s" % sql_params |
1582 | + tpl_drop_const = "alter table %s drop constraint %s" % sql_params # not_a_user_entry |
1583 | cr.execute(tpl_drop_const) |
1584 | |
1585 | def domain_get_field_index(self, domain, field_name): |
1586 | @@ -424,7 +424,7 @@ |
1587 | previous_values = dest_obj.read(cr, uid, [item_data[i]['id']], [seq_field], context=context) |
1588 | audit_obj.audit_log(cr, uid, to_trace, dest_obj, [item_data[i]['id']], 'write', previous_values, {item_data[i]['id']: {seq_field: start_num}}, context=context) |
1589 | |
1590 | - cr.execute("update "+dest_obj._table+" set "+seq_field+"=%s where id=%s", (start_num, item_data[i]['id'])) |
1591 | + cr.execute("update "+dest_obj._table+" set "+seq_field+"=%s where id=%s", (start_num, item_data[i]['id'])) # not_a_user_entry |
1592 | #dest_obj.write(cr, uid, [item_data[i]['id']], {seq_field: start_num}, context=context) |
1593 | |
1594 | # reset sequence to start_num + 1 all time, checking if needed would take much time |
1595 | @@ -729,9 +729,8 @@ |
1596 | parent_name = translation.name.split(',')[0] |
1597 | |
1598 | obj = self.pool.get(parent_name) |
1599 | - sql = "SELECT id FROM " + obj._table + \ |
1600 | - " WHERE id=" + str(translation.res_id) |
1601 | - cr.execute(sql) |
1602 | + sql = "SELECT id FROM " + obj._table + " WHERE id=%s" # not_a_user_entry |
1603 | + cr.execute(sql, (translation.res_id,)) |
1604 | res = cr.fetchall() |
1605 | if not res: |
1606 | unlink_ids.append(translation.id) |
1607 | |
1608 | === modified file 'bin/addons/object_query/query.py' |
1609 | --- bin/addons/object_query/query.py 2016-08-03 17:53:09 +0000 |
1610 | +++ bin/addons/object_query/query.py 2017-10-06 09:59:17 +0000 |
1611 | @@ -350,7 +350,7 @@ |
1612 | if to_del: |
1613 | self.pool.get('object.query.selection_data').unlink(cr, uid, to_del) |
1614 | if obj.object_id: |
1615 | - cr.execute("update "+self._table+" set newquery='f'"); |
1616 | + cr.execute("update "+self._table+" set newquery='f'"); # not_a_user_entry |
1617 | return True |
1618 | |
1619 | _constraints = [ |
1620 | |
1621 | === modified file 'bin/addons/product/pricelist.py' |
1622 | --- bin/addons/product/pricelist.py 2016-01-04 13:35:41 +0000 |
1623 | +++ bin/addons/product/pricelist.py 2017-10-06 09:59:17 +0000 |
1624 | @@ -257,7 +257,7 @@ |
1625 | 'product_pricelist_version AS v, product_pricelist AS pl ' |
1626 | 'WHERE (product_tmpl_id IS NULL OR product_tmpl_id = %s) ' |
1627 | 'AND (product_id IS NULL OR product_id = %s) ' |
1628 | - 'AND (' + categ_where + ' OR (categ_id IS NULL)) ' |
1629 | + 'AND (' + categ_where + ' OR (categ_id IS NULL)) ' # not_a_user_entry |
1630 | 'AND price_version_id = %s ' |
1631 | 'AND (min_quantity IS NULL OR min_quantity <= %s) ' |
1632 | 'AND i.price_version_id = v.id AND v.pricelist_id = pl.id ' |
1633 | @@ -399,7 +399,7 @@ |
1634 | 'product_pricelist_version AS v, product_pricelist AS pl ' |
1635 | 'WHERE (product_tmpl_id IS NULL OR product_tmpl_id = %s) ' |
1636 | 'AND (product_id IS NULL OR product_id = %s) ' |
1637 | - 'AND (' + categ_where + ' OR (categ_id IS NULL)) ' |
1638 | + 'AND (' + categ_where + ' OR (categ_id IS NULL)) ' # not_a_user_entry |
1639 | 'AND price_version_id = %s ' |
1640 | 'AND (min_quantity IS NULL OR min_quantity <= %s) ' |
1641 | 'AND i.price_version_id = v.id AND v.pricelist_id = pl.id ' |
1642 | @@ -517,7 +517,7 @@ |
1643 | |
1644 | cursor.execute('SELECT id ' \ |
1645 | 'FROM product_pricelist_version ' \ |
1646 | - 'WHERE '+' and '.join(where) + (where and ' and ' or '')+ |
1647 | + 'WHERE '+' and '.join(where) + (where and ' and ' or '')+ # not_a_user_entry |
1648 | 'pricelist_id = %s ' \ |
1649 | 'AND active ' \ |
1650 | 'AND id <> %s', ( |
1651 | |
1652 | === modified file 'bin/addons/product_attributes/product_attributes.py' |
1653 | --- bin/addons/product_attributes/product_attributes.py 2017-06-12 16:07:45 +0000 |
1654 | +++ bin/addons/product_attributes/product_attributes.py 2017-10-06 09:59:17 +0000 |
1655 | @@ -330,19 +330,19 @@ |
1656 | touched='[''%s'']' |
1657 | WHERE model = 'product.product' |
1658 | AND res_id IN (%s) |
1659 | - ''' % (new_column, ids_req)) |
1660 | + ''' % (new_column, ids_req)) # not_a_user_entry |
1661 | |
1662 | # Make the migration |
1663 | if new_column == 'standard_ok': |
1664 | - request = 'UPDATE product_product SET standard_ok = \'True\' WHERE %s = True' % moved_column |
1665 | + request = 'UPDATE product_product SET standard_ok = \'True\' WHERE %s = True' % moved_column # not_a_user_entry |
1666 | cr.execute(request) |
1667 | |
1668 | if new_column == 'dangerous_goods': |
1669 | - request = 'UPDATE product_product SET is_dg = True, dg_txt = \'X\', dangerous_goods = \'True\' WHERE %s = True' % moved_column |
1670 | + request = 'UPDATE product_product SET is_dg = True, dg_txt = \'X\', dangerous_goods = \'True\' WHERE %s = True' % moved_column # not_a_user_entry |
1671 | cr.execute(request) |
1672 | |
1673 | if new_column == 'short_shelf_life': |
1674 | - request = 'UPDATE product_product SET is_ssl = True, ssl_txt = \'X\', short_shelf_life = \'True\' WHERE %s = True' % moved_column |
1675 | + request = 'UPDATE product_product SET is_ssl = True, ssl_txt = \'X\', short_shelf_life = \'True\' WHERE %s = True' % moved_column # not_a_user_entry |
1676 | cr.execute(request) |
1677 | |
1678 | if new_column == 'controlled_substance': |
1679 | @@ -351,7 +351,7 @@ |
1680 | controlled_substance = 'True', |
1681 | is_cs = True, |
1682 | cs_txt = 'X' |
1683 | - WHERE %s = True OR narcotic = True''' % moved_column |
1684 | + WHERE %s = True OR narcotic = True''' % moved_column # not_a_user_entry |
1685 | cr.execute(request) |
1686 | |
1687 | return |
1688 | @@ -1792,7 +1792,7 @@ |
1689 | ''' |
1690 | res = {} |
1691 | if default_code: |
1692 | - cr.execute("SELECT * FROM product_product pp where pp.default_code = '%s'" % default_code) |
1693 | + cr.execute("SELECT * FROM product_product pp where pp.default_code = %s", (default_code,)) |
1694 | duplicate = cr.fetchall() |
1695 | if duplicate: |
1696 | res.update({'warning': {'title': 'Warning', 'message':'The Code already exists'}}) |
1697 | |
1698 | === modified file 'bin/addons/product_expiry/product_expiry.py' |
1699 | --- bin/addons/product_expiry/product_expiry.py 2016-08-18 08:32:03 +0000 |
1700 | +++ bin/addons/product_expiry/product_expiry.py 2017-10-06 09:59:17 +0000 |
1701 | @@ -152,7 +152,7 @@ |
1702 | # 1. Table stock_move |
1703 | sql_x = 'select lot.name, move.product_id, move.' + field_expiry_name + ' as life_date from ' + table_name + ' move, stock_production_lot lot, product_product prod where move.' |
1704 | sql_x = sql_x + field_id + ' = lot.id and move.product_id != lot.product_id and move.product_id = prod.id and ' |
1705 | - sql_x = sql_x + ' (prod.batch_management = \'t\' OR prod.perishable = \'t\') group by lot.name, move.product_id, move.' + field_expiry_name +' order by lot.name;' |
1706 | + sql_x = sql_x + ' (prod.batch_management = \'t\' OR prod.perishable = \'t\') group by lot.name, move.product_id, move.' + field_expiry_name +' order by lot.name;' # not_a_user_entry |
1707 | |
1708 | cr.execute(sql_x) |
1709 | |
1710 | @@ -180,7 +180,7 @@ |
1711 | batch_id = batch_id[0] |
1712 | |
1713 | sql_up = 'update ' + table_name + ' set ' + field_id + '=' + str(batch_id) + ' where id in (select move.id from ' + table_name |
1714 | - sql_up = sql_up + ' move, stock_production_lot lot where move.' + field_id + " = lot.id and lot.name= '" + batch_name + "' and move.product_id = " + str(prod_id) + " and move." + field_expiry_name + "='" + life_date + "');" |
1715 | + sql_up = sql_up + ' move, stock_production_lot lot where move.' + field_id + " = lot.id and lot.name= '" + batch_name + "' and move.product_id = " + str(prod_id) + " and move." + field_expiry_name + "='" + life_date + "');" # not_a_user_entry |
1716 | |
1717 | cr.execute(sql_up) |
1718 | self._logger.info("--- Step 3: Batch already created. Now assign all the ref lines of table %s with wrong batch references to the new batch: %s\n"%(table_name, batch_id)) |
1719 | @@ -214,7 +214,7 @@ |
1720 | self._logger.info("--- Step 3: Now assign all the ref lines of table %s with wrong batch references to the new batch: %s\n"%(table_name, batch_id)) |
1721 | if batch_id: |
1722 | sql_up = 'update ' + table_name + ' set ' + field_id + ' = ' + str(batch_id) + ' where product_id=' + str(prod_id) + ' and ' + field_id + '=' + str(existing_batch.id) |
1723 | - sql_up = sql_up + " and " + field_expiry_name + "='" + life_date + "';" |
1724 | + sql_up = sql_up + " and " + field_expiry_name + "='" + life_date + "';" # not_a_user_entry |
1725 | cr.execute(sql_up) |
1726 | |
1727 | self._logger.info("__________Finish the migration task on duplicate batch objects for table: %s\n", table_name) |
1728 | @@ -337,7 +337,7 @@ |
1729 | self.update_table(cr, uid, element[0] , element[1], wrong_id, lead_id, batch_name) |
1730 | |
1731 | def update_table(self, cr, uid, table_name, field_id, wrong_id, lead_id, batch_name): |
1732 | - cr.execute('select count(*) as amount from ' + table_name + ' where ' + field_id + ' = %s;' %(wrong_id,)) |
1733 | + cr.execute('select count(*) as amount from ' + table_name + ' where ' + field_id + ' =%s', (wrong_id,)) # not_a_user_entry |
1734 | count = cr.fetchone()[0] |
1735 | if count > 0: # Only update the table if wrong bn exists |
1736 | self._logger.info("Table %s has %s batch objects (%s) and will be-mapped.\n" %(table_name, count, batch_name,)) |
1737 | @@ -353,8 +353,8 @@ |
1738 | del_sql_update = "DELETE FROM real_average_consumption_line WHERE prodlot_id = %s" |
1739 | cr.execute(del_sql_update, (wrong_id,)) |
1740 | else: |
1741 | - sql_update = "update " + table_name + " set " + field_id + "=" + str(lead_id) + " where " + field_id + "=" + str(wrong_id) |
1742 | - cr.execute(sql_update) |
1743 | + sql_update = "update " + table_name + " set " + field_id + "=%s" + " where " + field_id + "=%s" # not_a_user_entry |
1744 | + cr.execute(sql_update, (lead_id, wrong_id)) |
1745 | else: |
1746 | self._logger.info("Table %s has NO duplicate batch (%s).\n" %(table_name, batch_name,)) |
1747 | |
1748 | |
1749 | === modified file 'bin/addons/register_accounting/account_analytic_line.py' |
1750 | --- bin/addons/register_accounting/account_analytic_line.py 2017-04-05 13:57:20 +0000 |
1751 | +++ bin/addons/register_accounting/account_analytic_line.py 2017-10-06 09:59:17 +0000 |
1752 | @@ -66,7 +66,7 @@ |
1753 | return True |
1754 | if isinstance(ids, (int, long)): |
1755 | ids = [ids] |
1756 | - sql = "UPDATE " + self._table + " SET partner_txt = %s WHERE id in %s" |
1757 | + sql = "UPDATE " + self._table + " SET partner_txt = %s WHERE id in %s" # not_a_user_entry |
1758 | cr.execute(sql, (value or None, tuple(ids))) |
1759 | return True |
1760 | |
1761 | |
1762 | === modified file 'bin/addons/register_accounting/register_tools.py' |
1763 | --- bin/addons/register_accounting/register_tools.py 2017-09-01 13:58:46 +0000 |
1764 | +++ bin/addons/register_accounting/register_tools.py 2017-10-06 09:59:17 +0000 |
1765 | @@ -65,7 +65,7 @@ |
1766 | if value: |
1767 | fields = value.split(",") |
1768 | element = fields[0] |
1769 | - sql = "UPDATE %s SET " % self._table |
1770 | + sql = "UPDATE %s SET " % self._table # not_a_user_entry |
1771 | emp_val = 'Null' |
1772 | par_val = 'Null' |
1773 | tra_val = 'Null' |
1774 | @@ -80,7 +80,7 @@ |
1775 | cr.execute(sql) |
1776 | # Delete values for Third Parties if no value given |
1777 | elif name == 'partner_type' and not value: |
1778 | - cr.execute("UPDATE %s SET employee_id = Null, partner_id = Null, transfer_journal_id = Null WHERE id = %s" % (self._table, obj_id)) |
1779 | + cr.execute("UPDATE %s SET employee_id = Null, partner_id = Null, transfer_journal_id = Null WHERE id = %%s" % self._table, (obj_id,)) # not_a_user_entry |
1780 | return True |
1781 | |
1782 | |
1783 | |
1784 | === modified file 'bin/addons/register_accounting/report/report_open_advances.py' |
1785 | --- bin/addons/register_accounting/report/report_open_advances.py 2014-03-07 11:05:37 +0000 |
1786 | +++ bin/addons/register_accounting/report/report_open_advances.py 2017-10-06 09:59:17 +0000 |
1787 | @@ -61,10 +61,10 @@ |
1788 | account.type_for_register = 'advance' AND |
1789 | line.state = 'valid' AND |
1790 | line.reconcile_id IS NULL AND |
1791 | - line.date <= '%s' |
1792 | + line.date <= %s |
1793 | ORDER BY account_name, booking_currency.name, line.partner_txt, line.date |
1794 | - """ % (time.strftime('%Y-%m-%d')) |
1795 | - cr.execute(sql_open_advances) |
1796 | + """ |
1797 | + cr.execute(sql_open_advances, (time.strftime('%Y-%m-%d'),)) |
1798 | res = header + cr.fetchall() |
1799 | |
1800 | b = StringIO.StringIO() |
1801 | |
1802 | === modified file 'bin/addons/register_accounting/wizard/wizard_liquidity_position.py' |
1803 | --- bin/addons/register_accounting/wizard/wizard_liquidity_position.py 2016-07-20 13:59:13 +0000 |
1804 | +++ bin/addons/register_accounting/wizard/wizard_liquidity_position.py 2017-10-06 09:59:17 +0000 |
1805 | @@ -64,8 +64,8 @@ |
1806 | LEFT JOIN account_journal aj ON abs.journal_id = aj.id |
1807 | WHERE aj.type != 'cheque' |
1808 | AND abs.state != 'draft' |
1809 | - AND abs.period_id = """ + str(context['period_id']) |
1810 | - cr.execute(sql_register_ids) |
1811 | + AND abs.period_id = %s""" |
1812 | + cr.execute(sql_register_ids, (str(context['period_id']),)) |
1813 | |
1814 | if not cr.fetchall(): |
1815 | # No registers found |
1816 | |
1817 | === modified file 'bin/addons/res_currency_functional/account_move_compute_currency.py' |
1818 | --- bin/addons/res_currency_functional/account_move_compute_currency.py 2016-11-17 08:46:41 +0000 |
1819 | +++ bin/addons/res_currency_functional/account_move_compute_currency.py 2017-10-06 09:59:17 +0000 |
1820 | @@ -76,7 +76,7 @@ |
1821 | for arg in args: |
1822 | if args[0] and args[0][1] and args[0][1] in ['in', '='] and args[0][2]: |
1823 | # create SQL request |
1824 | - sql = sql_base + ' in %s\nGROUP BY ml.id' |
1825 | + sql = sql_base + ' in %s\nGROUP BY ml.id' # not_a_user_entry |
1826 | second = args[0][2] |
1827 | # execute it and fetch result |
1828 | if isinstance(second, (int, long)): |
1829 | |
1830 | === modified file 'bin/addons/sale/sale.py' |
1831 | --- bin/addons/sale/sale.py 2017-08-03 15:16:40 +0000 |
1832 | +++ bin/addons/sale/sale.py 2017-10-06 09:59:17 +0000 |
1833 | @@ -171,7 +171,7 @@ |
1834 | |
1835 | cursor.execute('SELECT rel.order_id ' \ |
1836 | 'FROM sale_order_invoice_rel AS rel, account_invoice AS inv '+ sale_clause + \ |
1837 | - 'WHERE rel.invoice_id = inv.id ' + clause) |
1838 | + 'WHERE rel.invoice_id = inv.id ' + clause) # not_a_user_entry |
1839 | res = cursor.fetchall() |
1840 | if no_invoiced: |
1841 | cursor.execute('SELECT sale.id ' \ |
1842 | |
1843 | === modified file 'bin/addons/sale_override/sale.py' |
1844 | --- bin/addons/sale_override/sale.py 2017-09-21 12:57:16 +0000 |
1845 | +++ bin/addons/sale_override/sale.py 2017-10-06 09:59:17 +0000 |
1846 | @@ -257,7 +257,7 @@ |
1847 | slsdr.document_id IN %%s |
1848 | %s |
1849 | GROUP BY sol.type |
1850 | - ''' % where_sql |
1851 | + ''' % where_sql # not_a_user_entry |
1852 | cr.execute(sql, where_params) |
1853 | res = cr.dictfetchall() |
1854 | |
1855 | @@ -685,7 +685,7 @@ |
1856 | |
1857 | cursor.execute('SELECT rel.order_id ' \ |
1858 | 'FROM sale_order_invoice_rel AS rel, account_invoice AS inv, sale_order AS sale, res_partner AS part ' + sale_clause + \ |
1859 | - 'WHERE rel.invoice_id = inv.id AND rel.order_id = sale.id AND sale.partner_id = part.id ' + clause) |
1860 | + 'WHERE rel.invoice_id = inv.id AND rel.order_id = sale.id AND sale.partner_id = part.id ' + clause) # not_a_user_entry |
1861 | res = cursor.fetchall() |
1862 | if no_invoiced: |
1863 | cursor.execute('SELECT sale.id ' \ |
1864 | @@ -1091,7 +1091,7 @@ |
1865 | LEFT JOIN product_product p ON l.product_id = p.id |
1866 | LEFT JOIN product_template t ON p.product_tmpl_id = t.id |
1867 | LEFT JOIN sale_order fo ON l.order_id = fo.id |
1868 | - WHERE (t.type != 'service_recep' %s) AND fo.id in %%s LIMIT 1''' % transport_cat, |
1869 | + WHERE (t.type != 'service_recep' %s) AND fo.id in %%s LIMIT 1''' % transport_cat, # not_a_user_entry |
1870 | (tuple(ids),)) |
1871 | res = cr.fetchall() |
1872 | |
1873 | |
1874 | === modified file 'bin/addons/specific_rules/specific_rules.py' |
1875 | --- bin/addons/specific_rules/specific_rules.py 2017-08-23 14:10:02 +0000 |
1876 | +++ bin/addons/specific_rules/specific_rules.py 2017-10-06 09:59:17 +0000 |
1877 | @@ -1093,7 +1093,7 @@ |
1878 | stock_report_prodlots_virtual |
1879 | where |
1880 | location_id IN %s group by prodlot_id |
1881 | - having sum(qty) '''+ str(args[0][1]) + str(args[0][2]),(tuple(locations),)) |
1882 | + having sum(qty) '''+ str(args[0][1]) + str(args[0][2]),(tuple(locations),)) # not_a_user_entry |
1883 | res = cr.fetchall() |
1884 | ids = [('id', 'in', map(lambda x: x[0], res))] |
1885 | return ids |
1886 | @@ -1600,7 +1600,7 @@ |
1887 | l.inventory_id in %%s |
1888 | GROUP BY l.product_id, l.location_id, l.%s, l.expiry_date |
1889 | HAVING count(l.id) > 1 |
1890 | - ORDER BY count(l.id) DESC""" % ( |
1891 | + ORDER BY count(l.id) DESC""" % ( # not_a_user_entry |
1892 | self._name.replace('.', '_'), |
1893 | self._name == 'stock.inventory' and 'prod_lot_id' or 'prodlot_name', |
1894 | ) |
1895 | |
1896 | === modified file 'bin/addons/specific_rules/unconsistent_stock_report.py' |
1897 | --- bin/addons/specific_rules/unconsistent_stock_report.py 2016-02-05 09:13:42 +0000 |
1898 | +++ bin/addons/specific_rules/unconsistent_stock_report.py 2017-10-06 09:59:17 +0000 |
1899 | @@ -88,7 +88,7 @@ |
1900 | %s |
1901 | GROUP BY |
1902 | srp.prodlot_id, srp.location_id, srp.product_id, spl.life_date |
1903 | - ''' % extra_where |
1904 | + ''' % extra_where # not_a_user_entry |
1905 | cr.execute(request, (bm, perishable)) |
1906 | |
1907 | for r in cr.dictfetchall(): |
1908 | @@ -198,7 +198,7 @@ |
1909 | AND |
1910 | pp.perishable = %%s |
1911 | %s |
1912 | - ''' % extra_where |
1913 | + ''' % extra_where # not_a_user_entry |
1914 | cr.execute(request, (bm, perishable)) |
1915 | |
1916 | for r in cr.dictfetchall(): |
1917 | @@ -264,7 +264,7 @@ |
1918 | ) |
1919 | GROUP BY |
1920 | sil.product_id, sil.prod_lot_id, sil.expiry_date, sil.location_id, document_number |
1921 | - ''' % (name, model, model) |
1922 | + ''' % (name, model, model) # not_a_user_entry |
1923 | cr.execute(request) |
1924 | |
1925 | for r in cr.dictfetchall(): |
1926 | |
1927 | === modified file 'bin/addons/stock/product.py' |
1928 | --- bin/addons/stock/product.py 2016-08-26 14:25:42 +0000 |
1929 | +++ bin/addons/stock/product.py 2017-10-06 09:59:17 +0000 |
1930 | @@ -253,27 +253,25 @@ |
1931 | date_str = date_str and ' AND %s '% date_str or '' |
1932 | if 'in' in what: |
1933 | # all moves from a location out of the set to a location in the set |
1934 | - cr.execute( |
1935 | - 'select sum(product_qty), product_id, product_uom '\ |
1936 | - 'from stock_move '\ |
1937 | - 'where location_id NOT IN %s '\ |
1938 | - 'and location_dest_id IN %s '\ |
1939 | - 'and product_id IN %s '\ |
1940 | - '' + prodlot_id_str + ' '\ |
1941 | - 'and state IN %s ' + date_str +' '\ |
1942 | - 'group by product_id,product_uom',tuple(where)) |
1943 | + cr.execute(""" |
1944 | + select sum(product_qty), product_id, product_uom |
1945 | + from stock_move |
1946 | + where location_id NOT IN %%s |
1947 | + and location_dest_id IN %%s |
1948 | + and product_id IN %%s %s |
1949 | + and state in %%s %s |
1950 | + group by product_id,product_uom""" % (prodlot_id_str, date_str),tuple(where)) # not_a_user_entry |
1951 | results = cr.fetchall() |
1952 | if 'out' in what: |
1953 | # all moves from a location in the set to a location out of the set |
1954 | - cr.execute( |
1955 | - 'select sum(product_qty), product_id, product_uom '\ |
1956 | - 'from stock_move '\ |
1957 | - 'where location_id IN %s '\ |
1958 | - 'and location_dest_id NOT IN %s '\ |
1959 | - 'and product_id IN %s '\ |
1960 | - '' + prodlot_id_str + ' '\ |
1961 | - 'and state in %s ' + date_str + ' '\ |
1962 | - 'group by product_id,product_uom',tuple(where)) |
1963 | + cr.execute(""" |
1964 | + select sum(product_qty), product_id, product_uom |
1965 | + from stock_move |
1966 | + where location_id IN %%s |
1967 | + and location_dest_id NOT IN %%s |
1968 | + and product_id IN %%s %s |
1969 | + and state in %%s %s |
1970 | + group by product_id,product_uom""" % (prodlot_id_str, date_str),tuple(where)) # not_a_user_entry |
1971 | results2 = cr.fetchall() |
1972 | |
1973 | if results or results2: |
1974 | |
1975 | === modified file 'bin/addons/stock/stock.py' |
1976 | --- bin/addons/stock/stock.py 2017-08-23 14:10:02 +0000 |
1977 | +++ bin/addons/stock/stock.py 2017-10-06 09:59:17 +0000 |
1978 | @@ -560,13 +560,14 @@ |
1979 | ids = [ids] |
1980 | for pick in self.read(cr, uid, ids,['max_date'], context=context): |
1981 | sql_str = """update stock_move set |
1982 | - date='%s' |
1983 | + date=%s |
1984 | where |
1985 | - picking_id=%d """ % (value, pick['id']) |
1986 | - |
1987 | + picking_id=%s""" |
1988 | + sql_params = (value, pick['id']) |
1989 | if pick['max_date']: |
1990 | - sql_str += " and (date='" + pick['max_date'] + "' or date>'" + value + "')" |
1991 | - cr.execute(sql_str) |
1992 | + sql_str += " and (date=%s or date>%s)" |
1993 | + sql_params.extend((pick['max_date'], value)) |
1994 | + cr.execute(sql_str, sql_params) |
1995 | return True |
1996 | |
1997 | def _set_minimum_date(self, cr, uid, ids, name, value, arg, context=None): |
1998 | @@ -582,12 +583,14 @@ |
1999 | ids = [ids] |
2000 | for pick in self.read(cr, uid, ids, ['min_date'], context=context): |
2001 | sql_str = """update stock_move set |
2002 | - date='%s' |
2003 | + date=%s |
2004 | where |
2005 | - picking_id=%s """ % (value, pick['id']) |
2006 | + picking_id=%s""" |
2007 | + sql_params = (value, pick['id']) |
2008 | if pick['min_date']: |
2009 | - sql_str += " and (date='" + pick['min_date'] + "' or date<'" + value + "')" |
2010 | - cr.execute(sql_str) |
2011 | + sql_str += " and (date=%s or date<%s)" |
2012 | + sql_params.extend((pick['min_date'], value)) |
2013 | + cr.execute(sql_str, sql_params) |
2014 | return True |
2015 | |
2016 | def get_min_max_date(self, cr, uid, ids, field_name, arg, context=None): |
2017 | @@ -1656,7 +1659,7 @@ |
2018 | stock_report_prodlots |
2019 | where |
2020 | location_id IN %s group by prodlot_id |
2021 | - having sum(qty) '''+ str(args[0][1]) + str(args[0][2]),(tuple(locations),)) |
2022 | + having sum(qty) '''+ str(args[0][1]) + str(args[0][2]),(tuple(locations),)) # not_a_user_entry |
2023 | res = cr.fetchall() |
2024 | ids = [('id', 'in', map(lambda x: x[0], res))] |
2025 | return ids |
2026 | |
2027 | === modified file 'bin/addons/supplier_catalogue/supplier_catalogue.py' |
2028 | --- bin/addons/supplier_catalogue/supplier_catalogue.py 2017-08-23 21:01:21 +0000 |
2029 | +++ bin/addons/supplier_catalogue/supplier_catalogue.py 2017-10-06 09:59:17 +0000 |
2030 | @@ -279,7 +279,7 @@ |
2031 | # should be updated accordingly (that could be long operation) |
2032 | cr.execute('''SELECT partner_info_id |
2033 | FROM supplier_catalogue_line |
2034 | - WHERE catalogue_id = %s ''' % (ids[0])) |
2035 | + WHERE catalogue_id = %s ''', (ids[0],)) |
2036 | pricelist_ids = [x[0] for x in cr.fetchall() if x[0]] |
2037 | price_obj.write(cr, uid, pricelist_ids, new_price_vals, context=context) |
2038 | |
2039 | @@ -360,13 +360,13 @@ |
2040 | cr.execute('''delete from pricelist_partnerinfo |
2041 | where id in (select partner_info_id |
2042 | from supplier_catalogue_line |
2043 | - where catalogue_id = %s)''' % (ids[0])) |
2044 | + where catalogue_id = %s)''', (ids[0],)) |
2045 | cr.execute('''delete from product_supplierinfo |
2046 | where id in (select supplier_info_id |
2047 | from supplier_catalogue_line |
2048 | where catalogue_id = %s) |
2049 | and id not in (select suppinfo_id from |
2050 | - pricelist_partnerinfo ) ''' % (ids[0])) |
2051 | + pricelist_partnerinfo ) ''', (ids[0],)) |
2052 | |
2053 | |
2054 | return True |
2055 | @@ -1013,13 +1013,14 @@ |
2056 | cr.execute('''delete from pricelist_partnerinfo |
2057 | where id in (select partner_info_id |
2058 | from supplier_catalogue_line |
2059 | - where catalogue_id = %s)''' % (ids[0])) |
2060 | + where catalogue_id = %s)''', (ids[0],)) |
2061 | cr.execute('''delete from product_supplierinfo |
2062 | where id in (select supplier_info_id |
2063 | from supplier_catalogue_line |
2064 | where catalogue_id = %s) |
2065 | and id not in (select suppinfo_id from |
2066 | - pricelist_partnerinfo ) ''' % (ids[0])) |
2067 | + pricelist_partnerinfo ) ''', |
2068 | + (ids[0],)) |
2069 | |
2070 | res = super(supplier_catalogue_line, self).write(cr, uid, [line.id], new_vals, context=context) |
2071 | |
2072 | |
2073 | === modified file 'bin/addons/sync_client/ir_model_data.py' |
2074 | --- bin/addons/sync_client/ir_model_data.py 2016-11-28 16:19:14 +0000 |
2075 | +++ bin/addons/sync_client/ir_model_data.py 2017-10-06 09:59:17 +0000 |
2076 | @@ -70,8 +70,7 @@ |
2077 | FROM ir_model_data |
2078 | LEFT JOIN %(table)s ON %(table)s.id = ir_model_data.res_id |
2079 | WHERE ir_model_data.model = %%s AND ir_model_data.res_id IN %%s AND ir_model_data.id IN %%s |
2080 | - GROUP BY ir_model_data.model, ir_model_data.res_id HAVING COUNT(%(table)s.id) = 0""" \ |
2081 | - % {'table':self.pool.get(model)._table}, [model, tuple(res_ids), tuple(ids)]) |
2082 | + GROUP BY ir_model_data.model, ir_model_data.res_id HAVING COUNT(%(table)s.id) = 0""" % {'table':self.pool.get(model)._table}, [model, tuple(res_ids), tuple(ids)]) # not_a_user_entry |
2083 | for data_ids, exists in cr.fetchall(): |
2084 | res.update(dict((id, not exists) for id in data_ids)) |
2085 | return res |
2086 | @@ -125,7 +124,7 @@ |
2087 | FROM %s r |
2088 | LEFT JOIN ir_model_data data ON data.module = 'sd' AND |
2089 | data.model = %%s AND r.id = data.res_id |
2090 | - WHERE data.res_id IS NULL;""" % obj._table, [obj._name]) |
2091 | + WHERE data.res_id IS NULL;""" % obj._table, [obj._name]) # not_a_user_entry |
2092 | record_ids = map(lambda x: x[0], cr.fetchall()) |
2093 | |
2094 | # if we have some records that doesn't have an sdref |
2095 | @@ -192,7 +191,7 @@ |
2096 | DELETE FROM ir_model_data WHERE id IN %s""", [tuple(to_delete)]) |
2097 | for id, rec in to_write: |
2098 | cr.execute("""\ |
2099 | -UPDATE ir_model_data SET """+", ".join("%s = %%s" % k for k in rec.keys())+""" WHERE id = %s""", rec.values() + [id]) |
2100 | +UPDATE ir_model_data SET """+", ".join("%s = %%s" % k for k in rec.keys())+""" WHERE id = %s""", rec.values() + [id]) # not_a_user_entry |
2101 | cr.execute("""CREATE UNIQUE INDEX unique_sdref_constraint ON ir_model_data (model, res_id) WHERE module = 'sd'""") |
2102 | cr.commit() |
2103 | self._logger.info("%d sdref(s) deleted, %d kept." % (len(to_delete), len(to_write))) |
2104 | |
2105 | === modified file 'bin/addons/sync_client/log_sale_purchase.py' |
2106 | --- bin/addons/sync_client/log_sale_purchase.py 2014-01-14 08:37:50 +0000 |
2107 | +++ bin/addons/sync_client/log_sale_purchase.py 2017-10-06 09:59:17 +0000 |
2108 | @@ -53,13 +53,12 @@ |
2109 | |
2110 | def _get_model_id_from_document(self, cr, uid, ids, field, args, context=None): |
2111 | self.read(cr, uid, ids, ['model'], context=context) |
2112 | - cr.execute("""\ |
2113 | + cr.execute(""" |
2114 | SELECT log.id, m.id |
2115 | FROM %s log |
2116 | LEFT JOIN ir_model m |
2117 | ON log.model = m.model |
2118 | - WHERE log.id IN %%s;""" \ |
2119 | - % (self._table,), [tuple(ids)]) |
2120 | + WHERE log.id IN %%s;""" % (self._table,), [tuple(ids)]) # not_a_user_entry |
2121 | return dict(cr.fetchall()) |
2122 | |
2123 | @check |
2124 | |
2125 | === modified file 'bin/addons/sync_client/orm.py' |
2126 | --- bin/addons/sync_client/orm.py 2017-09-28 08:23:33 +0000 |
2127 | +++ bin/addons/sync_client/orm.py 2017-10-06 09:59:17 +0000 |
2128 | @@ -122,8 +122,7 @@ |
2129 | WHERE module = 'sd' AND |
2130 | model = %s AND |
2131 | """+add_sql+""" |
2132 | - ("""+field+""" < last_modification OR """+field+""" IS NULL) |
2133 | - """, |
2134 | + ("""+field+""" < last_modification OR """+field+""" IS NULL)""", # not_a_user_entry |
2135 | sql_params) |
2136 | result = [row[0] for row in cr.fetchall()] |
2137 | else: |
2138 | @@ -134,8 +133,7 @@ |
2139 | WHERE module = 'sd' AND |
2140 | model = %s AND |
2141 | """+add_sql+""" |
2142 | - ("""+field+""" < last_modification OR """+field+""" IS NULL) |
2143 | - """, |
2144 | + ("""+field+""" < last_modification OR """+field+""" IS NULL)""", # not_a_user_entry |
2145 | sql_params) |
2146 | result = [row[0] for row in cr.fetchall() |
2147 | if row[1] is None \ |
2148 | @@ -439,11 +437,10 @@ |
2149 | field, real_field = ('id' if field == 'is_deleted' else field), field |
2150 | if self._name == "ir.model.data": |
2151 | cr.execute("""\ |
2152 | -SELECT name, %s FROM ir_model_data WHERE module = 'sd' AND name IN %%s""" % field, [sdrefs]) |
2153 | +SELECT name, %s FROM ir_model_data WHERE module = 'sd' AND name IN %%s""" % field, [sdrefs]) # not_a_user_entry |
2154 | else: |
2155 | cr.execute("""\ |
2156 | -SELECT name, %s FROM ir_model_data WHERE module = 'sd' AND model = %%s AND name IN %%s""" \ |
2157 | -% field, [self._name,sdrefs]) |
2158 | +SELECT name, %s FROM ir_model_data WHERE module = 'sd' AND model = %%s AND name IN %%s""" % field, [self._name,sdrefs]) # not_a_user_entry |
2159 | try: |
2160 | result = RejectingDict(cr.fetchall()) |
2161 | except DuplicateKey, e: |
2162 | @@ -666,7 +663,7 @@ |
2163 | cr.execute(''' |
2164 | select d.res_id from ir_model_data d |
2165 | left join '''+self._table+''' t on t.id = d.res_id and d.model=%(model)s |
2166 | - where t.id is null and d.model=%(model)s'''+sql_add, sql_params) |
2167 | + where t.id is null and d.model=%(model)s'''+sql_add, sql_params) # not_a_user_entry |
2168 | return [x[0] for x in cr.fetchall()] |
2169 | |
2170 | def search_ext(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): |
2171 | |
2172 | === modified file 'bin/addons/sync_client/special_handling.py' |
2173 | --- bin/addons/sync_client/special_handling.py 2017-04-07 12:49:38 +0000 |
2174 | +++ bin/addons/sync_client/special_handling.py 2017-10-06 09:59:17 +0000 |
2175 | @@ -252,7 +252,7 @@ |
2176 | value_to_write.append(value[i]) |
2177 | |
2178 | value_to_write.append(obj_id) |
2179 | - cr.execute('UPDATE '+pool_obj._table+' SET ('+','.join(fields)+') = ('+','.join(to_replace)+') WHERE id=%s', tuple(value_to_write)) |
2180 | + cr.execute('UPDATE '+pool_obj._table+' SET ('+','.join(fields)+') = ('+','.join(to_replace)+') WHERE id=%s', tuple(value_to_write)) # not_a_user_entry |
2181 | return True |
2182 | |
2183 | |
2184 | |
2185 | === modified file 'bin/addons/sync_client/sync_client.py' |
2186 | --- bin/addons/sync_client/sync_client.py 2017-09-25 17:32:34 +0000 |
2187 | +++ bin/addons/sync_client/sync_client.py 2017-10-06 09:59:17 +0000 |
2188 | @@ -1258,15 +1258,15 @@ |
2189 | |
2190 | # delete sync_client_update_received older than 6 month |
2191 | cr.execute("""DELETE FROM sync_client_update_received |
2192 | - WHERE create_date < now() - interval '%d month' AND |
2193 | - execution_date IS NOT NULL AND run='t'""" % nb_month_to_clean) |
2194 | + WHERE create_date < now() - interval '%s month' AND |
2195 | + execution_date IS NOT NULL AND run='t'""", (nb_month_to_clean,)) |
2196 | deleted_update_received = cr.rowcount |
2197 | self._logger.info('clean_updates method has deleted %d sync_client_update_received' % deleted_update_received) |
2198 | |
2199 | # delete sync_client_update_to_send older than 6 month |
2200 | cr.execute("""DELETE FROM sync_client_update_to_send |
2201 | - WHERE create_date < now() - interval '%d month' AND |
2202 | - sent_date IS NOT NULL AND sent='t'""" % nb_month_to_clean) |
2203 | + WHERE create_date < now() - interval '%s month' AND |
2204 | + sent_date IS NOT NULL AND sent='t'""", (nb_month_to_clean,)) |
2205 | deleted_update_to_send = cr.rowcount |
2206 | self._logger.info('clean_updates method has deleted %d sync_client_update_to_send' % deleted_update_to_send) |
2207 | |
2208 | |
2209 | === modified file 'bin/addons/sync_common/migration_scripts.py' |
2210 | --- bin/addons/sync_common/migration_scripts.py 2013-06-10 14:59:51 +0000 |
2211 | +++ bin/addons/sync_common/migration_scripts.py 2017-10-06 09:59:17 +0000 |
2212 | @@ -36,7 +36,7 @@ |
2213 | WHERE %(table)s.%(column)s = %(rel_table)s.id; |
2214 | ALTER TABLE %(table)s DROP COLUMN %(column)s; |
2215 | ALTER TABLE %(table)s RENAME COLUMN new_%(column)s TO %(column)s; |
2216 | -COMMIT;""" % format_keys) |
2217 | +COMMIT;""" % format_keys) # not_a_user_entry |
2218 | return fn(self, cr, context=context) |
2219 | return wrapper |
2220 | return decorator |
2221 | @@ -53,7 +53,7 @@ |
2222 | column_sdref_exists = bool( cr.fetchone() ) |
2223 | result = fn(self, cr, context=context) |
2224 | if not column_sdref_exists: |
2225 | - cr.execute("SELECT COUNT(*) FROM %s" % self._table) |
2226 | + cr.execute("SELECT COUNT(*) FROM %s" % self._table) # not_a_user_entry |
2227 | count = cr.fetchone()[0] |
2228 | if count > 0: |
2229 | cr.commit() |
2230 | @@ -68,7 +68,7 @@ |
2231 | data = dict(zip(eval(fields), eval(values))) |
2232 | assert 'id' in data, "Cannot find column 'id' on model=%s id=%d" % (self._name, id) |
2233 | sdref = xmlid_to_sdref(data['id']) |
2234 | - cr.execute("UPDATE %s SET sdref = %%s WHERE id = %%s" % self._table, [sdref, id]) |
2235 | + cr.execute("UPDATE %s SET sdref = %%s WHERE id = %%s" % self._table, [sdref, id]) # not_a_user_entry |
2236 | except AssertionError, e: |
2237 | _logger.error("Cannot find SD ref on model=%s id=%d: %s" % (self._name, id, e.message)) |
2238 | cr.execute("ROLLBACK TO SAVEPOINT make_sdref") |
2239 | @@ -96,14 +96,14 @@ |
2240 | FROM information_schema.columns c1 |
2241 | LEFT JOIN information_schema.columns c2 |
2242 | ON c2.table_name = c1.table_name AND c2.column_name = 'sequence_number' |
2243 | -WHERE c1.table_name = '%s' AND c1.column_name = 'sequence' AND c2.column_name IS NULL;""" % self._table) |
2244 | +WHERE c1.table_name = %s AND c1.column_name = 'sequence' AND c2.column_name IS NULL;""", (self._table,)) # not_a_user_entry |
2245 | if cr.fetchone(): |
2246 | _logger.info("Replacing column sequence by sequence_number for table %s..." % self._table) |
2247 | cr.execute("""\ |
2248 | ALTER TABLE %(table)s ADD COLUMN "sequence_number" INTEGER; |
2249 | UPDATE %(table)s SET sequence_number = sequence; |
2250 | ALTER TABLE %(table)s DROP COLUMN "sequence"; |
2251 | -""" % {'table':self._table}) |
2252 | +""" % {'table':self._table}) # not_a_user_entry |
2253 | return fn(self, cr, context=context) |
2254 | return wrapper |
2255 | |
2256 | |
2257 | === modified file 'bin/addons/sync_remote_warehouse/orm.py' |
2258 | --- bin/addons/sync_remote_warehouse/orm.py 2015-11-23 21:48:33 +0000 |
2259 | +++ bin/addons/sync_remote_warehouse/orm.py 2017-10-06 09:59:17 +0000 |
2260 | @@ -52,10 +52,10 @@ |
2261 | SELECT res_id |
2262 | FROM ir_model_data |
2263 | WHERE module = 'sd' AND |
2264 | - model = %%s AND |
2265 | - (last_modification > '%(clone_date)s' OR sync_date > '%(clone_date)s' OR (last_modification is null and sync_date is null)) |
2266 | + model = %s AND |
2267 | + (last_modification > %s OR sync_date > %s OR (last_modification is null and sync_date is null)) |
2268 | AND |
2269 | - (usb_sync_date < last_modification OR usb_sync_date < sync_date OR usb_sync_date IS NULL)""" % {'clone_date' : clone_date},[self._name]) |
2270 | + (usb_sync_date < last_modification OR usb_sync_date < sync_date OR usb_sync_date IS NULL)""", (self._name, clone_date, clone_date)) |
2271 | |
2272 | return [row[0] for row in cr.fetchall()] |
2273 | |
2274 | |
2275 | === modified file 'bin/addons/sync_remote_warehouse/wizard/setup_remote_warehouse.py' |
2276 | --- bin/addons/sync_remote_warehouse/wizard/setup_remote_warehouse.py 2015-11-17 08:55:49 +0000 |
2277 | +++ bin/addons/sync_remote_warehouse/wizard/setup_remote_warehouse.py 2017-10-06 09:59:17 +0000 |
2278 | @@ -117,19 +117,19 @@ |
2279 | temp = 'ir_sequence_%03d' % seq['id'] |
2280 | if suffix == '-RW': |
2281 | # US-27: Reset the sequence for the RW instance |
2282 | - cr.execute("SELECT 0 FROM pg_class where relname = '%s'" % temp) |
2283 | + cr.execute("SELECT 0 FROM pg_class where relname = '%s'" % temp) # not_a_user_entry |
2284 | res = cr.dictfetchone() |
2285 | if res: |
2286 | - cr.execute("select last_value from %s" % temp) |
2287 | + cr.execute("select last_value from %s" % temp) # not_a_user_entry |
2288 | res = cr.dictfetchone() |
2289 | if res: |
2290 | dict_seq_values[temp] = res['last_value'] |
2291 | - cr.execute("ALTER SEQUENCE " + temp +" RESTART WITH " + str(1)) |
2292 | + cr.execute("ALTER SEQUENCE " + temp +" RESTART WITH " + str(1)) # not_a_user_entry |
2293 | else: |
2294 | # US-27: Revert all the sequence that has been set before |
2295 | value = dict_seq_values.get(temp, False) |
2296 | if value: |
2297 | - cr.execute("ALTER SEQUENCE " + temp +" RESTART WITH " + str(value + 1)) |
2298 | + cr.execute("ALTER SEQUENCE " + temp +" RESTART WITH " + str(value + 1)) # not_a_user_entry |
2299 | new_suffix = '%s%s' % (old_suffix, suffix) |
2300 | seq_obj.write(cr, uid, [seq.id], {'suffix': new_suffix}, context=context) |
2301 | |
2302 | |
2303 | === modified file 'bin/addons/sync_server/sync_server.py' |
2304 | --- bin/addons/sync_server/sync_server.py 2017-09-25 16:18:24 +0000 |
2305 | +++ bin/addons/sync_server/sync_server.py 2017-10-06 09:59:17 +0000 |
2306 | @@ -609,7 +609,7 @@ |
2307 | order = order.replace('last_dateactivity', 'datetime') |
2308 | limit_str = init_limit and ' limit %d' % init_limit or '' |
2309 | offset_str = init_offset and ' offset %d' % init_offset or '' |
2310 | - cr.execute('select entity_id from sync_server_entity_activity where entity_id in %s order by ' + order + limit_str + offset_str, (tuple(ids),)) |
2311 | + cr.execute('select entity_id from sync_server_entity_activity where entity_id in %s order by ' + order + limit_str + offset_str, (tuple(ids),)) # not_a_user_entry |
2312 | return [x[0] for x in cr.fetchall()] |
2313 | return ids |
2314 | |
2315 | |
2316 | === modified file 'bin/addons/sync_server/update.py' |
2317 | --- bin/addons/sync_server/update.py 2017-10-02 08:00:42 +0000 |
2318 | +++ bin/addons/sync_server/update.py 2017-10-06 09:59:17 +0000 |
2319 | @@ -105,7 +105,7 @@ |
2320 | for to_del in foreign_key_to_delete: |
2321 | cr.execute("SELECT conname FROM pg_constraint WHERE conname = %s", (to_del, )) |
2322 | if cr.fetchone(): |
2323 | - cr.execute("ALTER table sync_server_entity_rel DROP CONSTRAINT %s" % (to_del,)) |
2324 | + cr.execute("ALTER table sync_server_entity_rel DROP CONSTRAINT %s" % (to_del,)) # not_a_user_entry |
2325 | |
2326 | def init(self, cr): |
2327 | cr.execute("""\ |
2328 | @@ -131,7 +131,7 @@ |
2329 | ALTER COLUMN "id" SET NOT NULL; |
2330 | ALTER TABLE "public"."%(table)s" ADD UNIQUE ("id"); |
2331 | ALTER TABLE "public"."%(table)s" DROP CONSTRAINT "%(table)s_id_key" RESTRICT; |
2332 | -ALTER TABLE "public"."%(table)s" ADD PRIMARY KEY ("id");""" % {'table':self._table}) |
2333 | +ALTER TABLE "public"."%(table)s" ADD PRIMARY KEY ("id");""" % {'table':self._table}) # not_a_user_entry |
2334 | |
2335 | |
2336 | class update(osv.osv): |
2337 | @@ -203,10 +203,9 @@ |
2338 | [(puller_ids_rel._table, self._table)]); |
2339 | existing_tables = [row[0] for row in cr.fetchall()] |
2340 | if puller_ids_rel._table in existing_tables: |
2341 | - cr.execute("""DELETE FROM %s WHERE update_id IN (SELECT id FROM %s WHERE rule_id IS NULL)""" \ |
2342 | - % (puller_ids_rel._table, self._table)) |
2343 | + cr.execute("""DELETE FROM %s WHERE update_id IN (SELECT id FROM %s WHERE rule_id IS NULL)""" % (puller_ids_rel._table, self._table)) # not_a_user_entry |
2344 | if self._table in existing_tables: |
2345 | - cr.execute("""DELETE FROM %s WHERE rule_id IS NULL""" % self._table) |
2346 | + cr.execute("""DELETE FROM %s WHERE rule_id IS NULL""" % self._table) # not_a_user_entry |
2347 | super(update, self)._auto_init(cr, context=context) |
2348 | cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = 'sync_server_update_sequence_id_index'") |
2349 | if not cr.fetchone(): |
2350 | @@ -509,7 +508,7 @@ |
2351 | |
2352 | if not recover and init_sync: |
2353 | # first sync get only master data |
2354 | - cr.execute("select id from sync_server_sync_rule where id in (" + ','.join(map(str, rules)) + ") and master_data='t'"); |
2355 | + cr.execute("select id from sync_server_sync_rule where id in (" + ','.join(map(str, rules)) + ") and master_data='t'") # not_a_user_entry |
2356 | rules = [x[0] for x in cr.fetchall()] |
2357 | |
2358 | base_query = " ".join(("""SELECT "sync_server_update".id FROM "sync_server_update" INNER JOIN sync_server_sync_rule ON sync_server_sync_rule.id = rule_id WHERE""", |
2359 | @@ -547,8 +546,7 @@ |
2360 | self._logger.info("::::::::[%s] Data pull get package:: init sync = %s, last_seq = %s, max_seq = %s, offset = %s, max_size = %s" % (entity.name, init_sync, last_seq, max_seq, '/'.join(map(str, offset)), max_size)) |
2361 | |
2362 | while not ids or packet_size < max_size: |
2363 | - query = base_query % (offset[0], offset[1], max_size) |
2364 | - cr.execute(query) |
2365 | + cr.execute(base_query, (offset[0], offset[1], max_size)) |
2366 | ids = map(lambda x:x[0], cr.fetchall()) |
2367 | if not ids: |
2368 | break |
2369 | |
2370 | === modified file 'bin/addons/sync_so/so_po_common.py' |
2371 | --- bin/addons/sync_so/so_po_common.py 2017-08-03 15:16:40 +0000 |
2372 | +++ bin/addons/sync_so/so_po_common.py 2017-10-06 09:59:17 +0000 |
2373 | @@ -157,7 +157,7 @@ |
2374 | seq_tools = self.pool.get('sequence.tools') |
2375 | |
2376 | # Make sure that even if the FO/PO has no line, then the default value is 1 |
2377 | - cr.execute("select max(line_number) from " + order_line_object + " where order_id = " + str(order_id)) |
2378 | + cr.execute("select max(line_number) from %s where order_id = %%s" % order_line_object, (str(order_id),)) # not_a_user_entry |
2379 | for x in cr.fetchall(): |
2380 | # For the FO without any line |
2381 | val = 1 |
2382 | |
2383 | === modified file 'bin/addons/sync_so/specific_xml_id.py' |
2384 | --- bin/addons/sync_so/specific_xml_id.py 2017-07-31 09:56:56 +0000 |
2385 | +++ bin/addons/sync_so/specific_xml_id.py 2017-10-06 09:59:17 +0000 |
2386 | @@ -241,17 +241,17 @@ |
2387 | if isinstance(ids, (long, int)): |
2388 | ids = [ids] |
2389 | res = dict.fromkeys(ids, False) |
2390 | - for id in ids: |
2391 | - cr.execute("select instance_id from account_target_costcenter where cost_center_id = %s" % (id)) |
2392 | + for account_id in ids: |
2393 | + cr.execute("select instance_id from account_target_costcenter where cost_center_id = %s", (account_id,)) |
2394 | instance_ids = [x[0] for x in cr.fetchall()] |
2395 | if len(instance_ids) > 0: |
2396 | res_temp = [] |
2397 | for instance_id in instance_ids: |
2398 | - cr.execute("select instance from msf_instance where id = %s and state = 'active'" % (instance_id)) |
2399 | + cr.execute("select instance from msf_instance where id = %s and state = 'active'", (instance_id,)) |
2400 | result = cr.fetchone() |
2401 | if result: |
2402 | res_temp.append(result[0]) |
2403 | - res[id] = res_temp |
2404 | + res[account_id] = res_temp |
2405 | return res |
2406 | |
2407 | # UFTP-2: Get the children of the given instance and create manually sync updates for them, only when it is Coordo |
2408 | |
2409 | === modified file 'bin/addons/tender_flow/tender_flow.py' |
2410 | --- bin/addons/tender_flow/tender_flow.py 2016-12-06 13:41:44 +0000 |
2411 | +++ bin/addons/tender_flow/tender_flow.py 2017-10-06 09:59:17 +0000 |
2412 | @@ -264,8 +264,7 @@ |
2413 | LEFT JOIN product_product p ON l.product_id = p.id |
2414 | LEFT JOIN product_template pt ON p.product_tmpl_id = pt.id |
2415 | LEFT JOIN tender t ON l.tender_id = t.id |
2416 | - WHERE (pt.type != 'service_recep' %s) AND t.id in %%s LIMIT 1''' % transport_cat, |
2417 | - (tuple(ids),)) |
2418 | + WHERE (pt.type != 'service_recep' %s) AND t.id in %%s LIMIT 1''' % transport_cat, (tuple(ids),)) # not_a_user_entry |
2419 | res = cr.fetchall() |
2420 | |
2421 | if res: |
2422 | |
2423 | === modified file 'bin/addons/update_client/version.py' |
2424 | --- bin/addons/update_client/version.py 2016-04-25 15:54:40 +0000 |
2425 | +++ bin/addons/update_client/version.py 2017-10-06 09:59:17 +0000 |
2426 | @@ -33,8 +33,7 @@ |
2427 | |
2428 | def _patch_needs_to_be_downloaded(self, cr, uid, ids, name, args, context=None): |
2429 | cr.execute("""\ |
2430 | - SELECT id, patch IS NULL FROM %s WHERE id IN %%s""" % \ |
2431 | - self._table, [tuple(ids)]) |
2432 | + SELECT id, patch IS NULL FROM %s WHERE id IN %%s""" % self._table, [tuple(ids)]) # not_a_user_entry |
2433 | return dict(cr.fetchall()) |
2434 | |
2435 | _columns = { |
2436 | |
2437 | === modified file 'bin/osv/expression.py' |
2438 | --- bin/osv/expression.py 2017-05-18 13:47:43 +0000 |
2439 | +++ bin/osv/expression.py 2017-10-06 09:59:17 +0000 |
2440 | @@ -48,20 +48,19 @@ |
2441 | res = [] |
2442 | if ids: |
2443 | if op in ['<','>','>=','<=']: |
2444 | - cr.execute('SELECT "%s"' \ |
2445 | - ' FROM "%s"' \ |
2446 | - ' WHERE "%s" %s %%s' % (s, f, w, op), (ids[0],)) |
2447 | + cr.execute("""SELECT "%s" |
2448 | + FROM "%s" |
2449 | + WHERE "%s" %s %%s""" % (s, f, w, op), (ids[0],)) # not_a_user_entry |
2450 | res.extend([r[0] for r in cr.fetchall()]) |
2451 | else: |
2452 | for i in range(0, len(ids), cr.IN_MAX): |
2453 | subids = ids[i:i+cr.IN_MAX] |
2454 | - cr.execute('SELECT "%s"' \ |
2455 | - ' FROM "%s"' \ |
2456 | - ' WHERE "%s" IN %%s' % (s, f, w),(tuple(subids),)) |
2457 | + cr.execute("""SELECT "%s" |
2458 | + FROM "%s" |
2459 | + WHERE "%s" IN %%s""" % (s, f, w),(tuple(subids),)) # not_a_user_entry |
2460 | res.extend([r[0] for r in cr.fetchall()]) |
2461 | else: |
2462 | - cr.execute('SELECT distinct("%s")' \ |
2463 | - ' FROM "%s" where "%s" is not null' % (s, f, s)), |
2464 | + cr.execute('SELECT distinct("%s") FROM "%s" where "%s" is not null' % (s, f, s)) # not_a_user_entry |
2465 | res.extend([r[0] for r in cr.fetchall()]) |
2466 | return res |
2467 | |
2468 | |
2469 | === modified file 'bin/osv/fields.py' |
2470 | --- bin/osv/fields.py 2017-05-22 13:59:51 +0000 |
2471 | +++ bin/osv/fields.py 2017-10-06 09:59:17 +0000 |
2472 | @@ -105,7 +105,7 @@ |
2473 | pass |
2474 | |
2475 | def set(self, cr, obj, id, name, value, user=None, context=None): |
2476 | - cr.execute('update '+obj._table+' set '+name+'='+self._symbol_set[0]+' where id=%s', (self._symbol_set[1](value), id)) |
2477 | + cr.execute('update '+obj._table+' set '+name+'='+self._symbol_set[0]+' where id=%s', (self._symbol_set[1](value), id)) # not_a_user_entry |
2478 | |
2479 | def set_memory(self, cr, obj, id, name, value, user=None, context=None): |
2480 | raise Exception(_('Not implemented set_memory method !')) |
2481 | @@ -334,9 +334,9 @@ |
2482 | self._table = obj_src.pool.get(self._obj)._table |
2483 | if act[0] == 0: |
2484 | id_new = obj.create(cr, user, act[1]) |
2485 | - cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (id_new, id)) |
2486 | + cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (id_new, id)) # not_a_user_entry |
2487 | else: |
2488 | - cr.execute('select '+field+' from '+obj_src._table+' where id=%s', (act[0],)) |
2489 | + cr.execute('select '+field+' from '+obj_src._table+' where id=%s', (act[0],)) # not_a_user_entry |
2490 | id = cr.fetchone()[0] |
2491 | obj.write(cr, user, [id], act[1], context=context) |
2492 | |
2493 | @@ -401,20 +401,20 @@ |
2494 | for act in values: |
2495 | if act[0] == 0: |
2496 | id_new = obj.create(cr, act[2]) |
2497 | - cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (id_new, id)) |
2498 | + cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (id_new, id)) # not_a_user_entry |
2499 | elif act[0] == 1: |
2500 | obj.write(cr, [act[1]], act[2], context=context) |
2501 | elif act[0] == 2: |
2502 | - cr.execute('delete from '+self._table+' where id=%s', (act[1],)) |
2503 | + cr.execute('delete from '+self._table+' where id=%s', (act[1],)) # not_a_user_entry |
2504 | elif act[0] == 3 or act[0] == 5: |
2505 | - cr.execute('update '+obj_src._table+' set '+field+'=null where id=%s', (id,)) |
2506 | + cr.execute('update '+obj_src._table+' set '+field+'=null where id=%s', (id,)) # not_a_user_entry |
2507 | elif act[0] == 4: |
2508 | - cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (act[1], id)) |
2509 | + cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (act[1], id)) # not_a_user_entry |
2510 | else: |
2511 | if values: |
2512 | - cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (values, id)) |
2513 | + cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (values, id)) # not_a_user_entry |
2514 | else: |
2515 | - cr.execute('update '+obj_src._table+' set '+field+'=null where id=%s', (id,)) |
2516 | + cr.execute('update '+obj_src._table+' set '+field+'=null where id=%s', (id,)) # not_a_user_entry |
2517 | |
2518 | def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None): |
2519 | return obj.pool.get(self._obj).search(cr, uid, args+self._domain+[('name', 'like', value)], offset, limit, context=context) |
2520 | @@ -524,15 +524,15 @@ |
2521 | elif act[0] == 2: |
2522 | obj.unlink(cr, user, [act[1]], context=context) |
2523 | elif act[0] == 3: |
2524 | - cr.execute('update '+_table+' set '+self._fields_id+'=null where id=%s', (act[1],)) |
2525 | + cr.execute('update '+_table+' set '+self._fields_id+'=null where id=%s', (act[1],)) # not_a_user_entry |
2526 | elif act[0] == 4: |
2527 | - cr.execute('update '+_table+' set '+self._fields_id+'=%s where id=%s', (id, act[1])) |
2528 | + cr.execute('update '+_table+' set '+self._fields_id+'=%s where id=%s', (id, act[1])) # not_a_user_entry |
2529 | elif act[0] == 5: |
2530 | - cr.execute('update '+_table+' set '+self._fields_id+'=null where '+self._fields_id+'=%s', (id,)) |
2531 | + cr.execute('update '+_table+' set '+self._fields_id+'=null where '+self._fields_id+'=%s', (id,)) # not_a_user_entry |
2532 | elif act[0] == 6: |
2533 | obj.write(cr, user, act[2], {self._fields_id:id}, context=context or {}) |
2534 | ids2 = act[2] or [0] |
2535 | - cr.execute('select id from '+_table+' where '+self._fields_id+'=%s and id <> ALL (%s)', (id,ids2)) |
2536 | + cr.execute('select id from '+_table+' where '+self._fields_id+'=%s and id <> ALL (%s)', (id,ids2)) # not_a_user_entry |
2537 | ids3 = map(lambda x:x[0], cr.fetchall()) |
2538 | obj.write(cr, user, ids3, {self._fields_id:False}, context=context or {}) |
2539 | return result |
2540 | @@ -601,24 +601,25 @@ |
2541 | if self._limit is not None: |
2542 | limit_str = ' LIMIT %d' % self._limit |
2543 | |
2544 | - query = 'SELECT %(rel)s.%(id2)s, %(rel)s.%(id1)s \ |
2545 | - FROM %(rel)s, %(from_c)s \ |
2546 | - WHERE %(rel)s.%(id1)s IN %%s \ |
2547 | - AND %(rel)s.%(id2)s = %(tbl)s.id \ |
2548 | - %(where_c)s \ |
2549 | - %(order_by)s \ |
2550 | - %(limit)s \ |
2551 | - OFFSET %(offset)d' \ |
2552 | - % {'rel': self._rel, |
2553 | - 'from_c': from_c, |
2554 | - 'tbl': obj._table, |
2555 | - 'id1': self._id1, |
2556 | - 'id2': self._id2, |
2557 | - 'where_c': where_c, |
2558 | - 'limit': limit_str, |
2559 | - 'order_by': order_by, |
2560 | - 'offset': offset, |
2561 | - } |
2562 | + query = """ |
2563 | + SELECT %(rel)s.%(id2)s, %(rel)s.%(id1)s |
2564 | + FROM %(rel)s, %(from_c)s |
2565 | + WHERE %(rel)s.%(id1)s IN %%s |
2566 | + AND %(rel)s.%(id2)s = %(tbl)s.id |
2567 | + %(where_c)s |
2568 | + %(order_by)s |
2569 | + %(limit)s |
2570 | + OFFSET %(offset)d""" % { # not_a_user_entry |
2571 | + 'rel': self._rel, |
2572 | + 'from_c': from_c, |
2573 | + 'tbl': obj._table, |
2574 | + 'id1': self._id1, |
2575 | + 'id2': self._id2, |
2576 | + 'where_c': where_c, |
2577 | + 'limit': limit_str, |
2578 | + 'order_by': order_by, |
2579 | + 'offset': offset, |
2580 | + } |
2581 | cr.execute(query, [tuple(ids),] + where_params) |
2582 | for r in cr.fetchall(): |
2583 | res[r[1]].append(r[0]) |
2584 | @@ -635,20 +636,20 @@ |
2585 | continue |
2586 | if act[0] == 0: |
2587 | idnew = obj.create(cr, user, act[2]) |
2588 | - cr.execute('insert into '+self._rel+' ('+self._id1+','+self._id2+') values (%s,%s)', (id, idnew)) |
2589 | + cr.execute('insert into '+self._rel+' ('+self._id1+','+self._id2+') values (%s,%s)', (id, idnew)) # not_a_user_entry |
2590 | elif act[0] == 1: |
2591 | obj.write(cr, user, [act[1]], act[2], context=context) |
2592 | elif act[0] == 2: |
2593 | obj.unlink(cr, user, [act[1]], context=context) |
2594 | elif act[0] == 3: |
2595 | - cr.execute('delete from '+self._rel+' where ' + self._id1 + '=%s and '+ self._id2 + '=%s', (id, act[1])) |
2596 | + cr.execute('delete from '+self._rel+' where ' + self._id1 + '=%s and '+ self._id2 + '=%s', (id, act[1])) # not_a_user_entry |
2597 | elif act[0] == 4: |
2598 | # following queries are in the same transaction - so should be relatively safe |
2599 | - cr.execute('SELECT 1 FROM '+self._rel+' WHERE '+self._id1+' = %s and '+self._id2+' = %s', (id, act[1])) |
2600 | + cr.execute('SELECT 1 FROM '+self._rel+' WHERE '+self._id1+' = %s and '+self._id2+' = %s', (id, act[1])) # not_a_user_entry |
2601 | if not cr.fetchone(): |
2602 | - cr.execute('insert into '+self._rel+' ('+self._id1+','+self._id2+') values (%s,%s)', (id, act[1])) |
2603 | + cr.execute('insert into '+self._rel+' ('+self._id1+','+self._id2+') values (%s,%s)', (id, act[1])) # not_a_user_entry |
2604 | elif act[0] == 5: |
2605 | - cr.execute('update '+self._rel+' set '+self._id2+'=null where '+self._id2+'=%s', (id,)) |
2606 | + cr.execute('update '+self._rel+' set '+self._id2+'=null where '+self._id2+'=%s', (id,)) # not_a_user_entry |
2607 | elif act[0] == 6: |
2608 | |
2609 | d1, d2,tables = obj.pool.get('ir.rule').domain_get(cr, user, obj._name, context=context) |
2610 | @@ -656,10 +657,10 @@ |
2611 | d1 = ' and ' + ' and '.join(d1) |
2612 | else: |
2613 | d1 = '' |
2614 | - cr.execute('delete from '+self._rel+' where '+self._id1+'=%s AND '+self._id2+' IN (SELECT '+self._rel+'.'+self._id2+' FROM '+self._rel+', '+','.join(tables)+' WHERE '+self._rel+'.'+self._id1+'=%s AND '+self._rel+'.'+self._id2+' = '+obj._table+'.id '+ d1 +')', [id, id]+d2) |
2615 | + cr.execute('delete from '+self._rel+' where '+self._id1+'=%s AND '+self._id2+' IN (SELECT '+self._rel+'.'+self._id2+' FROM '+self._rel+', '+','.join(tables)+' WHERE '+self._rel+'.'+self._id1+'=%s AND '+self._rel+'.'+self._id2+' = '+obj._table+'.id '+ d1 +')', [id, id]+d2) # not_a_user_entry |
2616 | |
2617 | for act_nbr in act[2]: |
2618 | - cr.execute('insert into '+self._rel+' ('+self._id1+','+self._id2+') values (%s, %s)', (id, act_nbr)) |
2619 | + cr.execute('insert into '+self._rel+' ('+self._id1+','+self._id2+') values (%s, %s)', (id, act_nbr)) # not_a_user_entry |
2620 | |
2621 | # |
2622 | # TODO: use a name_search |
2623 | |
2624 | === modified file 'bin/osv/orm.py' |
2625 | --- bin/osv/orm.py 2017-08-18 14:26:52 +0000 |
2626 | +++ bin/osv/orm.py 2017-10-06 09:59:17 +0000 |
2627 | @@ -1907,7 +1907,7 @@ |
2628 | cr.execute(""" |
2629 | SELECT reltuples::BIGINT AS approximate_row_count |
2630 | FROM pg_class WHERE relname = '%s' |
2631 | - """ % self._table) |
2632 | + """ % self._table) # not_a_user_entry |
2633 | approximative_result = cr.fetchall() |
2634 | approximative_result = approximative_result and approximative_result[0][0] or 0 |
2635 | # check if approximative is big |
2636 | @@ -2679,7 +2679,7 @@ |
2637 | offset_str = offset and ' offset %d' % offset or '' |
2638 | if len(groupby_list) < 2 and context.get('group_by_no_leaf'): |
2639 | group_count = '_' |
2640 | - cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params) |
2641 | + cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params) # not_a_user_entry |
2642 | alldata = {} |
2643 | groupby = group_by |
2644 | for r in cr.dictfetchall(): |
2645 | @@ -2759,7 +2759,7 @@ |
2646 | where = self._parent_name+' IS NULL' |
2647 | if self._parent_order: |
2648 | where += ' order by '+self._parent_order |
2649 | - cr.execute('SELECT id FROM '+self._table+' WHERE '+where) |
2650 | + cr.execute('SELECT id FROM '+self._table+' WHERE '+where) # not_a_user_entry |
2651 | pos2 = pos + 1 |
2652 | for id in cr.fetchall(): |
2653 | pos2 = browse_rec(id[0], pos2) |
2654 | @@ -2780,8 +2780,8 @@ |
2655 | logger = netsvc.Logger() |
2656 | logger.notifyChannel('data', netsvc.LOG_INFO, "storing computed values of fields.function '%s'" % (k,)) |
2657 | ss = self._columns[k]._symbol_set |
2658 | - update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0]) |
2659 | - cr.execute('SELECT id FROM '+self._table) |
2660 | + update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0]) # not_a_user_entry |
2661 | + cr.execute('SELECT id FROM '+self._table) # not_a_user_entry |
2662 | ids_lst = map(lambda x: x[0], cr.fetchall()) |
2663 | |
2664 | migrate = False |
2665 | @@ -2842,7 +2842,7 @@ |
2666 | self.__logger.debug("column %s is in the table %s but not in the corresponding object %s", |
2667 | column['attname'], self._table, self._name) |
2668 | if column['attnotnull']: |
2669 | - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname'])) |
2670 | + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname'])) # not_a_user_entry |
2671 | self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint", |
2672 | self._table, column['attname']) |
2673 | |
2674 | @@ -2878,8 +2878,8 @@ |
2675 | if getattr(self, '_auto', True): |
2676 | cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,)) |
2677 | if not cr.rowcount: |
2678 | - cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,)) |
2679 | - cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''"))) |
2680 | + cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,)) # not_a_user_entry |
2681 | + cr.execute("COMMENT ON TABLE \"%s\" IS '%s'" % (self._table, self._description.replace("'", "''"))) # not_a_user_entry |
2682 | create = True |
2683 | self.__schema.debug("Table '%s': created", self._table) |
2684 | |
2685 | @@ -2890,8 +2890,8 @@ |
2686 | WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid |
2687 | """, (self._table, 'parent_left')) |
2688 | if not cr.rowcount: |
2689 | - cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,)) |
2690 | - cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,)) |
2691 | + cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,)) # not_a_user_entry |
2692 | + cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,)) # not_a_user_entry |
2693 | if 'parent_left' not in self._columns: |
2694 | self.__logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)', |
2695 | self._table) |
2696 | @@ -2929,7 +2929,7 @@ |
2697 | WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid |
2698 | """, (self._table, k)) |
2699 | if not cr.rowcount: |
2700 | - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k])) |
2701 | + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, logs[k])) # not_a_user_entry |
2702 | cr.commit() |
2703 | self.__schema.debug("Table '%s': added column '%s' with definition=%s", |
2704 | self._table, k, logs[k]) |
2705 | @@ -2969,7 +2969,7 @@ |
2706 | cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id)) |
2707 | res = cr.fetchone()[0] |
2708 | if not res: |
2709 | - cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY (%s) REFERENCES "%s" ON DELETE SET NULL' % (self._obj, f._fields_id, f._table)) |
2710 | + cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY (%s) REFERENCES "%s" ON DELETE SET NULL' % (self._obj, f._fields_id, f._table)) # not_a_user_entry |
2711 | self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE SET NULL", |
2712 | self._obj, f._fields_id, f._table) |
2713 | elif isinstance(f, fields.many2many): |
2714 | @@ -2979,10 +2979,10 @@ |
2715 | raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,)) |
2716 | ref = self.pool.get(f._obj)._table |
2717 | # ref = f._obj.replace('.', '_') |
2718 | - cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, UNIQUE("%s","%s")) WITH OIDS' % (f._rel, f._id1, self._table, f._id2, ref, f._id1, f._id2)) |
2719 | - cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1)) |
2720 | - cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2)) |
2721 | - cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref)) |
2722 | + cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, UNIQUE("%s","%s")) WITH OIDS' % (f._rel, f._id1, self._table, f._id2, ref, f._id1, f._id2)) # not_a_user_entry |
2723 | + cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1)) # not_a_user_entry |
2724 | + cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2)) # not_a_user_entry |
2725 | + cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref)) # not_a_user_entry |
2726 | cr.commit() |
2727 | self.__schema.debug("Create table '%s': relation between '%s' and '%s'", |
2728 | f._rel, self._table, ref) |
2729 | @@ -2998,7 +2998,7 @@ |
2730 | "AND a.atttypid=t.oid", (self._table, f.oldname)) |
2731 | res_old = cr.dictfetchall() |
2732 | if res_old and len(res_old) == 1: |
2733 | - cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k)) |
2734 | + cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k)) # not_a_user_entry |
2735 | res = res_old |
2736 | res[0]['attname'] = k |
2737 | self.__schema.debug("Table '%s': renamed column '%s' to '%s'", |
2738 | @@ -3013,7 +3013,7 @@ |
2739 | not getattr(f, 'nodrop', False): |
2740 | self.__logger.info('column %s (%s) in table %s removed: converted to a function !\n', |
2741 | k, f.string, self._table) |
2742 | - cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k)) |
2743 | + cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k)) # not_a_user_entry |
2744 | cr.commit() |
2745 | self.__schema.debug("Table '%s': dropped column '%s' with cascade", |
2746 | self._table, k) |
2747 | @@ -3033,10 +3033,10 @@ |
2748 | ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]), |
2749 | ] |
2750 | if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size: |
2751 | - cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k)) |
2752 | - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size)) |
2753 | - cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size)) |
2754 | - cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,)) |
2755 | + cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k)) # not_a_user_entry |
2756 | + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size)) # not_a_user_entry |
2757 | + cr.execute('UPDATE "%s" SET "%s"=temp_change_size::VARCHAR(%d)' % (self._table, k, f.size)) # not_a_user_entry |
2758 | + cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,)) # not_a_user_entry |
2759 | cr.commit() |
2760 | self.__schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s", |
2761 | self._table, k, f_pg_size, f.size) |
2762 | @@ -3044,10 +3044,10 @@ |
2763 | if (f_pg_type==c[0]) and (f._type==c[1]): |
2764 | if f_pg_type != f_obj_type: |
2765 | ok = True |
2766 | - cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k)) |
2767 | - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2])) |
2768 | - cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k)) |
2769 | - cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,)) |
2770 | + cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k)) # not_a_user_entry |
2771 | + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2])) # not_a_user_entry |
2772 | + cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k)) # not_a_user_entry |
2773 | + cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,)) # not_a_user_entry |
2774 | cr.commit() |
2775 | self.__schema.debug("Table '%s': column '%s' changed type from %s to %s", |
2776 | self._table, k, c[0], c[1]) |
2777 | @@ -3066,10 +3066,10 @@ |
2778 | break |
2779 | i += 1 |
2780 | if f_pg_notnull: |
2781 | - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k)) |
2782 | - cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname)) |
2783 | - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1])) |
2784 | - cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''"))) |
2785 | + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k)) # not_a_user_entry |
2786 | + cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname)) # not_a_user_entry |
2787 | + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1])) # not_a_user_entry |
2788 | + cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''"))) # not_a_user_entry |
2789 | self.__schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !", |
2790 | self._table, k, f_pg_type, f._type, newname) |
2791 | to_migrate.append((newname, k)) |
2792 | @@ -3085,12 +3085,12 @@ |
2793 | |
2794 | if default is not None: |
2795 | ss = self._columns[k]._symbol_set |
2796 | - query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k) |
2797 | + query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k) # not_a_user_entry |
2798 | cr.execute(query, (ss[1](default),)) |
2799 | # add the NOT NULL constraint |
2800 | cr.commit() |
2801 | try: |
2802 | - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False) |
2803 | + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False) # not_a_user_entry |
2804 | cr.commit() |
2805 | self.__schema.debug("Table '%s': column '%s': added NOT NULL constraint", |
2806 | self._table, k) |
2807 | @@ -3101,7 +3101,7 @@ |
2808 | self.__schema.warn(msg, self._table, k, self._table, k) |
2809 | cr.commit() |
2810 | elif not f.required and f_pg_notnull == 1: |
2811 | - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k)) |
2812 | + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k)) # not_a_user_entry |
2813 | cr.commit() |
2814 | self.__schema.debug("Table '%s': column '%s': dropped NOT NULL constraint", |
2815 | self._table, k) |
2816 | @@ -3110,7 +3110,7 @@ |
2817 | cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table)) |
2818 | res2 = cr.dictfetchall() |
2819 | if not res2 and f.select: |
2820 | - cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k)) |
2821 | + cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k)) # not_a_user_entry |
2822 | cr.commit() |
2823 | if f._type == 'text': |
2824 | # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context) |
2825 | @@ -3120,7 +3120,7 @@ |
2826 | "Use a search view instead if you simply want to make the field searchable." |
2827 | self.__schema.warn(msg, self._table, k, f._type) |
2828 | if res2 and not f.select: |
2829 | - cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k)) |
2830 | + cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k)) # not_a_user_entry |
2831 | cr.commit() |
2832 | msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore" |
2833 | self.__schema.debug(msg, self._table, k, f._type) |
2834 | @@ -3146,8 +3146,8 @@ |
2835 | res2 = cr.dictfetchall() |
2836 | if res2: |
2837 | if res2[0]['confdeltype'] != POSTGRES_CONFDELTYPES.get(f.ondelete.upper(), 'a'): |
2838 | - cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"') |
2839 | - cr.execute('ALTER TABLE "' + self._table + '" ADD FOREIGN KEY ("' + k + '") REFERENCES "' + ref + '" ON DELETE ' + f.ondelete) |
2840 | + cr.execute('ALTER TABLE "' + self._table + '" DROP CONSTRAINT "' + res2[0]['conname'] + '"') # not_a_user_entry |
2841 | + cr.execute('ALTER TABLE "' + self._table + '" ADD FOREIGN KEY ("' + k + '") REFERENCES "' + ref + '" ON DELETE ' + f.ondelete) # not_a_user_entry |
2842 | cr.commit() |
2843 | self.__schema.debug("Table '%s': column '%s': XXX", |
2844 | self._table, k) |
2845 | @@ -3156,8 +3156,8 @@ |
2846 | else: |
2847 | if not isinstance(f, fields.function) or f.store: |
2848 | # add the missing field |
2849 | - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1])) |
2850 | - cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''"))) |
2851 | + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1])) # not_a_user_entry |
2852 | + cr.execute("COMMENT ON COLUMN %s.%s IS '%s'" % (self._table, k, f.string.replace("'", "''"))) # not_a_user_entry |
2853 | self.__schema.debug("Table '%s': added column '%s' with definition=%s", |
2854 | self._table, k, get_pg_type(f)[1]) |
2855 | |
2856 | @@ -3169,7 +3169,7 @@ |
2857 | default = self._defaults[k] |
2858 | |
2859 | ss = self._columns[k]._symbol_set |
2860 | - query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0]) |
2861 | + query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0]) # not_a_user_entry |
2862 | cr.execute(query, (ss[1](default),)) |
2863 | cr.commit() |
2864 | netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k)) |
2865 | @@ -3188,15 +3188,15 @@ |
2866 | # ref = f._obj.replace('.', '_') |
2867 | # ir_actions is inherited so foreign key doesn't work on it |
2868 | if ref != 'ir_actions': |
2869 | - cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete)) |
2870 | + cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete)) # not_a_user_entry |
2871 | self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", |
2872 | self._table, k, ref, f.ondelete) |
2873 | if f.select: |
2874 | - cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k)) |
2875 | + cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k)) # not_a_user_entry |
2876 | if f.required: |
2877 | try: |
2878 | cr.commit() |
2879 | - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False) |
2880 | + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False) # not_a_user_entry |
2881 | self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint", |
2882 | self._table, k) |
2883 | except Exception: |
2884 | @@ -3514,7 +3514,7 @@ |
2885 | fields_pre2 = map(convert_field, fields_pre) |
2886 | order_by = self._parent_order or self._order |
2887 | select_fields = ','.join(fields_pre2 + [self._table + '.id']) |
2888 | - query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, |
2889 | + query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, # ignore_sql_check |
2890 | ','.join(tables), self._table) |
2891 | if rule_clause: |
2892 | query = ''.join((query, ' AND ', ' OR '.join(rule_clause))) |
2893 | @@ -3679,7 +3679,7 @@ |
2894 | query = '''SELECT %s, __imd.module, __imd.name |
2895 | FROM %s LEFT JOIN ir_model_data __imd |
2896 | ON (__imd.model = %%s and __imd.res_id = %s.id) |
2897 | - WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table) |
2898 | + WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table) # not_a_user_entry |
2899 | cr.execute(query, (self._name, tuple(ids))) |
2900 | res = cr.dictfetchall() |
2901 | for r in res: |
2902 | @@ -3711,7 +3711,7 @@ |
2903 | ids_to_check.extend([id, update_date]) |
2904 | if not ids_to_check: |
2905 | continue |
2906 | - cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check)) |
2907 | + cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check)) # not_a_user_entry |
2908 | res = cr.fetchone() |
2909 | if res: |
2910 | # mention the first one only to keep the error message readable |
2911 | @@ -3784,8 +3784,7 @@ |
2912 | cr.execute('DELETE FROM ir_attachment WHERE res_id IN %s AND res_model = %s', |
2913 | (sub_ids, self._name)) |
2914 | |
2915 | - cr.execute('DELETE FROM ' + self._table + ' ' \ |
2916 | - 'WHERE id IN %s', (sub_ids,)) |
2917 | + cr.execute('DELETE FROM ' + self._table + ' WHERE id IN %s', (sub_ids,)) # not_a_user_entry |
2918 | |
2919 | # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file, |
2920 | # as these are not connected with real database foreign keys, and would be dangling references. |
2921 | @@ -3809,7 +3808,7 @@ |
2922 | for order, current_obj, store_ids, field_list in result_store: |
2923 | if current_obj != self._name: |
2924 | obj = self.pool.get(current_obj) |
2925 | - cr.execute('SELECT id from '+obj._table+' WHERE id IN %s', (tuple(store_ids),)) |
2926 | + cr.execute('SELECT id from '+obj._table+' WHERE id IN %s', (tuple(store_ids),)) # not_a_user_entry |
2927 | rids = map(lambda x: x[0], cr.fetchall()) |
2928 | if rids: |
2929 | obj._store_set_values(cr, uid, rids, field_list, context) |
2930 | @@ -3926,8 +3925,7 @@ |
2931 | (self._table, self._parent_name, self._parent_name) |
2932 | cr.execute(query, (tuple(ids), parent_val)) |
2933 | else: |
2934 | - query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL)" % \ |
2935 | - (self._table, self._parent_name) |
2936 | + query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL)" % (self._table, self._parent_name) # not_a_user_entry |
2937 | cr.execute(query, (tuple(ids),)) |
2938 | parents_changed = map(operator.itemgetter(0), cr.fetchall()) |
2939 | |
2940 | @@ -4051,7 +4049,7 @@ |
2941 | clause, params = '%s IS NULL' % (self._parent_name,), () |
2942 | |
2943 | for current_id in parents_changed: |
2944 | - cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (current_id,)) |
2945 | + cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (current_id,)) # not_a_user_entry |
2946 | pleft, pright = cr.fetchone() |
2947 | distance = pright - pleft + 1 |
2948 | |
2949 | @@ -4059,7 +4057,7 @@ |
2950 | # this can _not_ be fetched outside the loop, as it needs to be refreshed |
2951 | # after each update, in case several nodes are sequentially inserted one |
2952 | # next to the other (i.e computed incrementally) |
2953 | - cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, order), params) |
2954 | + cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, order), params) # not_a_user_entry |
2955 | parents = cr.fetchall() |
2956 | |
2957 | # Find Position of the element |
2958 | @@ -4180,7 +4178,7 @@ |
2959 | # Try-except added to filter the creation of those records whose fields are readonly. |
2960 | # Example : any dashboard which has all the fields readonly.(due to Views(database views)) |
2961 | try: |
2962 | - cr.execute("SELECT nextval('"+self._sequence+"')") |
2963 | + cr.execute("SELECT nextval('"+self._sequence+"')") # not_a_user_entry |
2964 | except: |
2965 | raise except_orm(_('UserError'), |
2966 | _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.')) |
2967 | @@ -4222,8 +4220,7 @@ |
2968 | for group in groups: |
2969 | module = group.split(".")[0] |
2970 | grp = group.split(".")[1] |
2971 | - cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \ |
2972 | - (grp, module, 'res.groups', user)) |
2973 | + cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % (grp, module, 'res.groups', user)) # not_a_user_entry |
2974 | readonly = cr.fetchall() |
2975 | if readonly[0][0] >= 1: |
2976 | edit = True |
2977 | @@ -4269,15 +4266,15 @@ |
2978 | break |
2979 | pleft_old = pleft |
2980 | if not pleft_old: |
2981 | - cr.execute('select parent_left from '+self._table+' where id=%s', (parent,)) |
2982 | + cr.execute('select parent_left from '+self._table+' where id=%s', (parent,)) # not_a_user_entry |
2983 | pleft_old = cr.fetchone()[0] |
2984 | pleft = pleft_old |
2985 | else: |
2986 | - cr.execute('select max(parent_right) from '+self._table) |
2987 | + cr.execute('select max(parent_right) from '+self._table) # not_a_user_entry |
2988 | pleft = cr.fetchone()[0] or 0 |
2989 | - cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,)) |
2990 | - cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,)) |
2991 | - cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new)) |
2992 | + cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,)) # not_a_user_entry |
2993 | + cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,)) # not_a_user_entry |
2994 | + cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new)) # not_a_user_entry |
2995 | |
2996 | # default element in context must be remove when call a one2many or many2many |
2997 | result = [] |
2998 | @@ -4364,7 +4361,7 @@ |
2999 | field_flag = False |
3000 | field_dict = {} |
3001 | if self._log_access: |
3002 | - cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),)) |
3003 | + cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),)) # not_a_user_entry |
3004 | res = cr.fetchall() |
3005 | for r in res: |
3006 | if r[1]: |
3007 | @@ -4893,7 +4890,7 @@ |
3008 | if type(ids) in (int, long): |
3009 | ids = [ids] |
3010 | query = 'SELECT count(1) FROM "%s"' % (self._table) |
3011 | - cr.execute(query + "WHERE ID IN %s", (tuple(ids),)) |
3012 | + cr.execute(query + "WHERE ID IN %s", (tuple(ids),)) # not_a_user_entry |
3013 | return cr.fetchone()[0] == len(ids) |
3014 | |
3015 | def check_recursion(self, cr, uid, ids, context=None, parent=None): |
3016 | @@ -4919,7 +4916,7 @@ |
3017 | if not parent: |
3018 | parent = self._parent_name |
3019 | ids_parent = ids[:] |
3020 | - query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table) |
3021 | + query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table) # not_a_user_entry |
3022 | while ids_parent: |
3023 | ids_parent2 = [] |
3024 | for i in range(0, len(ids), cr.IN_MAX): |
3025 | @@ -4999,7 +4996,7 @@ |
3026 | for column, table in cr.fetchall(): |
3027 | if table == self._table: |
3028 | continue |
3029 | - cr.execute("""SELECT count(id) FROM %s WHERE %s IN %%s""" % (table, column), (tuple(ids),)) |
3030 | + cr.execute("""SELECT count(id) FROM %s WHERE %s IN %%s""" % (table, column), (tuple(ids),)) # not_a_user_entry |
3031 | res = cr.fetchall() |
3032 | if res[0][0]: |
3033 | return True |
3034 | |
3035 | === modified file 'bin/service/web_services.py' |
3036 | --- bin/service/web_services.py 2017-05-30 15:47:59 +0000 |
3037 | +++ bin/service/web_services.py 2017-10-06 09:59:17 +0000 |
3038 | @@ -38,6 +38,7 @@ |
3039 | import locale |
3040 | import logging |
3041 | import datetime |
3042 | +import string |
3043 | import csv |
3044 | import re |
3045 | from osv import osv |
3046 | @@ -52,6 +53,14 @@ |
3047 | from passlib.hash import bcrypt |
3048 | from report import report_sxw |
3049 | |
3050 | +def _check_db_name(name): |
3051 | + '''Raise if the name contains unauthorized characters |
3052 | + ''' |
3053 | + if name and isinstance(name, basestring): |
3054 | + # allow char, number, _ and - |
3055 | + if not re.match('^[a-zA-Z][a-zA-Z0-9_-]+$', name): |
3056 | + raise _("You must avoid all accents, space or special characters.") |
3057 | + |
3058 | def export_csv(fields, result, result_file_path): |
3059 | try: |
3060 | with open(result_file_path, 'wb') as result_file: |
3061 | @@ -131,11 +140,13 @@ |
3062 | cr = db.cursor() |
3063 | try: |
3064 | cr.autocommit(True) # avoid transaction block |
3065 | - cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % name) |
3066 | + _check_db_name(name) |
3067 | + cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % name) # ignore_sql_check |
3068 | finally: |
3069 | cr.close(True) |
3070 | |
3071 | def exp_create(self, db_name, demo, lang, user_password='admin'): |
3072 | + _check_db_name(db_name) |
3073 | self.id_protect.acquire() |
3074 | self.id += 1 |
3075 | id = self.id |
3076 | @@ -232,6 +243,7 @@ |
3077 | raise Exception, e |
3078 | |
3079 | def exp_drop(self, db_name): |
3080 | + _check_db_name(db_name) |
3081 | sql_db.close_db(db_name) |
3082 | logger = netsvc.Logger() |
3083 | |
3084 | @@ -241,7 +253,7 @@ |
3085 | drop_db = False |
3086 | try: |
3087 | try: |
3088 | - cr.execute('DROP DATABASE "%s"' % db_name) |
3089 | + cr.execute('DROP DATABASE "%s"' % db_name) # ignore_sql_check |
3090 | drop_db = True |
3091 | except Exception, e: |
3092 | logger.notifyChannel("web-services", netsvc.LOG_ERROR, |
3093 | @@ -265,6 +277,7 @@ |
3094 | del os.environ['PGPASSWORD'] |
3095 | |
3096 | def exp_dump_file(self, db_name): |
3097 | + _check_db_name(db_name) |
3098 | # get a tempfilename |
3099 | f = NamedTemporaryFile(delete=False) |
3100 | f_name = f.name |
3101 | @@ -276,6 +289,7 @@ |
3102 | |
3103 | |
3104 | def exp_dump(self, db_name): |
3105 | + _check_db_name(db_name) |
3106 | logger = netsvc.Logger() |
3107 | data, res = tools.pg_dump(db_name) |
3108 | if res: |
3109 | @@ -285,6 +299,7 @@ |
3110 | return base64.encodestring(data) |
3111 | |
3112 | def exp_restore_file(self, db_name, filename): |
3113 | + _check_db_name(db_name) |
3114 | try: |
3115 | logger = netsvc.Logger() |
3116 | |
3117 | @@ -321,6 +336,7 @@ |
3118 | raise |
3119 | |
3120 | def exp_restore(self, db_name, data): |
3121 | + _check_db_name(db_name) |
3122 | logging.getLogger('web-services').info("Restore DB from memory") |
3123 | buf=base64.decodestring(data) |
3124 | tmpfile = NamedTemporaryFile('w+b', delete=False) |
3125 | @@ -330,6 +346,8 @@ |
3126 | return self.exp_restore_file(db_name, tmpfile.name) |
3127 | |
3128 | def exp_rename(self, old_name, new_name): |
3129 | + _check_db_name(old_name) |
3130 | + _check_db_name(new_name) |
3131 | sql_db.close_db(old_name) |
3132 | logger = netsvc.Logger() |
3133 | |
3134 | @@ -338,7 +356,7 @@ |
3135 | cr.autocommit(True) # avoid transaction block |
3136 | try: |
3137 | try: |
3138 | - cr.execute('ALTER DATABASE "%s" RENAME TO "%s"' % (old_name, new_name)) |
3139 | + cr.execute('ALTER DATABASE "%s" RENAME TO "%s"' % (old_name, new_name)) # ignore_sql_check |
3140 | except Exception, e: |
3141 | logger.notifyChannel("web-services", netsvc.LOG_ERROR, |
3142 | 'RENAME DB: %s -> %s failed:\n%s' % (old_name, new_name, e)) |
3143 | @@ -355,6 +373,7 @@ |
3144 | return True |
3145 | |
3146 | def exp_db_exist(self, db_name): |
3147 | + _check_db_name(db_name) |
3148 | ## Not True: in fact, check if connection to database is possible. The database may exist |
3149 | return bool(sql_db.db_connect(db_name)) |
3150 | |
3151 | @@ -390,6 +409,7 @@ |
3152 | """Return True if db_name is connected to a production SYNC_SERVER, |
3153 | False otherwise""" |
3154 | |
3155 | + _check_db_name(db_name) |
3156 | connection = sql_db.db_connect(db_name) |
3157 | # is the db connected to a sync_server ? |
3158 | server_connecion_module = pooler.get_pool(db_name, upgrade_modules=False).get('sync.client.sync_server_connection') |
3159 | @@ -427,6 +447,7 @@ |
3160 | """ |
3161 | if not dbname: |
3162 | return release.version |
3163 | + _check_db_name(dbname) |
3164 | |
3165 | db = sql_db.db_connect(dbname) |
3166 | cr = db.cursor() |
3167 | @@ -860,6 +881,7 @@ |
3168 | |
3169 | def exp_export(self, db_name, uid, fields, domain, model, fields_name, |
3170 | group_by=None, export_format='csv', ids=None, context=None): |
3171 | + _check_db_name(db_name) |
3172 | res = {'result': None} |
3173 | db, pool = pooler.get_db_and_pool(db_name) |
3174 | cr = db.cursor() |
3175 | @@ -1001,12 +1023,14 @@ |
3176 | cr.close() |
3177 | return result_file_path |
3178 | |
3179 | - def exp_report(self, db, uid, object, ids, datas=None, context=None): |
3180 | + def exp_report(self, db_name, uid, object, ids, datas=None, context=None): |
3181 | if not datas: |
3182 | datas={} |
3183 | if not context: |
3184 | context={} |
3185 | |
3186 | + _check_db_name(db_name) |
3187 | + |
3188 | self.id_protect.acquire() |
3189 | self.id += 1 |
3190 | id = self.id |
3191 | @@ -1015,7 +1039,7 @@ |
3192 | self._reports[id] = {'uid': uid, 'result': False, 'state': False, 'exception': None} |
3193 | |
3194 | def go(id, uid, ids, datas, context): |
3195 | - cr = pooler.get_db(db).cursor() |
3196 | + cr = pooler.get_db(db_name).cursor() |
3197 | import traceback |
3198 | import sys |
3199 | try: |
3200 | |
3201 | === modified file 'bin/tools/sql.py' |
3202 | --- bin/tools/sql.py 2009-10-20 10:52:23 +0000 |
3203 | +++ bin/tools/sql.py 2017-10-06 09:59:17 +0000 |
3204 | @@ -22,7 +22,7 @@ |
3205 | def drop_view_if_exists(cr, viewname): |
3206 | cr.execute("select count(1) from pg_class where relkind=%s and relname=%s", ('v', viewname,)) |
3207 | if cr.fetchone()[0]: |
3208 | - cr.execute("DROP view %s" % (viewname,)) |
3209 | + cr.execute("DROP view %s" % (viewname,)) # not_a_user_entry |
3210 | cr.commit() |
3211 | |
3212 | # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3213 | |
3214 | === removed directory 'doc/migrate' |
3215 | === removed directory 'doc/migrate/3.3.0-3.4.0' |
3216 | === removed file 'doc/migrate/3.3.0-3.4.0/README' |
3217 | --- doc/migrate/3.3.0-3.4.0/README 2006-12-07 13:41:40 +0000 |
3218 | +++ doc/migrate/3.3.0-3.4.0/README 1970-01-01 00:00:00 +0000 |
3219 | @@ -1,35 +0,0 @@ |
3220 | -This document describes the steps to follow to migrate from a version 3.3.0 of Tiny ERP server to a version 3.4.0 |
3221 | - |
3222 | -Warning: the migration scripts involved in this migration are only meant for |
3223 | -a standard Tiny ERP installation. It might not work or even break some data |
3224 | -if you added or modified some code to the default Tiny ERP distribution. |
3225 | - |
3226 | -To migrate a 3.3.0 server to version 3.4.0 you should: |
3227 | - |
3228 | -- stop Tiny ERP server 3.3.0 |
3229 | - |
3230 | -- backup your database |
3231 | - For example: pg_dump terp330 > backup330.sql |
3232 | - |
3233 | -- run the pre.py script (located in this directory) |
3234 | - You might need to pass it some optional arguments so that it can connect |
3235 | - to the database. |
3236 | - |
3237 | - For example: python pre.py -d terp330 |
3238 | - |
3239 | -- run TinyERP server 3.4.0 with "-u all" in the parameters |
3240 | - For example: ./tinyerp-server.py -d terp330 -u all |
3241 | - |
3242 | -- stop TinyERP server 3.4.0 |
3243 | - |
3244 | -- run the post.py script (located in this directory) |
3245 | - |
3246 | - You might need to pass it some optional arguments so that it can connect |
3247 | - to the database. |
3248 | - |
3249 | - For example: python post.py -d terp330 |
3250 | - |
3251 | -- run TinyERP server 3.4.0 again with "-u all" in the parameters |
3252 | - For example: ./tinyerp-server.py -d terp330 -u all |
3253 | - |
3254 | -- you are ready to work with the new version. |
3255 | |
3256 | === removed file 'doc/migrate/3.3.0-3.4.0/post.py' |
3257 | --- doc/migrate/3.3.0-3.4.0/post.py 2010-06-21 09:05:43 +0000 |
3258 | +++ doc/migrate/3.3.0-3.4.0/post.py 1970-01-01 00:00:00 +0000 |
3259 | @@ -1,146 +0,0 @@ |
3260 | -# -*- coding: utf-8 -*- |
3261 | -############################################################################## |
3262 | -# |
3263 | -# OpenERP, Open Source Management Solution |
3264 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
3265 | -# |
3266 | -# This program is free software: you can redistribute it and/or modify |
3267 | -# it under the terms of the GNU Affero General Public License as |
3268 | -# published by the Free Software Foundation, either version 3 of the |
3269 | -# License, or (at your option) any later version. |
3270 | -# |
3271 | -# This program is distributed in the hope that it will be useful, |
3272 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
3273 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
3274 | -# GNU Affero General Public License for more details. |
3275 | -# |
3276 | -# You should have received a copy of the GNU Affero General Public License |
3277 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
3278 | -# |
3279 | -############################################################################## |
3280 | - |
3281 | -__author__ = 'Gaetan de Menten, <ged@tiny.be>' |
3282 | -__version__ = '0.1.0' |
3283 | - |
3284 | -import psycopg |
3285 | -import optparse |
3286 | -import ConfigParser |
3287 | - |
3288 | -# ----- |
3289 | - |
3290 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
3291 | - |
3292 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
3293 | - |
3294 | -group = optparse.OptionGroup(parser, "Database related options") |
3295 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
3296 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
3297 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
3298 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
3299 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
3300 | -parser.add_option_group(group) |
3301 | - |
3302 | -options = optparse.Values() |
3303 | -options.db_name = 'terp' # default value |
3304 | -parser.parse_args(values=options) |
3305 | - |
3306 | -if hasattr(options, 'config'): |
3307 | - configparser = ConfigParser.ConfigParser() |
3308 | - configparser.read([options.config]) |
3309 | - for name, value in configparser.items('options'): |
3310 | - if not (hasattr(options, name) and getattr(options, name)): |
3311 | - if value in ('true', 'True'): |
3312 | - value = True |
3313 | - if value in ('false', 'False'): |
3314 | - value = False |
3315 | - setattr(options, name, value) |
3316 | - |
3317 | -# ----- |
3318 | - |
3319 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
3320 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
3321 | -name = "dbname=%s" % options.db_name |
3322 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
3323 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
3324 | - |
3325 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
3326 | -cr = db.cursor() |
3327 | - |
3328 | -# ------------------------------------------- # |
3329 | -# convert partner payment terms to properties # |
3330 | -# ------------------------------------------- # |
3331 | - |
3332 | -# setup |
3333 | - |
3334 | -cr.execute("select id from ir_model_fields where name='property_payment_term' and model='res.partner'") |
3335 | -fields_id = cr.fetchone()[0] |
3336 | - |
3337 | -cr.execute("select company_id from res_users where company_id is not null limit 1") |
3338 | -company_id = cr.fetchone()[0] |
3339 | - |
3340 | -# get partners |
3341 | -cr.execute("SELECT c.relname FROM pg_class c, pg_attribute a WHERE c.relname='res_partner' AND a.attname='payment_term' AND c.oid=a.attrelid") |
3342 | -partners=[] |
3343 | -drop_payment_term=False |
3344 | -if cr.rowcount: |
3345 | - drop_payment_term=True |
3346 | - cr.execute("select id, payment_term from res_partner where payment_term is not null") |
3347 | - partners = cr.dictfetchall() |
3348 | - |
3349 | -# loop over them |
3350 | - |
3351 | -for partner in partners: |
3352 | - value = 'account.payment.term,%d' % partner['payment_term'] |
3353 | - res_id = 'res.partner,%d' % partner['id'] |
3354 | - cr.execute( |
3355 | - "insert into ir_property(name, value, res_id, company_id, fields_id) "\ |
3356 | - "values(%s, %s, %s, %d, %d)", |
3357 | - ('property_payment_term', value, res_id, company_id, fields_id)) |
3358 | - |
3359 | -# remove the field |
3360 | -if drop_payment_term: |
3361 | - cr.execute("alter table res_partner drop column payment_term") |
3362 | -cr.execute("delete from ir_model_fields where model = 'res.partner' and name = 'payment_term'") |
3363 | - |
3364 | -cr.commit() |
3365 | - |
3366 | -# ------------------------ # |
3367 | -# remove duplicate reports # |
3368 | -# ------------------------ # |
3369 | - |
3370 | -cr.execute("select model, report_name from ir_act_report_xml group by model, report_name having count(*)>1") |
3371 | -reports_wh_duplicates = cr.dictfetchall() |
3372 | - |
3373 | -cr.execute("select res_id from ir_model_data where model='ir.actions.report.xml'") |
3374 | -registered_reports = cr.fetchall() |
3375 | -reg_reports_ids = ','.join([str(id) for (id,) in registered_reports]) |
3376 | - |
3377 | -for report in reports_wh_duplicates: |
3378 | - cr.execute("select id from ir_act_report_xml where model=%s and report_name=%s and id not in ("+reg_reports_ids+")", (report['model'], report['report_name'])) |
3379 | - (id,) = cr.fetchone() |
3380 | - cr.execute("delete from ir_act_report_xml where id=%d", (id,)) |
3381 | - cr.execute("delete from ir_values where value='ir.actions.report.xml,%d'", (id,)) |
3382 | - |
3383 | -cr.commit() |
3384 | - |
3385 | -# ------------------------------------- # |
3386 | -# remove duplicate workflow transitions # |
3387 | -# ------------------------------------- # |
3388 | - |
3389 | -# this removes all transitions which are not registered in ir_model_data |
3390 | - |
3391 | -cr.execute("delete from wkf_transition where id not in (select res_id from ir_model_data where model='workflow.transition')") |
3392 | -cr.commit() |
3393 | - |
3394 | -# -------------------------------- # |
3395 | -# remove bad "default" menu action # |
3396 | -# -------------------------------- # |
3397 | - |
3398 | -cr.execute("delete from ir_values where key='action' and model='ir.ui.menu' and res_id is null") |
3399 | -cr.commit() |
3400 | - |
3401 | -cr.close() |
3402 | - |
3403 | - |
3404 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3405 | - |
3406 | |
3407 | === removed file 'doc/migrate/3.3.0-3.4.0/pre.py' |
3408 | --- doc/migrate/3.3.0-3.4.0/pre.py 2009-10-20 10:52:23 +0000 |
3409 | +++ doc/migrate/3.3.0-3.4.0/pre.py 1970-01-01 00:00:00 +0000 |
3410 | @@ -1,112 +0,0 @@ |
3411 | -# -*- coding: utf-8 -*- |
3412 | -############################################################################## |
3413 | -# |
3414 | -# OpenERP, Open Source Management Solution |
3415 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
3416 | -# |
3417 | -# This program is free software: you can redistribute it and/or modify |
3418 | -# it under the terms of the GNU Affero General Public License as |
3419 | -# published by the Free Software Foundation, either version 3 of the |
3420 | -# License, or (at your option) any later version. |
3421 | -# |
3422 | -# This program is distributed in the hope that it will be useful, |
3423 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
3424 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
3425 | -# GNU Affero General Public License for more details. |
3426 | -# |
3427 | -# You should have received a copy of the GNU Affero General Public License |
3428 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
3429 | -# |
3430 | -############################################################################## |
3431 | - |
3432 | -__author__ = 'Gaetan de Menten, <ged@tiny.be>' |
3433 | -__version__ = '0.1.0' |
3434 | - |
3435 | -import psycopg |
3436 | -import optparse |
3437 | -import ConfigParser |
3438 | - |
3439 | -# ----- |
3440 | - |
3441 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
3442 | - |
3443 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
3444 | - |
3445 | -group = optparse.OptionGroup(parser, "Database related options") |
3446 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
3447 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
3448 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
3449 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
3450 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
3451 | -parser.add_option_group(group) |
3452 | - |
3453 | -options = optparse.Values() |
3454 | -options.db_name = 'terp' # default value |
3455 | -parser.parse_args(values=options) |
3456 | - |
3457 | -if hasattr(options, 'config'): |
3458 | - configparser = ConfigParser.ConfigParser() |
3459 | - configparser.read([options.config]) |
3460 | - for name, value in configparser.items('options'): |
3461 | - if not (hasattr(options, name) and getattr(options, name)): |
3462 | - if value in ('true', 'True'): |
3463 | - value = True |
3464 | - if value in ('false', 'False'): |
3465 | - value = False |
3466 | - setattr(options, name, value) |
3467 | - |
3468 | -# ----- |
3469 | - |
3470 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
3471 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
3472 | -name = "dbname=%s" % options.db_name |
3473 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
3474 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
3475 | - |
3476 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
3477 | -cr = db.cursor() |
3478 | - |
3479 | -# ------------------------- # |
3480 | -# change some columns types # |
3481 | -# ------------------------- # |
3482 | - |
3483 | -def change_column(cr, table, column, new_type, copy): |
3484 | - commands = [ |
3485 | - "ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column), |
3486 | - "ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type), |
3487 | - "ALTER TABLE %s DROP COLUMN temp_column" % table |
3488 | - ] |
3489 | - if copy: |
3490 | - commands.insert( |
3491 | - 2, |
3492 | - "UPDATE %s SET %s=temp_column::%s" % (table, column, new_type)) |
3493 | - |
3494 | - for command in commands: |
3495 | - cr.execute(command) |
3496 | - |
3497 | -change_column(cr, 'account_account_type', 'code_from', 'varchar(10)', False) |
3498 | -change_column(cr, 'account_account_type', 'code_to', 'varchar(10)', False) |
3499 | -cr.commit() |
3500 | - |
3501 | -# ----------------------------------------------------- # |
3502 | -# add some fields (which cannot be added automatically) # |
3503 | -# ----------------------------------------------------- # |
3504 | - |
3505 | -for line in ( |
3506 | - "alter table ir_model_fields add group_name varchar(64)", |
3507 | - "alter table ir_model_fields add view_load boolean", |
3508 | - "alter table ir_model_fields alter group_name set default ''", |
3509 | - "alter table ir_model_fields alter view_load set default False", |
3510 | - "delete from ir_values where value like '%,False'", |
3511 | - ): |
3512 | - try: |
3513 | - cr.execute(line) |
3514 | - except psycopg.ProgrammingError, e: |
3515 | - cr.commit() |
3516 | - print e |
3517 | - |
3518 | -cr.commit() |
3519 | -cr.close() |
3520 | - |
3521 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3522 | - |
3523 | |
3524 | === removed directory 'doc/migrate/3.4.0-4.0.0' |
3525 | === removed file 'doc/migrate/3.4.0-4.0.0/README' |
3526 | --- doc/migrate/3.4.0-4.0.0/README 2006-12-28 11:58:44 +0000 |
3527 | +++ doc/migrate/3.4.0-4.0.0/README 1970-01-01 00:00:00 +0000 |
3528 | @@ -1,29 +0,0 @@ |
3529 | -This document describes the steps to follow to migrate from a version 3.4.0 of Tiny ERP server to a version 4.0.0 |
3530 | - |
3531 | -Warning: the migration scripts involved in this migration are only meant for |
3532 | -a standard Tiny ERP installation. It might not work or even break some data |
3533 | -if you added or modified some code to the default Tiny ERP distribution. |
3534 | - |
3535 | -To migrate a 3.4.0 server to version 4.0.0 you should: |
3536 | - |
3537 | -- stop Tiny ERP server 3.4.0 |
3538 | - |
3539 | -- backup your database |
3540 | - For example: pg_dump terp340 > backup340.sql |
3541 | - |
3542 | -- run the pre.py script (located in this directory) |
3543 | - You might need to pass it some optional arguments so that it can connect |
3544 | - to the database. |
3545 | - |
3546 | - For example: python pre.py -d terp340 |
3547 | - |
3548 | -- run TinyERP server 4.0.0 with "-d terp340 -u all" in the parameters |
3549 | - For example: ./tinyerp-server.py -d terp340 -u all |
3550 | - |
3551 | -- stop TinyERP server 4.0.0 |
3552 | - |
3553 | -- run the post.py script (located in this directory) |
3554 | - You might need to pass it some optional arguments so that it can connect |
3555 | - to the database. |
3556 | - |
3557 | -- you are ready to work with the new version. |
3558 | |
3559 | === removed file 'doc/migrate/3.4.0-4.0.0/post-tiny.py' |
3560 | --- doc/migrate/3.4.0-4.0.0/post-tiny.py 2010-06-21 09:05:43 +0000 |
3561 | +++ doc/migrate/3.4.0-4.0.0/post-tiny.py 1970-01-01 00:00:00 +0000 |
3562 | @@ -1,87 +0,0 @@ |
3563 | -# -*- coding: utf-8 -*- |
3564 | -############################################################################## |
3565 | -# |
3566 | -# OpenERP, Open Source Management Solution |
3567 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
3568 | -# |
3569 | -# This program is free software: you can redistribute it and/or modify |
3570 | -# it under the terms of the GNU Affero General Public License as |
3571 | -# published by the Free Software Foundation, either version 3 of the |
3572 | -# License, or (at your option) any later version. |
3573 | -# |
3574 | -# This program is distributed in the hope that it will be useful, |
3575 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
3576 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
3577 | -# GNU Affero General Public License for more details. |
3578 | -# |
3579 | -# You should have received a copy of the GNU Affero General Public License |
3580 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
3581 | -# |
3582 | -############################################################################## |
3583 | - |
3584 | -__author__ = 'Gaetan de Menten, <ged@tiny.be>' |
3585 | -__version__ = '0.1.0' |
3586 | - |
3587 | -import psycopg |
3588 | -import optparse |
3589 | -import ConfigParser |
3590 | - |
3591 | -# ----- |
3592 | - |
3593 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
3594 | - |
3595 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
3596 | - |
3597 | -group = optparse.OptionGroup(parser, "Database related options") |
3598 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
3599 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
3600 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
3601 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
3602 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
3603 | -parser.add_option_group(group) |
3604 | - |
3605 | -options = optparse.Values() |
3606 | -options.db_name = 'terp' # default value |
3607 | -parser.parse_args(values=options) |
3608 | - |
3609 | -if hasattr(options, 'config'): |
3610 | - configparser = ConfigParser.ConfigParser() |
3611 | - configparser.read([options.config]) |
3612 | - for name, value in configparser.items('options'): |
3613 | - if not (hasattr(options, name) and getattr(options, name)): |
3614 | - if value in ('true', 'True'): |
3615 | - value = True |
3616 | - if value in ('false', 'False'): |
3617 | - value = False |
3618 | - setattr(options, name, value) |
3619 | - |
3620 | -# ----- |
3621 | - |
3622 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
3623 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
3624 | -name = "dbname=%s" % options.db_name |
3625 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
3626 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
3627 | - |
3628 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
3629 | -cr = db.cursor() |
3630 | - |
3631 | -# --------------- # |
3632 | -# remove old menu # |
3633 | -# --------------- # |
3634 | - |
3635 | -cr.execute("delete from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))") |
3636 | -cr.commit() |
3637 | - |
3638 | -# --------------- # |
3639 | -# remove ir_value # |
3640 | -# --------------- # |
3641 | - |
3642 | -cr.execute("delete from ir_values where model = 'ir.ui.menu' and res_id is null") |
3643 | -cr.commit() |
3644 | - |
3645 | -cr.close() |
3646 | - |
3647 | - |
3648 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3649 | - |
3650 | |
3651 | === removed file 'doc/migrate/3.4.0-4.0.0/post.py' |
3652 | --- doc/migrate/3.4.0-4.0.0/post.py 2010-06-21 09:05:43 +0000 |
3653 | +++ doc/migrate/3.4.0-4.0.0/post.py 1970-01-01 00:00:00 +0000 |
3654 | @@ -1,116 +0,0 @@ |
3655 | -# -*- coding: utf-8 -*- |
3656 | -############################################################################## |
3657 | -# |
3658 | -# OpenERP, Open Source Management Solution |
3659 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
3660 | -# |
3661 | -# This program is free software: you can redistribute it and/or modify |
3662 | -# it under the terms of the GNU Affero General Public License as |
3663 | -# published by the Free Software Foundation, either version 3 of the |
3664 | -# License, or (at your option) any later version. |
3665 | -# |
3666 | -# This program is distributed in the hope that it will be useful, |
3667 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
3668 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
3669 | -# GNU Affero General Public License for more details. |
3670 | -# |
3671 | -# You should have received a copy of the GNU Affero General Public License |
3672 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
3673 | -# |
3674 | -############################################################################## |
3675 | - |
3676 | -__author__ = 'Gaetan de Menten, <ged@tiny.be>' |
3677 | -__version__ = '0.1.0' |
3678 | - |
3679 | -import psycopg |
3680 | -import optparse |
3681 | -import ConfigParser |
3682 | - |
3683 | -# ----- |
3684 | - |
3685 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
3686 | - |
3687 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
3688 | - |
3689 | -group = optparse.OptionGroup(parser, "Database related options") |
3690 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
3691 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
3692 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
3693 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
3694 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
3695 | -parser.add_option_group(group) |
3696 | - |
3697 | -options = optparse.Values() |
3698 | -options.db_name = 'terp' # default value |
3699 | -parser.parse_args(values=options) |
3700 | - |
3701 | -if hasattr(options, 'config'): |
3702 | - configparser = ConfigParser.ConfigParser() |
3703 | - configparser.read([options.config]) |
3704 | - for name, value in configparser.items('options'): |
3705 | - if not (hasattr(options, name) and getattr(options, name)): |
3706 | - if value in ('true', 'True'): |
3707 | - value = True |
3708 | - if value in ('false', 'False'): |
3709 | - value = False |
3710 | - setattr(options, name, value) |
3711 | - |
3712 | -# ----- |
3713 | - |
3714 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
3715 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
3716 | -name = "dbname=%s" % options.db_name |
3717 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
3718 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
3719 | - |
3720 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
3721 | -cr = db.cursor() |
3722 | - |
3723 | -# ---------------------------------------------------------------- # |
3724 | -# move user id from hr_analytic_timesheet to account_analytic_line # |
3725 | -# ---------------------------------------------------------------- # |
3726 | - |
3727 | -cr.execute("UPDATE account_analytic_line SET user_id = hr_analytic_timesheet.user_id FROM hr_analytic_timesheet WHERE hr_analytic_timesheet.line_id = account_analytic_line.id") |
3728 | -cr.commit() |
3729 | - |
3730 | -# --------------- # |
3731 | -# remove old menu # |
3732 | -# --------------- # |
3733 | - |
3734 | -while True: |
3735 | - cr.execute("select id from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))") |
3736 | - if not cr.rowcount: |
3737 | - break |
3738 | - cr.execute("delete from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))") |
3739 | -cr.commit() |
3740 | - |
3741 | -# ----------------------------------------- # |
3742 | -# add default value for discount in invoice # |
3743 | -# ----------------------------------------- # |
3744 | - |
3745 | -cr.execute("update account_invoice_line set discount=0.0 where discount is null;") |
3746 | -cr.commit() |
3747 | - |
3748 | - |
3749 | -# -------------------------------------------------------------------------- # |
3750 | -# update constraint account_invoice_line_uos_id_fkey on account_invoice_line # |
3751 | -# -------------------------------------------------------------------------- # |
3752 | - |
3753 | -cr.execute("ALTER TABLE account_invoice_line DROP CONSTRAINT account_invoice_line_uos_id_fkey") |
3754 | -cr.execute("ALTER TABLE account_invoice_line ADD FOREIGN KEY (uos_id) REFERENCES product_uom(id) ON DELETE SET NULL") |
3755 | -cr.commit() |
3756 | - |
3757 | -print """ |
3758 | -WARNING: account_uos has been replaced by product_uom. |
3759 | -It is not possible to migrate the data automatically so you need to create the old account_uos in the new product_uom. |
3760 | -And then update the field uos_id of the table account_invoice to match the new id of product_uom. |
3761 | - |
3762 | -EXAMPLE: |
3763 | - UPDATE account_invoice SET uos_id = new_id WHERE uos_id = old_id; |
3764 | -""" |
3765 | - |
3766 | -cr.close() |
3767 | - |
3768 | - |
3769 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3770 | - |
3771 | |
3772 | === removed file 'doc/migrate/3.4.0-4.0.0/pre-tiny.py' |
3773 | --- doc/migrate/3.4.0-4.0.0/pre-tiny.py 2009-10-20 10:52:23 +0000 |
3774 | +++ doc/migrate/3.4.0-4.0.0/pre-tiny.py 1970-01-01 00:00:00 +0000 |
3775 | @@ -1,145 +0,0 @@ |
3776 | -# -*- coding: utf-8 -*- |
3777 | -############################################################################## |
3778 | -# |
3779 | -# OpenERP, Open Source Management Solution |
3780 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
3781 | -# |
3782 | -# This program is free software: you can redistribute it and/or modify |
3783 | -# it under the terms of the GNU Affero General Public License as |
3784 | -# published by the Free Software Foundation, either version 3 of the |
3785 | -# License, or (at your option) any later version. |
3786 | -# |
3787 | -# This program is distributed in the hope that it will be useful, |
3788 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
3789 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
3790 | -# GNU Affero General Public License for more details. |
3791 | -# |
3792 | -# You should have received a copy of the GNU Affero General Public License |
3793 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
3794 | -# |
3795 | -############################################################################## |
3796 | - |
3797 | -__author__ = 'Gaetan de Menten, <ged@tiny.be>' |
3798 | -__version__ = '0.1.0' |
3799 | - |
3800 | -import psycopg |
3801 | -import optparse |
3802 | -import ConfigParser |
3803 | - |
3804 | -# ----- |
3805 | - |
3806 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
3807 | - |
3808 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
3809 | - |
3810 | -group = optparse.OptionGroup(parser, "Database related options") |
3811 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
3812 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
3813 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
3814 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
3815 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
3816 | -parser.add_option_group(group) |
3817 | - |
3818 | -options = optparse.Values() |
3819 | -options.db_name = 'terp' # default value |
3820 | -parser.parse_args(values=options) |
3821 | - |
3822 | -if hasattr(options, 'config'): |
3823 | - configparser = ConfigParser.ConfigParser() |
3824 | - configparser.read([options.config]) |
3825 | - for name, value in configparser.items('options'): |
3826 | - if not (hasattr(options, name) and getattr(options, name)): |
3827 | - if value in ('true', 'True'): |
3828 | - value = True |
3829 | - if value in ('false', 'False'): |
3830 | - value = False |
3831 | - setattr(options, name, value) |
3832 | - |
3833 | -# ----- |
3834 | - |
3835 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
3836 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
3837 | -name = "dbname=%s" % options.db_name |
3838 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
3839 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
3840 | - |
3841 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
3842 | -cr = db.cursor() |
3843 | - |
3844 | -# ------------------------- # |
3845 | -# change some columns types # |
3846 | -# ------------------------- # |
3847 | - |
3848 | -def change_column(cr, table, column, new_type, copy): |
3849 | - commands = [ |
3850 | - "ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column), |
3851 | - "ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type), |
3852 | - "ALTER TABLE %s DROP COLUMN temp_column" % table |
3853 | - ] |
3854 | - if copy: |
3855 | - commands.insert( |
3856 | - 2, |
3857 | - "UPDATE %s SET %s=temp_column::%s" % (table, column, new_type)) |
3858 | - |
3859 | - for command in commands: |
3860 | - cr.execute(command) |
3861 | - |
3862 | -#change_column(cr, 'crm_case', 'date_closed', 'timestamp', True) |
3863 | -cr.commit() |
3864 | - |
3865 | -# -------------------- # |
3866 | -# add module if needed # |
3867 | -# -------------------- # |
3868 | - |
3869 | -cr.execute("SELECT name FROM ir_module_module") |
3870 | -if not cr.rowcount: |
3871 | - for module in set(['base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project', |
3872 | -'account_followup', |
3873 | -'account', |
3874 | -'audittrail', |
3875 | -'base_partner_relation', |
3876 | -'crm', |
3877 | -'delivery', |
3878 | -'edi', |
3879 | -'hr_evaluation', |
3880 | -'hr_expense', |
3881 | -'hr', |
3882 | -'hr_timesheet_invoice', |
3883 | -'hr_timesheet_project', |
3884 | -'hr_timesheet', |
3885 | -'l10n_simple', |
3886 | -'marketing', |
3887 | -'mrp', |
3888 | -'network', |
3889 | -'product', |
3890 | -'project', |
3891 | -'purchase', |
3892 | -'report_crm', |
3893 | -'report_project', |
3894 | -'report_purchase', |
3895 | -'report_sale', |
3896 | -'sale_crm', |
3897 | -'sale', |
3898 | -'sandwich', |
3899 | -'scrum', |
3900 | -'stock']): |
3901 | - cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module) |
3902 | - cr.commit() |
3903 | - |
3904 | - |
3905 | -# ----------------------------------------------------- # |
3906 | -# add some fields (which cannot be added automatically) # |
3907 | -# ----------------------------------------------------- # |
3908 | - |
3909 | -for line in ( |
3910 | - "ALTER TABLE ir_module_module ADD demo BOOLEAN DEFAULT False", |
3911 | - "delete from ir_values where value like '%,False'", |
3912 | - """UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""", |
3913 | - ): |
3914 | - cr.execute(line) |
3915 | - |
3916 | -cr.commit() |
3917 | -cr.close() |
3918 | - |
3919 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
3920 | - |
3921 | |
3922 | === removed file 'doc/migrate/3.4.0-4.0.0/pre.py' |
3923 | --- doc/migrate/3.4.0-4.0.0/pre.py 2010-06-21 09:05:43 +0000 |
3924 | +++ doc/migrate/3.4.0-4.0.0/pre.py 1970-01-01 00:00:00 +0000 |
3925 | @@ -1,127 +0,0 @@ |
3926 | -# -*- coding: utf-8 -*- |
3927 | -############################################################################## |
3928 | -# |
3929 | -# OpenERP, Open Source Management Solution |
3930 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
3931 | -# |
3932 | -# This program is free software: you can redistribute it and/or modify |
3933 | -# it under the terms of the GNU Affero General Public License as |
3934 | -# published by the Free Software Foundation, either version 3 of the |
3935 | -# License, or (at your option) any later version. |
3936 | -# |
3937 | -# This program is distributed in the hope that it will be useful, |
3938 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
3939 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
3940 | -# GNU Affero General Public License for more details. |
3941 | -# |
3942 | -# You should have received a copy of the GNU Affero General Public License |
3943 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
3944 | -# |
3945 | -############################################################################## |
3946 | - |
3947 | -__author__ = 'Gaetan de Menten, <ged@tiny.be>' |
3948 | -__version__ = '0.1.0' |
3949 | - |
3950 | -import psycopg |
3951 | -import optparse |
3952 | -import ConfigParser |
3953 | - |
3954 | -# ----- |
3955 | - |
3956 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
3957 | - |
3958 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
3959 | - |
3960 | -group = optparse.OptionGroup(parser, "Database related options") |
3961 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
3962 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
3963 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
3964 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
3965 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
3966 | -parser.add_option_group(group) |
3967 | - |
3968 | -options = optparse.Values() |
3969 | -options.db_name = 'terp' # default value |
3970 | -parser.parse_args(values=options) |
3971 | - |
3972 | -if hasattr(options, 'config'): |
3973 | - configparser = ConfigParser.ConfigParser() |
3974 | - configparser.read([options.config]) |
3975 | - for name, value in configparser.items('options'): |
3976 | - if not (hasattr(options, name) and getattr(options, name)): |
3977 | - if value in ('true', 'True'): |
3978 | - value = True |
3979 | - if value in ('false', 'False'): |
3980 | - value = False |
3981 | - setattr(options, name, value) |
3982 | - |
3983 | -# ----- |
3984 | - |
3985 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
3986 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
3987 | -name = "dbname=%s" % options.db_name |
3988 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
3989 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
3990 | - |
3991 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
3992 | -cr = db.cursor() |
3993 | - |
3994 | -# ------------------------- # |
3995 | -# change some columns types # |
3996 | -# ------------------------- # |
3997 | - |
3998 | -def change_column(cr, table, column, new_type, copy): |
3999 | - commands = [ |
4000 | - "ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column), |
4001 | - "ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type), |
4002 | - "ALTER TABLE %s DROP COLUMN temp_column" % table |
4003 | - ] |
4004 | - if copy: |
4005 | - commands.insert( |
4006 | - 2, |
4007 | - "UPDATE %s SET %s=temp_column::%s" % (table, column, new_type)) |
4008 | - |
4009 | - for command in commands: |
4010 | - cr.execute(command) |
4011 | - |
4012 | -change_column(cr, 'crm_case', 'date_closed', 'timestamp', True) |
4013 | -cr.commit() |
4014 | - |
4015 | -# -------------------- # |
4016 | -# add module if needed # |
4017 | -# -------------------- # |
4018 | - |
4019 | -cr.execute("SELECT name FROM ir_module_module") |
4020 | -if not cr.rowcount: |
4021 | - for module in ('base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project'): |
4022 | - cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module) |
4023 | - cr.commit() |
4024 | - |
4025 | -# --------------- # |
4026 | -# remove old menu # |
4027 | -# --------------- # |
4028 | - |
4029 | -while True: |
4030 | - cr.execute("select id from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')") |
4031 | - if not cr.rowcount: |
4032 | - break |
4033 | - cr.execute("delete from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')") |
4034 | -cr.commit() |
4035 | - |
4036 | -# ----------------------------------------------------- # |
4037 | -# add some fields (which cannot be added automatically) # |
4038 | -# ----------------------------------------------------- # |
4039 | - |
4040 | -for line in ( |
4041 | - "ALTER TABLE ir_module_module ADD demo BOOLEAN", |
4042 | - "ALTER TABLE ir_module_module SET demo DEFAULT False", |
4043 | - "DELETE FROM ir_values WHERE VALUE LIKE '%,False'", |
4044 | - """UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""", |
4045 | - ): |
4046 | - cr.execute(line) |
4047 | - |
4048 | -cr.commit() |
4049 | -cr.close() |
4050 | - |
4051 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
4052 | - |
4053 | |
4054 | === removed directory 'doc/migrate/4.0.0-4.2.0' |
4055 | === removed file 'doc/migrate/4.0.0-4.2.0/pre.py' |
4056 | --- doc/migrate/4.0.0-4.2.0/pre.py 2009-10-20 10:52:23 +0000 |
4057 | +++ doc/migrate/4.0.0-4.2.0/pre.py 1970-01-01 00:00:00 +0000 |
4058 | @@ -1,247 +0,0 @@ |
4059 | -# -*- coding: utf-8 -*- |
4060 | -############################################################################## |
4061 | -# |
4062 | -# OpenERP, Open Source Management Solution |
4063 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
4064 | -# |
4065 | -# This program is free software: you can redistribute it and/or modify |
4066 | -# it under the terms of the GNU Affero General Public License as |
4067 | -# published by the Free Software Foundation, either version 3 of the |
4068 | -# License, or (at your option) any later version. |
4069 | -# |
4070 | -# This program is distributed in the hope that it will be useful, |
4071 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
4072 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
4073 | -# GNU Affero General Public License for more details. |
4074 | -# |
4075 | -# You should have received a copy of the GNU Affero General Public License |
4076 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
4077 | -# |
4078 | -############################################################################## |
4079 | - |
4080 | -__author__ = 'Cédric Krier, <ced@tinyerp.com>' |
4081 | -__version__ = '0.1.0' |
4082 | - |
4083 | -import psycopg |
4084 | -import optparse |
4085 | -import ConfigParser |
4086 | - |
4087 | -# ----- |
4088 | - |
4089 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
4090 | - |
4091 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
4092 | - |
4093 | -group = optparse.OptionGroup(parser, "Database related options") |
4094 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
4095 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
4096 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
4097 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
4098 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
4099 | -parser.add_option_group(group) |
4100 | - |
4101 | -options = optparse.Values() |
4102 | -options.db_name = 'terp' # default value |
4103 | -parser.parse_args(values=options) |
4104 | - |
4105 | -if hasattr(options, 'config'): |
4106 | - configparser = ConfigParser.ConfigParser() |
4107 | - configparser.read([options.config]) |
4108 | - for name, value in configparser.items('options'): |
4109 | - if not (hasattr(options, name) and getattr(options, name)): |
4110 | - if value in ('true', 'True'): |
4111 | - value = True |
4112 | - if value in ('false', 'False'): |
4113 | - value = False |
4114 | - setattr(options, name, value) |
4115 | - |
4116 | -# ----- |
4117 | - |
4118 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
4119 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
4120 | -name = "dbname=%s" % options.db_name |
4121 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
4122 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
4123 | - |
4124 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
4125 | -cr = db.cursor() |
4126 | - |
4127 | -# ------------------------ # |
4128 | -# change currency rounding # |
4129 | -# ------------------------ # |
4130 | - |
4131 | -cr.execute("""SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size FROM pg_class c,pg_attribute a,pg_type t WHERE c.relname='res_currency' AND a.attname='rounding' AND c.oid=a.attrelid AND a.atttypid=t.oid""") |
4132 | -res = cr.dictfetchall() |
4133 | -if res[0]['typname'] != 'numeric': |
4134 | - for line in ( |
4135 | - "ALTER TABLE res_currency RENAME rounding TO rounding_bak", |
4136 | - "ALTER TABLE res_currency ADD rounding NUMERIC(12,6)", |
4137 | - "UPDATE res_currency SET rounding = power(10, - rounding_bak)", |
4138 | - "ALTER TABLE res_currency DROP rounding_bak", |
4139 | - ): |
4140 | - cr.execute(line) |
4141 | -cr.commit() |
4142 | - |
4143 | -# ----------------------------- # |
4144 | -# drop constraint on ir_ui_view # |
4145 | -# ----------------------------- # |
4146 | - |
4147 | -cr.execute('SELECT conname FROM pg_constraint where conname = \'ir_ui_view_type\'') |
4148 | -if cr.fetchall(): |
4149 | - cr.execute('ALTER TABLE ir_ui_view DROP CONSTRAINT ir_ui_view_type') |
4150 | -cr.commit() |
4151 | - |
4152 | -# ------------------------ # |
4153 | -# update res.partner.bank # |
4154 | -# ------------------------ # |
4155 | - |
4156 | -cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'res_partner_bank\' AND a.attname = \'iban\' AND c.oid = a.attrelid') |
4157 | -if cr.fetchall(): |
4158 | - cr.execute('ALTER TABLE res_partner_bank RENAME iban TO acc_number') |
4159 | -cr.commit() |
4160 | - |
4161 | -# ------------------------------------------- # |
4162 | -# Add perm_id to ir_model and ir_model_fields # |
4163 | -# ------------------------------------------- # |
4164 | - |
4165 | -cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'ir_model\' AND a.attname = \'perm_id\' AND c.oid = a.attrelid') |
4166 | -if not cr.fetchall(): |
4167 | - cr.execute("ALTER TABLE ir_model ADD perm_id int references perm on delete set null") |
4168 | -cr.commit() |
4169 | - |
4170 | -cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'ir_model_fields\' AND a.attname = \'perm_id\' AND c.oid = a.attrelid') |
4171 | -if not cr.fetchall(): |
4172 | - cr.execute("ALTER TABLE ir_model_fields ADD perm_id int references perm on delete set null") |
4173 | -cr.commit() |
4174 | - |
4175 | - |
4176 | -# --------------------------------- # |
4177 | -# remove name for all ir_act_window # |
4178 | -# --------------------------------- # |
4179 | - |
4180 | -cr.execute("UPDATE ir_act_window SET name = ''") |
4181 | -cr.commit() |
4182 | - |
4183 | -# ------------------------------------------------------------------------ # |
4184 | -# Create a "allow none" default access to keep the behaviour of the system # |
4185 | -# ------------------------------------------------------------------------ # |
4186 | - |
4187 | -cr.execute('SELECT model_id FROM ir_model_access') |
4188 | -res= cr.fetchall() |
4189 | -for r in res: |
4190 | - cr.execute('SELECT id FROM ir_model_access WHERE model_id = %d AND group_id IS NULL', (r[0],)) |
4191 | - if not cr.fetchall(): |
4192 | - cr.execute("INSERT into ir_model_access (name,model_id,group_id) VALUES ('Auto-generated access by migration',%d,NULL)",(r[0],)) |
4193 | -cr.commit() |
4194 | - |
4195 | -# ------------------------------------------------- # |
4196 | -# Drop view report_account_analytic_line_to_invoice # |
4197 | -# ------------------------------------------------- # |
4198 | - |
4199 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_line_to_invoice\'') |
4200 | -if cr.fetchall(): |
4201 | - cr.execute('DROP VIEW report_account_analytic_line_to_invoice') |
4202 | -cr.commit() |
4203 | - |
4204 | -# --------------------------- # |
4205 | -# Drop state from hr_employee # |
4206 | -# --------------------------- # |
4207 | - |
4208 | -cr.execute('SELECT * FROM pg_class c, pg_attribute a WHERE c.relname=\'hr_employee\' AND a.attname=\'state\' AND c.oid=a.attrelid') |
4209 | -if cr.fetchall(): |
4210 | - cr.execute('ALTER TABLE hr_employee DROP state') |
4211 | -cr.commit() |
4212 | - |
4213 | -# ------------ # |
4214 | -# Add timezone # |
4215 | -# ------------ # |
4216 | - |
4217 | -cr.execute('SELECT id FROM ir_values where model=\'res.users\' AND key=\'meta\' AND name=\'tz\'') |
4218 | -if not cr.fetchall(): |
4219 | - import pytz, pickle |
4220 | - meta = pickle.dumps({'type':'selection', 'string':'Timezone', 'selection': [(x, x) for x in pytz.all_timezones]}) |
4221 | - value = pickle.dumps(False) |
4222 | - cr.execute('INSERT INTO ir_values (name, key, model, meta, key2, object, value) VALUES (\'tz\', \'meta\', \'res.users\', %s, \'tz\', %s, %s)', (meta,False, value)) |
4223 | -cr.commit() |
4224 | - |
4225 | -# ------------------------- # |
4226 | -# change product_uom factor # |
4227 | -# ------------------------- # |
4228 | - |
4229 | -cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a, pg_type t WHERE c.relname = \'product_uom\' AND a.attname = \'factor\' AND c.oid = a.attrelid AND a.atttypid = t.oid AND t.typname = \'float8\'') |
4230 | -if cr.fetchall(): |
4231 | - cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_account\'') |
4232 | - if cr.fetchall(): |
4233 | - cr.execute('DROP VIEW report_account_analytic_planning_stat_account') |
4234 | - cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'') |
4235 | - if cr.fetchall(): |
4236 | - cr.execute('DROP VIEW report_account_analytic_planning_stat') |
4237 | - cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_user\'') |
4238 | - if cr.fetchall(): |
4239 | - cr.execute('DROP VIEW report_account_analytic_planning_stat_user') |
4240 | - cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'') |
4241 | - if cr.fetchall(): |
4242 | - cr.execute('DROP VIEW report_purchase_order_product') |
4243 | - cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'') |
4244 | - if cr.fetchall(): |
4245 | - cr.execute('DROP VIEW report_purchase_order_category') |
4246 | - cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'') |
4247 | - if cr.fetchall(): |
4248 | - cr.execute('DROP VIEW report_sale_order_product') |
4249 | - cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'') |
4250 | - if cr.fetchall(): |
4251 | - cr.execute('DROP VIEW report_sale_order_category') |
4252 | - cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_hr_timesheet_invoice_journal\'') |
4253 | - if cr.fetchall(): |
4254 | - cr.execute('DROP VIEW report_hr_timesheet_invoice_journal') |
4255 | - |
4256 | - cr.execute('ALTER TABLE product_uom RENAME COLUMN factor to temp_column') |
4257 | - cr.execute('ALTER TABLE product_uom ADD COLUMN factor NUMERIC(12,6)') |
4258 | - cr.execute('UPDATE product_uom SET factor = temp_column') |
4259 | - cr.execute('ALTER TABLE product_uom ALTER factor SET NOT NULL') |
4260 | - cr.execute('ALTER TABLE product_uom DROP COLUMN temp_column') |
4261 | -cr.commit() |
4262 | - |
4263 | - |
4264 | -# ------------------------------------------------- # |
4265 | -# Drop name_uniq constraint on stock_production_lot # |
4266 | -# ------------------------------------------------- # |
4267 | - |
4268 | -cr.execute('SELECT conname FROM pg_constraint where conname = \'stock_production_lot_name_uniq\'') |
4269 | -if cr.fetchall(): |
4270 | - cr.execute('ALTER TABLE stock_production_lot DROP CONSTRAINT stock_production_lot_name_uniq') |
4271 | -cr.commit() |
4272 | - |
4273 | -# ------------------------------------ # |
4274 | -# Put country/state code in upper case # |
4275 | -# ------------------------------------ # |
4276 | - |
4277 | -cr.execute('UPDATE res_country SET code = UPPER(code)') |
4278 | -cr.execute('UPDATE res_country_state SET code = UPPER(code)') |
4279 | -cr.commit() |
4280 | - |
4281 | -# --------------------------------------------- # |
4282 | -# Add primary key on tables inherits ir_actions # |
4283 | -# --------------------------------------------- # |
4284 | - |
4285 | -cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_report_xml_pkey\' and tablename = \'ir_act_report_xml\'') |
4286 | -if not cr.fetchall(): |
4287 | - cr.execute('ALTER TABLE ir_act_report_xml ADD PRIMARY KEY (id)') |
4288 | -cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_report_custom_pkey\' and tablename = \'ir_act_report_custom\'') |
4289 | -if not cr.fetchall(): |
4290 | - cr.execute('ALTER TABLE ir_act_report_custom ADD PRIMARY KEY (id)') |
4291 | -cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_group_pkey\' and tablename = \'ir_act_group\'') |
4292 | -if not cr.fetchall(): |
4293 | - cr.execute('ALTER TABLE ir_act_group ADD PRIMARY KEY (id)') |
4294 | -cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_execute_pkey\' and tablename = \'ir_act_execute\'') |
4295 | -if not cr.fetchall(): |
4296 | - cr.execute('ALTER TABLE ir_act_execute ADD PRIMARY KEY (id)') |
4297 | -cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_wizard_pkey\' and tablename = \'ir_act_wizard\'') |
4298 | -if not cr.fetchall(): |
4299 | - cr.execute('ALTER TABLE ir_act_wizard ADD PRIMARY KEY (id)') |
4300 | -cr.commit() |
4301 | - |
4302 | -cr.close |
4303 | - |
4304 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
4305 | - |
4306 | |
4307 | === removed directory 'doc/migrate/4.0.0-4.2.0/tiny' |
4308 | === removed file 'doc/migrate/4.0.0-4.2.0/tiny/README' |
4309 | --- doc/migrate/4.0.0-4.2.0/tiny/README 2007-10-05 13:31:17 +0000 |
4310 | +++ doc/migrate/4.0.0-4.2.0/tiny/README 1970-01-01 00:00:00 +0000 |
4311 | @@ -1,1 +0,0 @@ |
4312 | -Those scripts are provide as example of customization of migration scripts |
4313 | |
4314 | === removed file 'doc/migrate/4.0.0-4.2.0/tiny/pre-tiny.py' |
4315 | --- doc/migrate/4.0.0-4.2.0/tiny/pre-tiny.py 2010-06-21 09:05:43 +0000 |
4316 | +++ doc/migrate/4.0.0-4.2.0/tiny/pre-tiny.py 1970-01-01 00:00:00 +0000 |
4317 | @@ -1,188 +0,0 @@ |
4318 | -# -*- coding: utf-8 -*- |
4319 | -############################################################################## |
4320 | -# |
4321 | -# OpenERP, Open Source Management Solution |
4322 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
4323 | -# |
4324 | -# This program is free software: you can redistribute it and/or modify |
4325 | -# it under the terms of the GNU Affero General Public License as |
4326 | -# published by the Free Software Foundation, either version 3 of the |
4327 | -# License, or (at your option) any later version. |
4328 | -# |
4329 | -# This program is distributed in the hope that it will be useful, |
4330 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
4331 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
4332 | -# GNU Affero General Public License for more details. |
4333 | -# |
4334 | -# You should have received a copy of the GNU Affero General Public License |
4335 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
4336 | -# |
4337 | -############################################################################## |
4338 | - |
4339 | -__author__ = 'Cédric Krier, <ced@tinyerp.com>' |
4340 | -__version__ = '0.1.0' |
4341 | - |
4342 | -import psycopg |
4343 | -import optparse |
4344 | -import ConfigParser |
4345 | - |
4346 | -# ----- |
4347 | - |
4348 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
4349 | - |
4350 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
4351 | - |
4352 | -group = optparse.OptionGroup(parser, "Database related options") |
4353 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
4354 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
4355 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
4356 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
4357 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
4358 | -parser.add_option_group(group) |
4359 | - |
4360 | -options = optparse.Values() |
4361 | -options.db_name = 'terp' # default value |
4362 | -parser.parse_args(values=options) |
4363 | - |
4364 | -if hasattr(options, 'config'): |
4365 | - configparser = ConfigParser.ConfigParser() |
4366 | - configparser.read([options.config]) |
4367 | - for name, value in configparser.items('options'): |
4368 | - if not (hasattr(options, name) and getattr(options, name)): |
4369 | - if value in ('true', 'True'): |
4370 | - value = True |
4371 | - if value in ('false', 'False'): |
4372 | - value = False |
4373 | - setattr(options, name, value) |
4374 | - |
4375 | -raise Exception('This script is provided as an example, you must custom it before') |
4376 | - |
4377 | -# ----- |
4378 | - |
4379 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
4380 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
4381 | -name = "dbname=%s" % options.db_name |
4382 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
4383 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
4384 | - |
4385 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
4386 | -cr = db.cursor() |
4387 | - |
4388 | -# fix country |
4389 | - |
4390 | - |
4391 | -cr.execute('SELECT code from res_country where code is not null group by code') |
4392 | -res = cr.fetchall() |
4393 | - |
4394 | -for c in res: |
4395 | - cr.execute('SELECT max(id) from res_country where code = %s group by code', (c[0],)) |
4396 | - res2 = cr.fetchone() |
4397 | - cr.execute('SELECT id from res_country where code = %s', (c[0],)) |
4398 | - ids = ','.join(map(lambda x: str(x[0]), cr.fetchall())) |
4399 | - cr.execute('UPDATE res_partner_address set country_id = %d where country_id in ('+ids+')', (res2[0],)) |
4400 | - cr.execute('DELETE FROM res_country WHERE code = %s and id <> %d', (c[0], res2[0],)) |
4401 | -cr.commit() |
4402 | - |
4403 | - |
4404 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'') |
4405 | -if cr.fetchall(): |
4406 | - cr.execute('DROP VIEW report_account_analytic_planning_stat') |
4407 | -cr.commit() |
4408 | - |
4409 | - |
4410 | -cr.execute('SELECT name from ( SELECT name, count(name) AS n FROM res_partner GROUP BY name ) AS foo WHERE n > 1') |
4411 | -res = cr.fetchall() |
4412 | - |
4413 | - |
4414 | -for p in res: |
4415 | - cr.execute('SELECT max(id) FROM res_partner WHERE name = %s GROUP BY name', (p[0],)) |
4416 | - res2 = cr.fetchone() |
4417 | - cr.execute('UPDATE res_partner set active = False WHERE name = %s and id <> %d', (p[0], res2[0],)) |
4418 | - cr.execute('SELECT id FROM res_partner WHERE name = %s AND id <> %d', (p[0], res2[0],)) |
4419 | - res3 = cr.fetchall() |
4420 | - i = 0 |
4421 | - for id in res3: |
4422 | - name = p[0]+' old' |
4423 | - if i: |
4424 | - name = name + ' ' + str(i) |
4425 | - cr.execute('UPDATE res_partner set name = %s WHERE id = %d', (name, id[0])) |
4426 | - i += 1 |
4427 | -cr.commit() |
4428 | - |
4429 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_line_to_invoice\'') |
4430 | -if cr.fetchall(): |
4431 | - cr.execute('DROP VIEW report_account_analytic_line_to_invoice') |
4432 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_timesheet_invoice\'') |
4433 | -if cr.fetchall(): |
4434 | - cr.execute('drop VIEW report_timesheet_invoice') |
4435 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'') |
4436 | -if cr.fetchall(): |
4437 | - cr.execute('drop VIEW report_purchase_order_category') |
4438 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'') |
4439 | -if cr.fetchall(): |
4440 | - cr.execute('drop VIEW report_purchase_order_product') |
4441 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'') |
4442 | -if cr.fetchall(): |
4443 | - cr.execute('drop VIEW report_sale_order_category') |
4444 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'') |
4445 | -if cr.fetchall(): |
4446 | - cr.execute('drop VIEW report_sale_order_product') |
4447 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_timesheet_user\'') |
4448 | -if cr.fetchall(): |
4449 | - cr.execute('drop VIEW report_timesheet_user') |
4450 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_task_user_pipeline_open\'') |
4451 | -if cr.fetchall(): |
4452 | - cr.execute('drop VIEW report_task_user_pipeline_open') |
4453 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'hr_timesheet_sheet_sheet_day\'') |
4454 | -if cr.fetchall(): |
4455 | - cr.execute('drop VIEW hr_timesheet_sheet_sheet_day') |
4456 | -cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'hr_timesheet_sheet_sheet_account\'') |
4457 | -if cr.fetchall(): |
4458 | - cr.execute('drop VIEW hr_timesheet_sheet_sheet_account') |
4459 | -cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_sale_stats\'') |
4460 | -if cr.fetchall(): |
4461 | - cr.execute('drop VIEW sale_journal_sale_stats') |
4462 | -cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_picking_stats\'') |
4463 | -if cr.fetchall(): |
4464 | - cr.execute('drop VIEW sale_journal_picking_stats') |
4465 | -cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_invoice_type_stats\'') |
4466 | -if cr.fetchall(): |
4467 | - cr.execute('drop VIEW sale_journal_invoice_type_stats') |
4468 | - |
4469 | -cr.execute('ALTER TABLE product_template ALTER list_price TYPE numeric(16,2)') |
4470 | -cr.execute('ALTER TABLE product_template ALTER standard_price TYPE numeric(16,2)') |
4471 | -cr.execute('ALTER TABLE product_product ALTER price_extra TYPE numeric(16,2)') |
4472 | -cr.execute('ALTER TABLE product_product ALTER price_margin TYPE numeric(16,2)') |
4473 | -cr.execute('ALTER TABLE pricelist_partnerinfo ALTER price TYPE numeric(16,2)') |
4474 | -cr.execute('ALTER TABLE account_invoice_line ALTER price_unit TYPE numeric(16,2)') |
4475 | -cr.execute('ALTER TABLE purchase_order_line ALTER price_unit TYPE numeric(16,2)') |
4476 | -cr.execute('ALTER TABLE sale_order_line ALTER price_unit TYPE numeric(16,2)') |
4477 | -cr.commit() |
4478 | - |
4479 | - |
4480 | -cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_document_fields\'') |
4481 | -if cr.fetchall(): |
4482 | - cr.execute('DROP TABLE subscription_document_fields') |
4483 | -cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_document\'') |
4484 | -if cr.fetchall(): |
4485 | - cr.execute('DROP TABLE subscription_document') |
4486 | -cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_subscription_history\'') |
4487 | -if cr.fetchall(): |
4488 | - cr.execute('DROP TABLE subscription_subscription_history') |
4489 | -cr.commit() |
4490 | - |
4491 | -# -------------------- # |
4492 | -# Change currency rate # |
4493 | -# -------------------- # |
4494 | - |
4495 | -cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'res_currency_rate\' AND a.attname = \'rate_old\' AND c.oid = a.attrelid') |
4496 | -if not cr.fetchall(): |
4497 | - cr.execute('ALTER TABLE res_currency_rate ADD rate_old NUMERIC(12,6)') |
4498 | - cr.execute('UPDATE res_currency_rate SET rate_old = rate') |
4499 | - cr.execute('UPDATE res_currency_rate SET rate = (1 / rate_old)') |
4500 | -cr.commit() |
4501 | - |
4502 | -cr.close |
4503 | - |
4504 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
4505 | - |
4506 | |
4507 | === removed directory 'doc/migrate/4.2.0-4.4.0' |
4508 | === removed file 'doc/migrate/4.2.0-4.4.0/pre.py' |
4509 | --- doc/migrate/4.2.0-4.4.0/pre.py 2009-10-20 10:52:23 +0000 |
4510 | +++ doc/migrate/4.2.0-4.4.0/pre.py 1970-01-01 00:00:00 +0000 |
4511 | @@ -1,110 +0,0 @@ |
4512 | -# -*- coding: utf-8 -*- |
4513 | -############################################################################## |
4514 | -# |
4515 | -# OpenERP, Open Source Management Solution |
4516 | -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). |
4517 | -# |
4518 | -# This program is free software: you can redistribute it and/or modify |
4519 | -# it under the terms of the GNU Affero General Public License as |
4520 | -# published by the Free Software Foundation, either version 3 of the |
4521 | -# License, or (at your option) any later version. |
4522 | -# |
4523 | -# This program is distributed in the hope that it will be useful, |
4524 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
4525 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
4526 | -# GNU Affero General Public License for more details. |
4527 | -# |
4528 | -# You should have received a copy of the GNU Affero General Public License |
4529 | -# along with this program. If not, see <http://www.gnu.org/licenses/>. |
4530 | -# |
4531 | -############################################################################## |
4532 | - |
4533 | -__version__ = '0.1.0' |
4534 | - |
4535 | -import psycopg |
4536 | -import optparse |
4537 | -import ConfigParser |
4538 | - |
4539 | -# ----- |
4540 | - |
4541 | -parser = optparse.OptionParser(version="Tiny ERP server migration script " + __version__) |
4542 | - |
4543 | -parser.add_option("-c", "--config", dest="config", help="specify path to Tiny ERP config file") |
4544 | - |
4545 | -group = optparse.OptionGroup(parser, "Database related options") |
4546 | -group.add_option("--db_host", dest="db_host", help="specify the database host") |
4547 | -group.add_option("--db_port", dest="db_port", help="specify the database port") |
4548 | -group.add_option("-d", "--database", dest="db_name", help="specify the database name") |
4549 | -group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name") |
4550 | -group.add_option("-w", "--db_password", dest="db_password", help="specify the database password") |
4551 | -parser.add_option_group(group) |
4552 | - |
4553 | -options = optparse.Values() |
4554 | -options.db_name = 'terp' # default value |
4555 | -parser.parse_args(values=options) |
4556 | - |
4557 | -if hasattr(options, 'config'): |
4558 | - configparser = ConfigParser.ConfigParser() |
4559 | - configparser.read([options.config]) |
4560 | - for name, value in configparser.items('options'): |
4561 | - if not (hasattr(options, name) and getattr(options, name)): |
4562 | - if value in ('true', 'True'): |
4563 | - value = True |
4564 | - if value in ('false', 'False'): |
4565 | - value = False |
4566 | - setattr(options, name, value) |
4567 | - |
4568 | -# ----- |
4569 | - |
4570 | -host = hasattr(options, 'db_host') and "host=%s" % options.db_host or '' |
4571 | -port = hasattr(options, 'db_port') and "port=%s" % options.db_port or '' |
4572 | -name = "dbname=%s" % options.db_name |
4573 | -user = hasattr(options, 'db_user') and "user=%s" % options.db_user or '' |
4574 | -password = hasattr(options, 'db_password') and "password=%s" % options.db_password or '' |
4575 | - |
4576 | -db = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0) |
4577 | -cr = db.cursor() |
4578 | - |
4579 | -# ------------------------------ # |
4580 | -# drop not null on ir_attachment # |
4581 | -# ------------------------------ # |
4582 | - |
4583 | -cr.execute('ALTER TABLE ir_attachment \ |
4584 | - ALTER COLUMN res_model DROP NOT NULL, \ |
4585 | - ALTER COLUMN res_id DROP NOT NULL') |
4586 | -cr.commit() |
4587 | - |
4588 | -# ---------------------------------- # |
4589 | -# change case date_deadline rounding # |
4590 | -# ---------------------------------- # |
4591 | - |
4592 | -cr.execute("""SELECT |
4593 | -c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE |
4594 | -WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size FROM pg_class |
4595 | -c,pg_attribute a,pg_type t WHERE c.relname='crm_case' AND |
4596 | -a.attname='date_deadline' AND c.oid=a.attrelid AND a.atttypid=t.oid""") |
4597 | - |
4598 | -res = cr.dictfetchall() |
4599 | -if res[0]['typname'] != 'timestamp': |
4600 | - for line in ( |
4601 | - "ALTER TABLE crm_case RENAME date_deadline TO date_deadline_bak", |
4602 | - "ALTER TABLE crm_case ADD date_deadline timestamp", |
4603 | - "UPDATE crm_case SET date_deadline = date_deadline_bak", |
4604 | - "ALTER TABLE crm_case DROP date_deadline_bak", |
4605 | - ): |
4606 | - cr.execute(line) |
4607 | -cr.commit() |
4608 | - |
4609 | -cr.execute('drop view report_task_user_pipeline_open'); |
4610 | -cr.commit() |
4611 | - |
4612 | -cr.execute('alter table ir_model_fields add state varchar(26)') |
4613 | -cr.execute('alter table ir_model_fields add select_level varchar(3)') |
4614 | -cr.execute('alter table ir_act_wizard add primary key(id)') |
4615 | -cr.commit() |
4616 | - |
4617 | - |
4618 | -cr.close() |
4619 | - |
4620 | -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
4621 | - |
4622 | |
4623 | === added file 'tools/find_sql_injection.py' |
4624 | --- tools/find_sql_injection.py 1970-01-01 00:00:00 +0000 |
4625 | +++ tools/find_sql_injection.py 2017-10-06 09:59:17 +0000 |
4626 | @@ -0,0 +1,173 @@ |
4627 | +#!/usr/bin/env python |
4628 | + |
4629 | +# original code from https://github.com/uber/py-find-injection |
4630 | + |
4631 | +import argparse |
4632 | +import ast |
4633 | +import sys |
4634 | +import os |
4635 | +import re |
4636 | +import linecache |
4637 | +import glob |
4638 | + |
4639 | +version_info = (0, 1, 1) |
4640 | +__version__ = '.'.join(map(str, version_info)) |
4641 | + |
4642 | + |
4643 | +def stringify(node): |
4644 | + if isinstance(node, ast.Name): |
4645 | + return node.id |
4646 | + elif isinstance(node, ast.Attribute): |
4647 | + return '%s.%s' % (stringify(node.value), node.attr) |
4648 | + elif isinstance(node, ast.Subscript): |
4649 | + return '%s[%s]' % (stringify(node.value), stringify(node.slice)) |
4650 | + elif isinstance(node, ast.Index): |
4651 | + return stringify(node.value) |
4652 | + elif isinstance(node, ast.Call): |
4653 | + return '%s(%s, %s)' % (stringify(node.func), stringify(node.args), stringify(node.keywords)) |
4654 | + elif isinstance(node, list): |
4655 | + return '[%s]' % (', '.join(stringify(n) for n in node)) |
4656 | + elif isinstance(node, ast.Str): |
4657 | + return node.s |
4658 | + else: |
4659 | + return ast.dump(node) |
4660 | + |
4661 | + |
4662 | +class IllegalLine(object): |
4663 | + def __init__(self, reason, node, filename): |
4664 | + self.reason = reason |
4665 | + self.lineno = node.lineno |
4666 | + self.filename = filename |
4667 | + self.node = node |
4668 | + |
4669 | + def __str__(self): |
4670 | + return "%s:%d\t%s" % (self.filename, self.lineno, self.reason) |
4671 | + |
4672 | + def __repr__(self): |
4673 | + return "IllegalLine<%s, %s:%s>" % (self.reason, self.filename, self.lineno) |
4674 | + |
4675 | + |
4676 | +def find_assignment_in_context(variable, context): |
4677 | + if isinstance(context, (ast.FunctionDef, ast.Module, ast.For, ast.While, ast.With, ast.If)): |
4678 | + for node in reversed(list(ast.iter_child_nodes(context))): |
4679 | + if isinstance(node, ast.Assign): |
4680 | + if variable in (stringify(c) for c in node.targets): |
4681 | + return node |
4682 | + if getattr(context, 'parent', None): |
4683 | + return find_assignment_in_context(variable, context.parent) |
4684 | + else: |
4685 | + return None |
4686 | + |
4687 | + |
4688 | +class Checker(ast.NodeVisitor): |
4689 | + def __init__(self, filename, *args, **kwargs): |
4690 | + self.filename = filename |
4691 | + self.errors = [] |
4692 | + super(Checker, self).__init__(*args, **kwargs) |
4693 | + |
4694 | + def ignore_sql_check(self, line_number): |
4695 | + file_text_line = linecache.getline(self.filename, line_number) |
4696 | + res = re.search(' # (.*)', file_text_line) |
4697 | + if res and (res.group(1).startswith('ignore_sql_check') or |
4698 | + res.group(1).startswith('not_a_user_entry')): |
4699 | + return True |
4700 | + else: |
4701 | + return False |
4702 | + |
4703 | + |
4704 | + def check_execute(self, node): |
4705 | + if isinstance(node, ast.BinOp): |
4706 | + if isinstance(node.op, ast.Mod): |
4707 | + if not self.ignore_sql_check(node.lineno): |
4708 | + return IllegalLine('string interpolation of SQL query', node, self.filename) |
4709 | + elif isinstance(node.op, ast.Add): |
4710 | + if not self.ignore_sql_check(node.lineno): |
4711 | + return IllegalLine('string concatenation of SQL query', node, self.filename) |
4712 | + elif isinstance(node, ast.Call): |
4713 | + if isinstance(node.func, ast.Attribute): |
4714 | + if node.func.attr == 'format': |
4715 | + if not self.ignore_sql_check(node.lineno): |
4716 | + return IllegalLine('str.format called on SQL query', node, self.filename) |
4717 | + elif isinstance(node, ast.Name): |
4718 | + # now we need to figure out where that query is assigned. blargh. |
4719 | + assignment = find_assignment_in_context(node.id, node) |
4720 | + if assignment is not None: |
4721 | + return self.check_execute(assignment.value) |
4722 | + |
4723 | + def visit_Call(self, node): |
4724 | + function_name = stringify(node.func) |
4725 | + if function_name.lower() in ('cr.execute', 'cursor.execute'): |
4726 | + node.args[0].parent = node |
4727 | + node_error = self.check_execute(node.args[0]) |
4728 | + if node_error: |
4729 | + self.errors.append(node_error) |
4730 | + #elif function_name.lower() == 'eval': |
4731 | + # self.errors.append(IllegalLine('eval() is just generally evil', node, self.filename)) |
4732 | + self.generic_visit(node) |
4733 | + |
4734 | + def visit(self, node): |
4735 | + """Visit a node.""" |
4736 | + method = 'visit_' + node.__class__.__name__ |
4737 | + visitor = getattr(self, method, self.generic_visit) |
4738 | + return visitor(node) |
4739 | + |
4740 | + def generic_visit(self, node): |
4741 | + """Called if no explicit visitor function exists for a node.""" |
4742 | + for field, value in ast.iter_fields(node): |
4743 | + if isinstance(value, list): |
4744 | + for item in value: |
4745 | + if isinstance(item, ast.AST): |
4746 | + item.parent = node |
4747 | + self.visit(item,) |
4748 | + elif isinstance(value, ast.AST): |
4749 | + value.parent = node |
4750 | + self.visit(value) |
4751 | + |
4752 | + |
4753 | +def check(filename): |
4754 | + c = Checker(filename=filename) |
4755 | + with open(filename, 'r') as fobj: |
4756 | + try: |
4757 | + parsed = ast.parse(fobj.read(), filename) |
4758 | + c.visit(parsed) |
4759 | + except Exception: |
4760 | + raise |
4761 | + return c.errors |
4762 | + |
4763 | + |
4764 | +def main(): |
4765 | + parser = argparse.ArgumentParser( |
4766 | + description='Look for patterns in python source files that might indicate SQL injection vulnerabilities', |
4767 | + epilog='Exit status is 0 if all files are okay, 1 if any files have an error. Errors are printed to stdout' |
4768 | + ) |
4769 | + parser.add_argument('--version', action='version', version='%(prog)s ' + __version__) |
4770 | + parser.add_argument('files', nargs='+', help='file(s) or folder(s) to check. Folders are scanned recursively.')
4771 | + args = parser.parse_args() |
4772 | + |
4773 | + errors = [] |
4774 | + file_list = [] |
4775 | + for fname in args.files: |
4776 | + if not os.path.exists(fname): |
4777 | + errors.extend('%s doesn\'t exist' % fname)
4778 | + if os.path.isdir(fname): |
4779 | + for root, directories, filenames in os.walk(fname): |
4780 | + for filename in filenames: |
4781 | + if filename.endswith('.py'): |
4782 | + file_list.append(os.path.join(root,filename)) |
4783 | + elif os.path.isfile(fname): |
4784 | + file_list.append(fname) |
4785 | + |
4786 | + for fname in file_list: |
4787 | + these_errors = check(fname) |
4788 | + if these_errors: |
4789 | + print '\n'.join(str(e) for e in these_errors) |
4790 | + errors.extend(these_errors) |
4791 | + if errors: |
4792 | + print '%d total errors' % len(errors) |
4793 | + return 1 |
4794 | + else: |
4795 | + return 0 |
4796 | + |
4797 | + |
4798 | +if __name__ == '__main__': |
4799 | + sys.exit(main()) |
to fix