Merge lp:~openerp-community/openobject-extension/import_odbc_base_external_dbsource into lp:openobject-extension/oerp6.1-stable

Status: Needs review
Proposed branch: lp:~openerp-community/openobject-extension/import_odbc_base_external_dbsource
Merge into: lp:openobject-extension/oerp6.1-stable
Diff against target: 823 lines (+746/-0)
13 files modified
base_external_dbsource/__init__.py (+24/-0)
base_external_dbsource/__openerp__.py (+62/-0)
base_external_dbsource/base_external_dbsource.py (+159/-0)
base_external_dbsource/base_external_dbsource_demo.xml (+15/-0)
base_external_dbsource/base_external_dbsource_view.xml (+54/-0)
base_external_dbsource/security/ir.model.access.csv (+2/-0)
base_external_dbsource/test/dbsource_connect.yml (+5/-0)
import_odbc/__init__.py (+24/-0)
import_odbc/__openerp__.py (+84/-0)
import_odbc/import_odbc.py (+214/-0)
import_odbc/import_odbc_demo.xml (+15/-0)
import_odbc/import_odbc_view.xml (+86/-0)
import_odbc/security/ir.model.access.csv (+2/-0)
To merge this branch: bzr merge lp:~openerp-community/openobject-extension/import_odbc_base_external_dbsource
Reviewer Review Type Date Requested Status
extra-addons-commiter Pending
Review via email: mp+118290@code.launchpad.net

Description of the change

[ADD] import_odbc and base_external_dbsource from lp:~dreis-pt/addons-tko/reis

To post a comment you must log in.
355. By webtec <webtec@webtec-dev>

ADD: new execute method that also returns the column list; execute can accept a parameter list; added demo connection to local PostgreSQL

356. By webtec <webtec@webtec-dev>

CHG: adapted to base_external_dbsource; code refactored

357. By Daniel Reis

[CHG] simplified dbsource.execute api to a single method with two possible behaviors

Unmerged revisions

357. By Daniel Reis

[CHG] simplified dbsource.execute api to a single method with two possible behaviors

356. By webtec <webtec@webtec-dev>

CHG: adapted to base_external_dbsource; code refactored

355. By webtec <webtec@webtec-dev>

ADD: new execute method that also returns the column list; execute can accept a parameter list; added demo connection to local PostgreSQL

354. By Maxime Chambreuil (http://www.savoirfairelinux.com)

[ADD] import_odbc and base_external_dbsource from addons-tko

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== added directory 'base_external_dbsource'
=== added file 'base_external_dbsource/__init__.py'
--- base_external_dbsource/__init__.py 1970-01-01 00:00:00 +0000
+++ base_external_dbsource/__init__.py 2012-09-13 10:12:28 +0000
@@ -0,0 +1,24 @@
1# -*- coding: utf-8 -*-
2##############################################################################
3#
4# Daniel Reis
5# 2011
6#
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU Affero General Public License as
9# published by the Free Software Foundation, either version 3 of the
10# License, or (at your option) any later version.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU Affero General Public License for more details.
16#
17# You should have received a copy of the GNU Affero General Public License
18# along with this program. If not, see <http://www.gnu.org/licenses/>.
19#
20##############################################################################
21
22import base_external_dbsource
23
24# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
025
=== added file 'base_external_dbsource/__openerp__.py'
--- base_external_dbsource/__openerp__.py 1970-01-01 00:00:00 +0000
+++ base_external_dbsource/__openerp__.py 2012-09-13 10:12:28 +0000
@@ -0,0 +1,62 @@
1# -*- coding: utf-8 -*-
2##############################################################################
3#
4# Daniel Reis, 2011
5# Additional contributions by Maxime Chambreuil, Savoir-faire Linux
6#
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU Affero General Public License as
9# published by the Free Software Foundation, either version 3 of the
10# License, or (at your option) any later version.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU Affero General Public License for more details.
16#
17# You should have received a copy of the GNU Affero General Public License
18# along with this program. If not, see <http://www.gnu.org/licenses/>.
19#
20##############################################################################
21
22{
23 'name': 'External Database Sources',
24 'version': '61.3',
25 'category': 'Tools',
26 'description': """
27This module allows you to define connections to foreign databases using ODBC,
28Oracle Client or SQLAlchemy.
29
30Databases sources can be configured in Settings > Configuration -> Data sources.
31
32Depending on the database, you need:
33 * to install unixodbc and python-pyodbc packages to use ODBC connections.
34 * to install FreeTDS driver (tdsodbc package) and configure it through ODBC to
35 connect to Microsoft SQL Server.
36 * to install and configure Oracle Instant Client and cx_Oracle python library
37 to connect to Oracle.
38 """,
39 'author': 'Daniel Reis',
40 'website': 'http://launchpad.net/addons-tko',
41 'images': [
42 'images/screenshot01.png',
43 ],
44 'depends': [
45 'base',
46 ],
47 'init': [],
48 'data': [
49 'base_external_dbsource_view.xml',
50 'security/ir.model.access.csv',
51 ],
52 'demo': [
53 'base_external_dbsource_demo.xml',
54 ],
55 'test': [
56 'dbsource_connect.yml',
57 ],
58 'installable': True,
59 'active': False,
60}
61
62# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
063
=== added file 'base_external_dbsource/base_external_dbsource.py'
--- base_external_dbsource/base_external_dbsource.py 1970-01-01 00:00:00 +0000
+++ base_external_dbsource/base_external_dbsource.py 2012-09-13 10:12:28 +0000
@@ -0,0 +1,159 @@
1# -*- coding: utf-8 -*-
2##############################################################################
3#
4# Daniel Reis
5# 2011
6#
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU Affero General Public License as
9# published by the Free Software Foundation, either version 3 of the
10# License, or (at your option) any later version.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU Affero General Public License for more details.
16#
17# You should have received a copy of the GNU Affero General Public License
18# along with this program. If not, see <http://www.gnu.org/licenses/>.
19#
20##############################################################################
21
import os
import logging

from osv import fields, osv
from openerp.tools.translate import _
import openerp.tools as tools

_logger = logging.getLogger(__name__)

# (code, label) pairs for every connector whose client libraries could be
# imported; used as the selection list of the 'connector' field below.
# Each probe catches ImportError only (a bare "except:" would also hide
# unrelated startup errors in the driver modules).
CONNECTORS = []

try:
    import sqlalchemy
    import pymssql
    CONNECTORS.append(('mssql', 'Microsoft SQL Server'))
except ImportError:
    _logger.info('MS SQL Server not available. Please install "sqlalchemy" and "pymssql" python packages.')

try:
    import sqlalchemy
    import MySQLdb
    CONNECTORS.append(('mysql', 'MySQL'))
except ImportError:
    _logger.info('MySQL not available. Please install "sqlalchemy" and "MySQLdb" python packages.')

try:
    import pyodbc
    CONNECTORS.append(('pyodbc', 'ODBC'))
except ImportError:
    _logger.info('ODBC libraries not available. Please install "unixodbc" and "python-pyodbc" packages.')

try:
    import cx_Oracle
    CONNECTORS.append(('cx_Oracle', 'Oracle'))
except ImportError:
    _logger.info('Oracle libraries not available. Please install "cx_Oracle" python package.')

# psycopg2 is a hard dependency of the OpenERP server itself, so the
# PostgreSQL connector is always available and needs no guard.
import psycopg2
CONNECTORS.append(('postgresql', 'PostgreSQL'))

try:
    import sqlalchemy
    CONNECTORS.append(('sqlite', 'SQLite'))
except ImportError:
    _logger.info('SQLAlchemy not available. Please install "sqlalchemy" python package.')
66class base_external_dbsource(osv.osv):
67 _name = "base.external.dbsource"
68 _description = 'External Database Sources'
69 _columns = {
70 'name': fields.char('Datasource name', required=True, size=64),
71 'conn_string': fields.text('Connection string', help="""\
72Sample connection strings:
73- Microsoft SQL Server: mssql+pymssql://username:%s@server:port/dbname?charset=utf8
74- MySQL: mysql://user:%s@server:port/dbname
75- ODBC: DRIVER={FreeTDS};SERVER=server.address;Database=mydb;UID=sa
76- ORACLE: username/%s@//server.address:port/instance
77- PostgreSQL: dbname='template1' user='dbuser' host='localhost' port='5432' password=%s
78- SQLite: sqlite:///test.db
79"""),
80 'password': fields.char('Password' , size=40),
81 'connector': fields.selection(CONNECTORS, 'Connector', required=True,
82 help = "If a connector is missing from the list, check the " \
83 + "server log to confirm that the required componentes were detected."),
84 }
85
86 def conn_open(self, cr, uid, id1):
87 #Get dbsource record
88 data = self.browse(cr, uid, id1)
89 #Build the full connection string
90 connStr = data.conn_string
91 if data.password:
92 if '%s' not in data.conn_string:
93 connStr += ';PWD=%s'
94 connStr = connStr % data.password
95 #Try to connect
96 if data.connector == 'cx_Oracle':
97 os.environ['NLS_LANG'] = 'AMERICAN_AMERICA.UTF8'
98 conn = cx_Oracle.connect(connStr)
99 elif data.connector == 'pyodbc':
100 conn = pyodbc.connect(connStr)
101 elif data.connector in ('sqlite','mysql','mssql'):
102 conn = sqlalchemy.create_engine(connStr).connect()
103 elif data.connector == 'postgresql':
104 conn = psycopg2.connect(connStr)
105
106 return conn
107
108 def execute(self, cr, uid, ids, sqlquery, sqlparams=None, metadata=False, context=None):
109 """Executes SQL and returns a list of rows.
110
111 "sqlparams" can be a dict of values, that can be referenced in the SQL statement
112 using "%(key)s" or, in the case of Oracle, ":key".
113 Example:
114 sqlquery = "select * from mytable where city = %(city)s and date > %(dt)s"
115 params = {'city': 'Lisbon', 'dt': datetime.datetime(2000, 12, 31)}
116
117 If metadata=True, it will instead return a dict containing the rows list and the columns list,
118 in the format:
119 { 'cols': [ 'col_a', 'col_b', ...]
120 , 'rows': [ (a0, b0, ...), (a1, b1, ...), ...] }
121 """
122 data = self.browse(cr, uid, ids)
123 rows, cols = list(), list()
124 for obj in data:
125 conn = self.conn_open(cr, uid, obj.id)
126 if obj.connector in ["sqlite","mysql","mssql"]:
127 #using sqlalchemy
128 cur = conn.execute(sqlquery, sqlparams)
129 if metadata: cols = cur.keys()
130 rows = [r for r in cur]
131 else:
132 #using other db connectors
133 cur = conn.cursor()
134 cur.execute(sqlquery, sqlparams)
135 if metadata: cols = [d[0] for d in cur.description]
136 rows = cur.fetchall()
137 conn.close()
138 if metadata:
139 return{'cols': cols, 'rows': rows}
140 else:
141 return rows
142
143 def connection_test(self, cr, uid, ids, context=None):
144 for obj in self.browse(cr, uid, ids, context):
145 conn = False
146 try:
147 conn = self.conn_open(cr, uid, obj.id)
148 except Exception, e:
149 raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e))
150 finally:
151 try:
152 if conn: conn.close()
153 except Exception:
154 # ignored, just a consequence of the previous exception
155 pass
156 #TODO: if OK a (wizard) message box should be displayed
157 raise osv.except_osv(_("Connection test succeeded!"), _("Everything seems properly set up!"))
158
159base_external_dbsource()
0160
=== added file 'base_external_dbsource/base_external_dbsource_demo.xml'
--- base_external_dbsource/base_external_dbsource_demo.xml 1970-01-01 00:00:00 +0000
+++ base_external_dbsource/base_external_dbsource_demo.xml 2012-09-13 10:12:28 +0000
@@ -0,0 +1,15 @@
1<?xml version="1.0"?>
2<openerp>
3 <data>
4
5 <record model="base.external.dbsource" id="demo_postgre">
6 <field name="name">PostgreSQL local</field>
7 <field name="conn_string">dbname='postgres' password=%s</field>
8 <field name="password">postgresql</field>
9 <field name="connector">postgresql</field>
10 </record>
11
12 </data>
13</openerp>
14
15
016
=== added file 'base_external_dbsource/base_external_dbsource_view.xml'
--- base_external_dbsource/base_external_dbsource_view.xml 1970-01-01 00:00:00 +0000
+++ base_external_dbsource/base_external_dbsource_view.xml 2012-09-13 10:12:28 +0000
@@ -0,0 +1,54 @@
1<?xml version="1.0"?>
2<openerp>
3 <data>
4
5 <!-- DBSource -->
6
7 <record model="ir.ui.view" id="view_dbsource_tree">
8 <field name="name">base.external.dbsource.tree</field>
9 <field name="model">base.external.dbsource</field>
10 <field name="type">tree</field>
11 <field name="arch" type="xml">
12 <tree>
13 <field name="name"/>
14 <field name="connector"/>
15 <field name="conn_string"/>
16 </tree>
17 </field>
18 </record>
19
20 <record model="ir.ui.view" id="view_dbsource_form">
21 <field name="name">base.external.dbsource.form</field>
22 <field name="model">base.external.dbsource</field>
23 <field name="type">form</field>
24 <field name="arch" type="xml">
25 <form>
26 <field name="name"/>
27 <field name="password" password="True"/>
28 <newline/>
29 <field name="connector" colspan="2"/>
30 <newline/>
31 <field name="conn_string" colspan="4"/>
32 <newline/>
33 <button name="connection_test" string="Test Connection" type="object" icon="gtk-network" colspan="4"/>
34 </form>
35 </field>
36 </record>
37
38 <record model="ir.actions.act_window" id="action_dbsource">
39 <field name="name">External Database Sources</field>
40 <field name="res_model">base.external.dbsource</field>
41 <field name="view_type">form</field>
42 <field name="view_mode">tree,form</field>
43 <field name="view_id" ref="view_dbsource_tree"/>
44 </record>
45
46 <menuitem name="Database Sources"
47 id="menu_dbsource"
48 parent="base.next_id_15"
49 action="action_dbsource"/>
50
51 </data>
52</openerp>
53
54
055
=== added directory 'base_external_dbsource/images'
=== added file 'base_external_dbsource/images/screenshot01.png'
1Binary files base_external_dbsource/images/screenshot01.png 1970-01-01 00:00:00 +0000 and base_external_dbsource/images/screenshot01.png 2012-09-13 10:12:28 +0000 differ56Binary files base_external_dbsource/images/screenshot01.png 1970-01-01 00:00:00 +0000 and base_external_dbsource/images/screenshot01.png 2012-09-13 10:12:28 +0000 differ
=== added directory 'base_external_dbsource/security'
=== added file 'base_external_dbsource/security/ir.model.access.csv'
--- base_external_dbsource/security/ir.model.access.csv 1970-01-01 00:00:00 +0000
+++ base_external_dbsource/security/ir.model.access.csv 2012-09-13 10:12:28 +0000
@@ -0,0 +1,2 @@
1id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
2access_base_external_dbsource_group_system,base_external_dbsource_group_system,model_base_external_dbsource,base.group_system,1,1,1,1
03
=== added directory 'base_external_dbsource/test'
=== added file 'base_external_dbsource/test/dbsource_connect.yml'
--- base_external_dbsource/test/dbsource_connect.yml 1970-01-01 00:00:00 +0000
+++ base_external_dbsource/test/dbsource_connect.yml 2012-09-13 10:12:28 +0000
@@ -0,0 +1,5 @@
-
  Connect to local Postgres.
-
  !python {model: base.external.dbsource}: |
    self.connection_test(cr, uid, [ref("demo_postgre")])
06
=== added directory 'import_odbc'
=== added file 'import_odbc/__init__.py'
--- import_odbc/__init__.py 1970-01-01 00:00:00 +0000
+++ import_odbc/__init__.py 2012-09-13 10:12:28 +0000
@@ -0,0 +1,24 @@
1# -*- coding: utf-8 -*-
2##############################################################################
3#
4# Daniel Reis
5# 2011
6#
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU Affero General Public License as
9# published by the Free Software Foundation, either version 3 of the
10# License, or (at your option) any later version.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU Affero General Public License for more details.
16#
17# You should have received a copy of the GNU Affero General Public License
18# along with this program. If not, see <http://www.gnu.org/licenses/>.
19#
20##############################################################################
21
22import import_odbc
23
24# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
025
=== added file 'import_odbc/__openerp__.py'
--- import_odbc/__openerp__.py 1970-01-01 00:00:00 +0000
+++ import_odbc/__openerp__.py 2012-09-13 10:12:28 +0000
@@ -0,0 +1,84 @@
1# -*- coding: utf-8 -*-
2##############################################################################
3#
4# Daniel Reis
5# 2011
6#
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU Affero General Public License as
9# published by the Free Software Foundation, either version 3 of the
10# License, or (at your option) any later version.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU Affero General Public License for more details.
16#
17# You should have received a copy of the GNU Affero General Public License
18# along with this program. If not, see <http://www.gnu.org/licenses/>.
19#
20##############################################################################
21
22{
23 'name': 'Import data from SQL and ODBC data sources.',
24 'version': '61.3',
25 'category': 'Tools',
26 'description': """
27Import data directly from other databases.
28
29Installed in the Administration module, menu Configuration -> Import from SQL.
30
31Features:
32 * Fetched data from the databases are used to build lines equivalent to regular import files. These are imported using the standard "import_data()" ORM method, benefiting from all its features, including xml_ids.
33 * Each table import is defined by an SQL statement, used to build the equivalent for an import file. Each column's name should match the column names you would use in an import file. The first column must provide a unique identifier for the record, and will be used to build its xml_id.
34 * SQL columns named "none" are ignored. This can be used for the first column of the SQL, so that it's used to build the XML Id but it's not imported to any OpenERP field.
35 * The last sync date, taken from the last successful execution, can be used in the SQL using "%(sync)s", or ":sync" in the case of Oracle.
36 * When errors are found, only the record with the error fails import. The other correct records are committed. However, the "last sync date" will only be automatically updated when no errors are found.
37 * The import execution can be scheduled to run automatically.
38
39Examples:
40 * Importing suppliers to res.partner:
41 SELECT distinct
42 [SUPPLIER_CODE] as "ref"
43 , [SUPPLIER_NAME] as "name"
44 , 1 as "is_supplier"
45 , [INFO] as "comment"
46 FROM T_SUPPLIERS
47 WHERE INACTIVE_DATE IS NULL and DATE_CHANGED >= %(sync)s
48
49 * Importing products to product.product:
50 SELECT PRODUCT_CODE as "ref"
51 , PRODUCT_NAME as "name"
52 , 'res_partner_id_'+SUPPLIER_ID as "partner_id/id"
53 FROM T_PRODUCTS
54 WHERE DATE_CHANGED >= %(sync)s
55
56Improvements ideas waiting for a contributor:
57 * Allow to import many2one fields (currently not supported). Done by adding a second SQL sentence to get child record list?
58 * Allow "import sets" that can be executed at different time intervals using different scheduler jobs.
59 * Allow to inactivate/delete OpenERP records when not present in an SQL result set.
60 """,
61 'author': 'Daniel Reis',
62 'website': 'http://launchpad.net/addons-tko',
63 'images': [
64 'images/snapshot1.png',
65 'images/snapshot2.png',
66 ],
67 'depends': [
68 'base',
69 'base_external_dbsource',
70 ],
71 'init': [],
72 'data': [
73 'import_odbc_view.xml',
74 'security/ir.model.access.csv',
75 ],
76 'demo': [
77 'import_odbc_demo.xml',
78 ],
79 'test': [],
80 'installable': True,
81 'active': False,
82}
83
84# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
085
=== added directory 'import_odbc/images'
=== added file 'import_odbc/images/snapshot1.png'
1Binary files import_odbc/images/snapshot1.png 1970-01-01 00:00:00 +0000 and import_odbc/images/snapshot1.png 2012-09-13 10:12:28 +0000 differ86Binary files import_odbc/images/snapshot1.png 1970-01-01 00:00:00 +0000 and import_odbc/images/snapshot1.png 2012-09-13 10:12:28 +0000 differ
=== added file 'import_odbc/images/snapshot2.png'
2Binary files import_odbc/images/snapshot2.png 1970-01-01 00:00:00 +0000 and import_odbc/images/snapshot2.png 2012-09-13 10:12:28 +0000 differ87Binary files import_odbc/images/snapshot2.png 1970-01-01 00:00:00 +0000 and import_odbc/images/snapshot2.png 2012-09-13 10:12:28 +0000 differ
=== added file 'import_odbc/import_odbc.py'
--- import_odbc/import_odbc.py 1970-01-01 00:00:00 +0000
+++ import_odbc/import_odbc.py 2012-09-13 10:12:28 +0000
@@ -0,0 +1,214 @@
1# -*- coding: utf-8 -*-
2##############################################################################
3#
4# Daniel Reis
5# 2011
6#
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU Affero General Public License as
9# published by the Free Software Foundation, either version 3 of the
10# License, or (at your option) any later version.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU Affero General Public License for more details.
16#
17# You should have received a copy of the GNU Affero General Public License
18# along with this program. If not, see <http://www.gnu.org/licenses/>.
19#
20##############################################################################
21
22import sys
23from datetime import datetime
24from osv import fields, osv
25import logging
26_logger = logging.getLogger(__name__)
27_loglvl = _logger.getEffectiveLevel()
28SEP = '|'
29
class import_odbc_dbtable(osv.osv):
    """One table import definition: SQL source, target model and run log.

    Rows fetched from an external dbsource are fed to the standard
    ``import_data()`` ORM method; the first SQL column builds the xml_id.
    """
    _name = "import.odbc.dbtable"
    _description = 'Import Table Data'
    _order = 'exec_order'
    _columns = {
        'name': fields.char('Datasource name', required=True, size=64),
        'enabled': fields.boolean('Execution enabled'),
        'dbsource_id': fields.many2one('base.external.dbsource', 'Database source', required=True),
        'sql_source': fields.text('SQL', required=True, help='Column names must be valid "import_data" columns.'),
        'model_target': fields.many2one('ir.model', 'Target object'),
        'noupdate': fields.boolean('No updates', help="Only create new records; disable updates to existing records."),
        'exec_order': fields.integer('Execution order', help="Defines the order to perform the import"),
        'last_sync': fields.datetime('Last sync date', help="Datetime for the last successful sync. Later changes on the source may not be replicated on the destination"),
        'start_run': fields.datetime('Time started', readonly=True),
        'last_run': fields.datetime('Time ended', readonly=True),
        'last_record_count': fields.integer('Last record count', readonly=True),
        'last_error_count': fields.integer('Last error count', readonly=True),
        'last_warn_count': fields.integer('Last warning count', readonly=True),
        'last_log': fields.text('Last run log', readonly=True),
        'ignore_rel_errors': fields.boolean('Ignore relationship errors',
            help="On error try to reimport rows ignoring relationships."),
        'raise_import_errors': fields.boolean('Raise import errors',
            help="Import errors not handled, intended for debugging purposes."
                 "\nAlso forces debug messages to be written to the server log."),
    }
    _defaults = {
        'enabled': True,
        'exec_order': 10,
    }

    def _import_data(self, cr, uid, flds, data, model_obj, table_obj, log):
        """Import one data row; appends errors/warnings to ``log``.

        Returns False on failure, True otherwise.  On relationship errors
        (and if configured to) it retries after dropping m2o columns.
        """

        def find_m2o(field_list):
            """Find the index of the first column holding a many2one field
            (an "xxx:id" or "xxx/id" external-id column)."""
            for i, x in enumerate(field_list):
                # NOTE: the original test mixed "and"/"or" without parens,
                # so the len() guard only applied to the ':id' suffix.
                if len(x) > 3 and x[-3:] in (':id', '/id'):
                    return i
            return -1

        def append_to_log(log, level, obj_id='', msg='', rel_id=''):
            # Rewrite "model_id_N" identifiers as "model: N" for readability.
            if '_id_' in obj_id:
                obj_id = '.'.join(obj_id.split('_')[:-2]) + ': ' + obj_id.split('_')[-1]
            if ': .' in msg and not rel_id:
                rel_id = msg[msg.find(': .') + 3:]
                if '_id_' in rel_id:
                    rel_id = '.'.join(rel_id.split('_')[:-2]) + ': ' + rel_id.split('_')[-1]
                msg = msg[:msg.find(': .')]
            log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg))

        _logger.debug(data)
        cols = list(flds)  # copy to avoid side effects
        errmsg = str()
        if table_obj.raise_import_errors:
            # Debug mode: let import errors propagate.
            model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
        else:
            try:
                model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
            except Exception:
                errmsg = str(sys.exc_info()[1])

        if errmsg and not table_obj.ignore_rel_errors:
            # Fail
            append_to_log(log, 'ERROR', data, errmsg)
            log['last_error_count'] += 1
            return False
        if errmsg and table_obj.ignore_rel_errors:
            # Warn and retry ignoring many2one fields...
            append_to_log(log, 'WARN', data, errmsg)
            log['last_warn_count'] += 1
            # Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first)
            i = find_m2o(cols)
            if i >= 0:
                # Try again without the [i] column
                del cols[i]
                del data[i]
                self._import_data(cr, uid, cols, data, model_obj, table_obj, log)
            else:
                # Fail
                append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.')
                log['last_error_count'] += 1
                return False
        return True

    def import_run(self, cr, uid, ids=None, context=None):
        """Run the import for the given dbtable ids, in exec_order.

        When called without ids (e.g. from the ir.cron job created by
        import_schedule, which passes no arguments) every table is
        considered; disabled tables are skipped either way.
        """
        db_model = self.pool.get('base.external.dbsource')
        if not ids:
            # Scheduled calls pass no ids: process all defined tables.
            ids = self.search(cr, uid, [], context=context)
        actions = self.read(cr, uid, ids, ['id', 'exec_order'])
        actions.sort(key=lambda x: (x['exec_order'], x['id']))

        # Consider each dbtable:
        for action_ref in actions:

            obj = self.browse(cr, uid, action_ref['id'])
            if not obj.enabled:
                continue  # skip

            _logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl)
            _logger.debug('Importing %s...' % obj.name)

            # now() microseconds are stripped to avoid problem with SQL smalldate
            # TODO: convert UTC Now to local timezone (http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime)
            model_name = obj.model_target.model
            model_obj = self.pool.get(model_name)
            xml_prefix = model_name.replace('.', '_') + "_id_"
            log = {'start_run': datetime.now().replace(microsecond=0),
                   'last_run': None,
                   'last_record_count': 0,
                   'last_error_count': 0,
                   'last_warn_count': 0,
                   'last_log': list()}
            self.write(cr, uid, [obj.id], log)

            # Prepare SQL sentence; replace "%s" with the last_sync date
            if obj.last_sync:
                sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
            else:
                # BUGFIX: was "datetime.datetime(...)", an AttributeError
                # since this module does "from datetime import datetime".
                sync = datetime(1900, 1, 1, 0, 0, 0)
            params = {'sync': sync}
            res = db_model.execute(cr, uid, [obj.dbsource_id.id], obj.sql_source, params, metadata=True)

            # Exclude columns titled "None"; add (xml_)"id" column
            cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
            cols = [x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] + ['id']

            # Import each row:
            for row in res['rows']:
                # Build data row; import only columns present in the "cols" list
                data = list()
                for i in cidx:
                    # TODO: Handle imported datetimes properly - convert from localtime to UTC!
                    v = row[i]
                    # basestring also covers py2 unicode rows (e.g. from sqlalchemy)
                    if isinstance(v, basestring):
                        v = v.strip()
                    data.append(v)
                data.append(xml_prefix + str(row[0]).strip())

                # Import the row; on error, write line to the log
                log['last_record_count'] += 1
                self._import_data(cr, uid, cols, data, model_obj, obj, log)
                if log['last_record_count'] % 500 == 0:
                    _logger.info('...%s rows processed...' % (log['last_record_count']))

            # Finished importing all rows
            # If no errors, write new sync date
            if not (log['last_error_count'] or log['last_warn_count']):
                log['last_sync'] = log['start_run']
            level = logging.DEBUG
            if log['last_warn_count']:
                level = logging.WARN
            if log['last_error_count']:
                level = logging.ERROR
            _logger.log(level, 'Imported %s , %d rows, %d errors, %d warnings.' % (
                model_name, log['last_record_count'], log['last_error_count'],
                log['last_warn_count']))
            # Write run log, either if the table import is active or inactive
            if log['last_log']:
                log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
            log.update({'last_log': '\n'.join(log['last_log'])})
            log.update({'last_run': datetime.now().replace(microsecond=0)})  # second=0,
            self.write(cr, uid, [obj.id], log)
        # Finished
        _logger.debug('Import job FINISHED.')
        return True

    def import_schedule(self, cr, uid, ids, context=None):
        """Create an hourly ir.cron job running import_run, and open it."""
        cron_obj = self.pool.get('ir.cron')
        new_create_id = cron_obj.create(cr, uid, {
            'name': 'Import ODBC tables',
            'interval_type': 'hours',
            'interval_number': 1,
            'numbercall': -1,
            'model': 'import.odbc.dbtable',
            'function': 'import_run',
            'doall': False,
            'active': True
        })
        return {
            'name': 'Import ODBC tables',
            'view_type': 'form',
            'view_mode': 'form,tree',
            'res_model': 'ir.cron',
            'res_id': new_create_id,
            'type': 'ir.actions.act_window',
        }

import_odbc_dbtable()
0215
=== added file 'import_odbc/import_odbc_demo.xml'
--- import_odbc/import_odbc_demo.xml 1970-01-01 00:00:00 +0000
+++ import_odbc/import_odbc_demo.xml 2012-09-13 10:12:28 +0000
@@ -0,0 +1,15 @@
1<?xml version="1.0"?>
2<openerp>
3 <data>
4
5 <record model="import.odbc.dbtable" id="demo_postgresql_users">
6 <field name="name">Users from PostgreSQL </field>
7 <field name="dbsource_id" ref="base_external_dbsource.demo_postgre"/>
8 <field name="sql_source">select usename as "login", usename as "name" from pg_catalog.pg_user</field>
9 <field name="model_target" ref="base.model_res_users"/>
10 </record>
11
12 </data>
13</openerp>
14
15
016
=== added file 'import_odbc/import_odbc_view.xml'
--- import_odbc/import_odbc_view.xml 1970-01-01 00:00:00 +0000
+++ import_odbc/import_odbc_view.xml 2012-09-13 10:12:28 +0000
@@ -0,0 +1,86 @@
1<?xml version="1.0"?>
2<openerp>
3 <data>
4
5 <!-- Table form -->
6
7 <record model="ir.ui.view" id="view_import_dbtable_form">
8 <field name="name">import.odbc.dbtable.form</field>
9 <field name="model">import.odbc.dbtable</field>
10 <field name="type">form</field>
11 <field name="arch" type="xml">
12 <form>
13 <field name="name" search="1"/>
14 <field name="exec_order"/>
15 <field name="model_target"/>
16 <field name="dbsource_id" search="1"/>
17 <field name="noupdate"/>
18 <field name="enabled"/>
19 <field name="ignore_rel_errors"/>
20 <field name="raise_import_errors"/>
21 <field name="last_sync"/>
22 <group colspan="2">
23 <button name="import_run" string="Run Import" type="object" icon="gtk-execute"/>
24 <button name="import_schedule" string="Schedule Import" type="object" icon="gtk-paste"/>
25 </group>
26 <field name="sql_source" colspan="4"/>
27 <separator string="Last execution" colspan="4"/>
28 <field name="last_record_count"/>
29 <field name="start_run"/>
30 <field name="last_warn_count"/>
31 <field name="last_run"/>
32 <field name="last_error_count"/>
33 <field name="last_log" colspan="4"/>
34 </form>
35 </field>
36 </record>
37
38 <!-- Table Tree -->
39
40 <record id="view_import_dbtable_tree" model="ir.ui.view">
41 <field name="name">import.odbc.dbtable.tree</field>
42 <field name="model">import.odbc.dbtable</field>
43 <field name="type">tree</field>
44 <field name="arch" type="xml">
45 <tree colors="grey: enabled==False; red:last_error_count&gt;0; blue:last_warn_count&gt;0">
46 <field name="exec_order"/>
47 <field name="name"/>
48 <field name="model_target"/>
49 <field name="dbsource_id"/>
50 <field name="enabled"/>
51 <field name="last_run"/>
52 <field name="last_sync"/>
53 <field name="last_record_count"/>
54 <field name="last_error_count"/>
55 <field name="last_warn_count"/>
56 </tree>
57 </field>
58</record>
59
60
61<!-- Tree Search -->
62 <record id="view_import_dbtable_filter" model="ir.ui.view">
63 <field name="name">import.odbc.dbtable.filter</field>
64 <field name="model">import.odbc.dbtable</field>
65 <field name="type">search</field>
66 <field name="arch" type="xml">
67 <search string="Search ODBC Imports">
68 <field name="name"/>
69 <field name="dbsource_id"/>
70 <field name="model_target"/>
71 </search>
72 </field>
73 </record>
74
75<!--Menu-->
76 <record model="ir.actions.act_window" id="action_import_dbtable">
77 <field name="name">Import from SQL</field>
78 <field name="res_model">import.odbc.dbtable</field>
79 <field name="view_type">form</field>
80 </record>
81 <menuitem name="Import from SQL" id="menu_import_dbtable" parent="base.next_id_15" action="action_import_dbtable"/>
82</data>
83</openerp>
84
85
86
087
=== added directory 'import_odbc/security'
=== added file 'import_odbc/security/ir.model.access.csv'
--- import_odbc/security/ir.model.access.csv 1970-01-01 00:00:00 +0000
+++ import_odbc/security/ir.model.access.csv 2012-09-13 10:12:28 +0000
@@ -0,0 +1,2 @@
1id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
2access_import_odbc_dbsource_group_system,import_odbc_dbtable_group_system,model_import_odbc_dbtable,base.group_system,1,1,1,1