openerp-community team mailing list archive

lp:~openerp-community/openobject-extension/import_odbc_base_external_dbsource into lp:openobject-extension/openerp6.1-module

 

Maxime Chambreuil (http://www.savoirfairelinux.com) has proposed merging lp:~openerp-community/openobject-extension/import_odbc_base_external_dbsource into lp:openobject-extension/openerp6.1-module.

Requested reviews:
  extra-addons-commiter (extra-addons-commiter)

For more details, see:
https://code.launchpad.net/~openerp-community/openobject-extension/import_odbc_base_external_dbsource/+merge/118290

[ADD] import_odbc and base_external_dbsource from lp:~dreis-pt/addons-tko/reis
-- 
https://code.launchpad.net/~openerp-community/openobject-extension/import_odbc_base_external_dbsource/+merge/118290
Your team OpenERP Community is subscribed to branch lp:~openerp-community/openobject-extension/import_odbc_base_external_dbsource.
=== added directory 'base_external_dbsource'
=== added file 'base_external_dbsource/__init__.py'
--- base_external_dbsource/__init__.py	1970-01-01 00:00:00 +0000
+++ base_external_dbsource/__init__.py	2012-08-06 03:35:23 +0000
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+##############################################################################
+#
+#    Daniel Reis
+#    2011
+#
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU Affero General Public License as
+#    published by the Free Software Foundation, either version 3 of the
+#    License, or (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU Affero General Public License for more details.
+#
+#    You should have received a copy of the GNU Affero General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+import base_external_dbsource
+
+# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

=== added file 'base_external_dbsource/__openerp__.py'
--- base_external_dbsource/__openerp__.py	1970-01-01 00:00:00 +0000
+++ base_external_dbsource/__openerp__.py	2012-08-06 03:35:23 +0000
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+##############################################################################
+#
+#    Daniel Reis
+#    2011
+#
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU Affero General Public License as
+#    published by the Free Software Foundation, either version 3 of the
+#    License, or (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU Affero General Public License for more details.
+#
+#    You should have received a copy of the GNU Affero General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+{
+    'name': 'External Database Sources',
+    'version': '61.3',
+    'category': 'Tools',
+    'description': """
+This module allows you to define connections to foreign databases using ODBC,
+Oracle Client or SQLAlchemy.
+
+Database sources can be configured in Settings > Configuration > Data sources.
+
+Depending on the database, you need:
+ * to install the unixodbc and python-pyodbc packages to use ODBC connections.
+ * to install the FreeTDS driver (tdsodbc package) and configure it through ODBC
+   to connect to Microsoft SQL Server.
+ * to install and configure the Oracle Instant Client and the cx_Oracle python
+   library to connect to Oracle.
+    """,
+    'author': 'Daniel Reis',
+    'website': 'http://launchpad.net/addons-tko',
+    'images': [
+        'images/screenshot01.png',
+    ],
+    'depends': [
+        'base',
+    ],
+    'init': [],
+    'data': [
+        'base_external_dbsource_view.xml',
+        'security/ir.model.access.csv',
+    ],
+    'demo': [],
+    'test': [], 
+    'installable': True,
+    'active': False,
+}
+
+# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

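A data source as described above can also be created from server-side code. The
following is only a sketch against the 6.1 ORM; the record name and connection
values are made-up examples ('%s' in the connection string is replaced by the
password at connection time):

    def _create_demo_dbsource(self, cr, uid):
        # All values below are illustrative
        dbsource_obj = self.pool.get('base.external.dbsource')
        return dbsource_obj.create(cr, uid, {
            'name': 'Legacy ERP (demo)',
            'connector': 'postgresql',
            'conn_string': "dbname='legacy' user='dbuser' "
                           "host='localhost' port='5432' password=%s",
            'password': 'secret',
        })
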
=== added file 'base_external_dbsource/base_external_dbsource.py'
--- base_external_dbsource/base_external_dbsource.py	1970-01-01 00:00:00 +0000
+++ base_external_dbsource/base_external_dbsource.py	2012-08-06 03:35:23 +0000
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+##############################################################################
+#
+#    Daniel Reis
+#    2011
+#
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU Affero General Public License as
+#    published by the Free Software Foundation, either version 3 of the
+#    License, or (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU Affero General Public License for more details.
+#
+#    You should have received a copy of the GNU Affero General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+import os
+import logging
+
+import psycopg2
+from osv import fields, osv
+from openerp.tools.translate import _
+import openerp.tools as tools
+_logger = logging.getLogger(__name__)
+
+CONNECTORS = []
+try:
+    import sqlalchemy
+    import pymssql
+    CONNECTORS.append( ('mssql', 'Microsoft SQL Server') )
+except ImportError:
+    _logger.info('MS SQL Server not available. Please install the "sqlalchemy" and "pymssql" python packages.')
+
+try:
+    import sqlalchemy
+    import MySQLdb
+    CONNECTORS.append( ('mysql', 'MySQL') )
+except ImportError:
+    _logger.info('MySQL not available. Please install the "sqlalchemy" and "MySQLdb" python packages.')
+
+try:
+    import pyodbc
+    CONNECTORS.append( ('pyodbc', 'ODBC') )
+except ImportError:
+    _logger.info('ODBC libraries not available. Please install the "unixodbc" and "python-pyodbc" packages.')
+
+try:
+    import cx_Oracle
+    CONNECTORS.append( ('cx_Oracle', 'Oracle') )
+except ImportError:
+    _logger.info('Oracle libraries not available. Please install the "cx_Oracle" python package.')
+
+CONNECTORS.append( ('postgresql', 'PostgreSQL') )
+
+try:
+    import sqlalchemy
+    CONNECTORS.append( ('sqlite', 'SQLite') )
+except ImportError:
+    _logger.info('SQLAlchemy not available. Please install the "sqlalchemy" python package.')
+
+class base_external_dbsource(osv.osv):
+    _name = "base.external.dbsource"
+    _description = 'External Database Sources'
+    _columns = {
+        'name': fields.char('Datasource name', required=True, size=64),
+        'conn_string': fields.text('Connection string', help="Microsoft SQL Server Sample: mssql+pymssql://username:%s@server:port/dbname?charset=utf8\nMySQL Sample: mysql://user:%s@server:port/dbname\nODBC Sample: DRIVER={FreeTDS};SERVER=server.address;Database=mydb;UID=sa\nORACLE Sample: username/%s@//server.address:port/instance\nPostgreSQL Sample: dbname='template1' user='dbuser' host='localhost' port='5432' password=%s\nSQLite Sample: sqlite:///test.db"),
+        'password': fields.char('Password', size=40),
+        'connector': fields.selection(CONNECTORS, 'Connector', required=True),
+    }
+
+    def conn_open(self, cr, uid, id1):
+        #Get dbsource record
+        data = self.browse(cr, uid, id1)
+        #Build the full connection string
+        connStr = data.conn_string
+        if data.password:
+            if '%s' not in data.conn_string:
+                connStr += ';PWD=%s'
+            connStr = connStr % data.password
+        #Try to connect
+        if data.connector == 'cx_Oracle':
+            os.environ['NLS_LANG'] = 'AMERICAN_AMERICA.UTF8'
+            conn = cx_Oracle.connect(connStr)
+        elif data.connector == 'pyodbc':
+            conn = pyodbc.connect(connStr)
+        elif data.connector in ('sqlite','mysql','mssql'):
+            conn = sqlalchemy.create_engine(connStr).connect()
+        elif data.connector == 'postgresql':
+            conn = psycopg2.connect(connStr)
+
+        return conn
+
+    def execute(self, cr, uid, ids, sqlquery, context=None):
+        data = self.browse(cr, uid, ids)
+        res = []
+        for obj in data:
+            conn = self.conn_open(cr, uid, obj.id)
+            
+            if obj.connector in ["sqlite","mysql","mssql"]:
+                res_prox = conn.execute(sqlquery)
+                for row in res_prox:
+                    res.append(row)
+            else:
+                cur = conn.cursor()
+                cur.execute(sqlquery)
+                res = cur.fetchall()
+
+            conn.close()
+        return res
+
+    def connection_test(self, cr, uid, ids, context=None):
+        for obj in self.browse(cr, uid, ids, context):
+            conn = False
+            try:
+                conn = self.conn_open(cr, uid, obj.id)
+            except Exception, e:
+                raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e))
+            finally:
+                try:
+                    if conn: conn.close()
+                except Exception:
+                    # ignored, just a consequence of the previous exception
+                    pass
+        raise osv.except_osv(_("Connection test succeeded!"), _("Everything seems properly set up!"))
+    
+base_external_dbsource()

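Once a source is configured, other modules can run queries against it through
the model's execute() method. A minimal server-side sketch; the data source id
and the query are illustrative:

    def _read_legacy_products(self, cr, uid, dbsource_id):
        # dbsource_id and the query below are illustrative
        dbsource_obj = self.pool.get('base.external.dbsource')
        return dbsource_obj.execute(cr, uid, [dbsource_id],
            'SELECT product_code, product_name FROM t_products')
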
=== added file 'base_external_dbsource/base_external_dbsource_view.xml'
--- base_external_dbsource/base_external_dbsource_view.xml	1970-01-01 00:00:00 +0000
+++ base_external_dbsource/base_external_dbsource_view.xml	2012-08-06 03:35:23 +0000
@@ -0,0 +1,53 @@
+<?xml version="1.0"?>
+<openerp>
+    <data>
+
+        <!-- DBSource -->
+
+        <record model="ir.ui.view" id="view_dbsource_tree">
+            <field name="name">base.external.dbsource.tree</field>
+            <field name="model">base.external.dbsource</field>
+            <field name="type">tree</field>
+            <field name="arch" type="xml">
+                <tree>
+                    <field name="name"/>
+                    <field name="connector"/>
+                    <field name="conn_string"/>
+                </tree>
+            </field>
+        </record>
+        
+        <record model="ir.ui.view" id="view_dbsource_form">
+            <field name="name">base.external.dbsource.form</field>
+            <field name="model">base.external.dbsource</field>
+            <field name="type">form</field>
+            <field name="arch" type="xml">
+                <form>
+                    <field name="name"/>
+                    <field name="password" password="True"/>
+                    <newline/>
+                    <field name="connector" colspan="2"/>
+                    <newline/>
+                    <field name="conn_string" colspan="4"/>
+                    <newline/>
+                    <button name="connection_test" string="Test Connection" type="object" icon="gtk-network" colspan="4"/>
+                </form>
+            </field>
+        </record>
+
+        <record model="ir.actions.act_window" id="action_dbsource">
+            <field name="name">External Database Sources</field>
+            <field name="res_model">base.external.dbsource</field>
+            <field name="view_type">form</field>
+            <field name="view_mode">tree,form</field>
+            <field name="view_id" ref="view_dbsource_tree"/>
+        </record>
+
+        <menuitem name="Sources" 
+                  id="menu_dbsource" 
+                  parent="base.next_id_15" 
+                  action="action_dbsource"/>
+
+    </data>
+</openerp>
+

=== added directory 'base_external_dbsource/images'
=== added file 'base_external_dbsource/images/screenshot01.png'
Binary files base_external_dbsource/images/screenshot01.png	1970-01-01 00:00:00 +0000 and base_external_dbsource/images/screenshot01.png	2012-08-06 03:35:23 +0000 differ
=== added directory 'base_external_dbsource/security'
=== added file 'base_external_dbsource/security/ir.model.access.csv'
--- base_external_dbsource/security/ir.model.access.csv	1970-01-01 00:00:00 +0000
+++ base_external_dbsource/security/ir.model.access.csv	2012-08-06 03:35:23 +0000
@@ -0,0 +1,2 @@
+id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
+access_base_external_dbsource_group_system,base_external_dbsource_group_system,model_base_external_dbsource,base.group_system,1,1,1,1

=== added directory 'import_odbc'
=== added file 'import_odbc/__init__.py'
--- import_odbc/__init__.py	1970-01-01 00:00:00 +0000
+++ import_odbc/__init__.py	2012-08-06 03:35:23 +0000
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+##############################################################################
+#
+#    Daniel Reis
+#    2011
+#
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU Affero General Public License as
+#    published by the Free Software Foundation, either version 3 of the
+#    License, or (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU Affero General Public License for more details.
+#
+#    You should have received a copy of the GNU Affero General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+import import_odbc
+
+# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

=== added file 'import_odbc/__openerp__.py'
--- import_odbc/__openerp__.py	1970-01-01 00:00:00 +0000
+++ import_odbc/__openerp__.py	2012-08-06 03:35:23 +0000
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+##############################################################################
+#
+#    Daniel Reis
+#    2011
+#
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU Affero General Public License as
+#    published by the Free Software Foundation, either version 3 of the
+#    License, or (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU Affero General Public License for more details.
+#
+#    You should have received a copy of the GNU Affero General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+{
+    'name': 'Import data from ODBC sources.',
+    'version': '61.3',
+    'category': 'Tools',
+    'description': """
+Import data directly from other databases.
+
+The menu is added under Administration > Configuration > Import from ODBC.
+
+Features:
+ * Data fetched from the databases is used to build lines equivalent to regular import files. These are imported using the standard "import_data()" ORM method, benefiting from all its features, including xml_ids.
+ * Each table import is defined by an SQL statement, used to build the equivalent of an import file. Each column's name should match the column names you would use in an import file. The first column must provide a unique identifier for the record, and is used to build its xml_id.
+ * The last sync date is the date of the last successful execution. You can select only records changed since then by adding a WHERE clause using this date through the "%s" placeholder (or "?", depending on the source driver).
+ * When errors are found, only the offending record fails to import; the remaining records are committed. However, the "last sync date" is only updated automatically when no errors are found.
+ * The import execution can be scheduled to run automatically.
+
+Example SQL:
+SELECT PRODUCT_CODE as "ref", PRODUCT_NAME as "name", 'res_partner_id_'+SUPPLIER_ID as "partner_id/id"
+FROM T_PRODUCTS 
+WHERE DATE_CHANGED >= %s
+    """,
+    'author': 'Daniel Reis',
+    'website': 'http://launchpad.net/addons-tko',
+    'images': [
+        'images/snapshot1.png',
+        'images/snapshot2.png',
+    ],
+    'depends': [
+        'base',
+        'base_external_dbsource',
+    ],
+    'init': [],
+    'data': [
+        'import_odbc_view.xml',
+        'security/ir.model.access.csv',
+    ],
+    'demo': [],
+    'test': [], 
+    'installable': True,
+    'active': False,
+}
+
+# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

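To make the xml_id convention above concrete, here is what the module
effectively builds for each fetched row; the target model, columns and values
are made-up examples:

    # Target model res.partner; SQL columns aliased "ref" and "name".
    # A fetched row ('P0001', 'Acme Ltd') becomes one import line:
    cols = ['ref', 'name', 'id']   # an extra 'id' column holds the xml_id
    row = ['P0001', 'Acme Ltd', 'res_partner_id_P0001']
    # ...which is then loaded with the standard ORM call:
    #   model_obj.import_data(cr, uid, cols, [row], noupdate=...)
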
=== added directory 'import_odbc/images'
=== added file 'import_odbc/images/snapshot1.png'
Binary files import_odbc/images/snapshot1.png	1970-01-01 00:00:00 +0000 and import_odbc/images/snapshot1.png	2012-08-06 03:35:23 +0000 differ
=== added file 'import_odbc/images/snapshot2.png'
Binary files import_odbc/images/snapshot2.png	1970-01-01 00:00:00 +0000 and import_odbc/images/snapshot2.png	2012-08-06 03:35:23 +0000 differ
=== added file 'import_odbc/import_odbc.py'
--- import_odbc/import_odbc.py	1970-01-01 00:00:00 +0000
+++ import_odbc/import_odbc.py	2012-08-06 03:35:23 +0000
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -*-
+##############################################################################
+#
+#    Daniel Reis
+#    2011
+#
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU Affero General Public License as
+#    published by the Free Software Foundation, either version 3 of the
+#    License, or (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU Affero General Public License for more details.
+#
+#    You should have received a copy of the GNU Affero General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+import os
+import sys
+import datetime
+from osv import fields, osv
+
+import logging
+_logger = logging.getLogger(__name__)
+
+class import_odbc_dbsource(osv.osv):
+    _name = "base.external.dbsource"
+    _inherit = "base.external.dbsource"
+
+    _columns = {
+        'dbtable_ids': fields.one2many('import.odbc.dbtable', 'dbsource_id', 'Import tables'),
+    }
+
+    def import_run(self, cr, uid, ids, context=None):
+        #Prepare objects to be used
+        table_obj = self.pool.get('import.odbc.dbtable')
+        #Import each selected dbsource
+        data = self.browse(cr, uid, ids)
+        for obj in data:
+            #Get list of tables
+            table_ids = [x.id for x in obj.dbtable_ids]
+            #Run import
+            table_obj.import_run( cr, uid, table_ids)
+        return True
+    
+import_odbc_dbsource()
+
+class import_odbc_dbtable(osv.osv):
+    _name="import.odbc.dbtable"
+    _description = 'Import Table Data'
+    _order = 'exec_order'
+    _columns = {
+        'name': fields.char('Datasource name', required=True, size=64),
+        'enabled': fields.boolean('Execution enabled'),
+        'dbsource_id': fields.many2one('base.external.dbsource', 'Database source', required=True),
+        'sql_source': fields.text('SQL', required=True, help='Column names must be valid "import_data" columns.'),
+        'model_target': fields.many2one('ir.model','Target object'),
+        'noupdate': fields.boolean('No updates', help="Only create new records; disable updates to existing records."),
+        'exec_order': fields.integer('Execution order', help="Defines the order to perform the import"),
+        'last_sync': fields.datetime('Last sync date', help="Datetime for the last succesfull sync. Later changes on the source may not be replicated on the destination"),
+        'start_run': fields.datetime('Time started', readonly=True),
+        'last_run': fields.datetime('Time ended', readonly=True),
+        'last_record_count': fields.integer('Last record count', readonly=True),
+        'last_error_count': fields.integer('Last error count', readonly=True),
+        'last_warn_count': fields.integer('Last warning count', readonly=True),
+        'last_log': fields.text('Last run log', readonly=True),
+        'ignore_rel_errors': fields.boolean('Ignore relationship errors', 
+            help="On error try to reimport rows ignoring relationships."),
+        'raise_import_errors': fields.boolean('Raise import errors', 
+            help="Import errors not handled, intended for debugging purposes."),
+    }
+    _defaults = {
+        'enabled': True,
+        'exec_order': 10,
+    }
+
+    #TODO: allow different cron jobs to run different sets of imports
+    #TODO: add field for user-friendly error report, to be used in automatic e-mail
+    #TODO: create a "clean-up" procedure, to act on (inactivate?) each record without correspondence in the SQL results
+    #TODO: write dates in dbtable in UTC
+    
+    def import_run(self, cr, uid, ids=None, context=None):
+        #TODO: refactor - split in smaller routines!
+        def is_id_field(x):
+            """Detect whether a column is a relation field (":id" or "/id" suffix)"""
+            return len(x) > 3 and x[-3:] in (':id', '/id')
+            
+        def remove_cols(ids, cols, data):
+            """Remove the columns named in ids from the cols and data lists"""
+            rc, rd = list(), list()
+            for c, d in zip(cols, data):
+                if c not in ids:
+                    rc.append(c)
+                    rd.append(d)
+            return rc, rd
+        
+        def safe_import(cr, uid, target_obj, colrow, datarows, noupdate, raise_import_errors=False):
+            """Import data and return an error message, or an empty string on success"""
+            res = ''
+            if raise_import_errors:
+                target_obj.import_data(cr, uid, colrow, datarows, noupdate=noupdate)
+            else:
+                try:
+                    target_obj.import_data(cr, uid, colrow, datarows, noupdate=noupdate)
+                    cr.commit()
+                except:
+                    #Can't use cr.rollback() - it breaks ir.cron's lock on the job, causing duplicate spawns
+                    res = str(sys.exc_info()[1])
+            return res
+
+        def text_to_log(level, obj_id = '', msg = '', rel_id = ''):
+            if '_id_' in obj_id:
+                obj_id = '.'.join(obj_id.split('_')[:-2]) \
+                       + ': ' + obj_id.split('_')[-1]
+            if ': .' in msg and not rel_id:
+                rel_id = msg[msg.find(': .')+3:]
+                if '_id_' in rel_id:
+                    rel_id = '.'.join(rel_id.split('_')[:-2]) \
+                           + ': ' + rel_id.split('_')[-1]
+                    msg = msg[:msg.find(': .')]
+            return '%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg)
+            
+        #Prepare support objects
+        dbsource_obj = self.pool.get('base.external.dbsource')
+
+        _logger.debug('Import job STARTING...')
+        #Build id list if none is provided
+        if not ids:
+            ids = self.search(cr, uid, [('enabled', '=', True)])
+        #Sort list by exec_order
+        actions = self.read(cr, uid, ids, ['id', 'exec_order'])
+        actions.sort(key=lambda x: (x['exec_order'], x['id']))
+        #Consider each dbtable:
+        for action in actions:
+            obj = self.browse(cr, uid, action['id'])
+            #Skip if it's inactive or is running
+            if obj.enabled:
+                #Prepare log to write
+                #now() microseconds are stripped to avoid problem with SQL smalldate
+                #TODO: convert UTC Now to local timezone (http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime)
+                _logger.debug('Importing %s...' % obj.name)
+                log = { 
+                    'start_run': datetime.datetime.now().replace(microsecond=0),
+                    'last_run': None,
+                    'last_record_count': 0,
+                    'last_error_count': 0,
+                    'last_warn_count': 0,
+                    'last_log': ''
+                    }
+                self.write(cr, uid, [obj.id], log)
+                log_lines = list()
+                ignore_rel_errors = obj.ignore_rel_errors
+                raise_import_errors = obj.raise_import_errors
+                #Prepare SQL sentence; replace every placeholder ("?" or "%s", depending on the driver) with the last_sync date
+                sql = obj.sql_source
+                dt = obj.last_sync
+                params = tuple( [dt] * (sql.count('?') + sql.count('%s')) )
+                #Open the source connection
+                conn = dbsource_obj.conn_open(cr, uid, obj.dbsource_id.id)
+                #Get source data cursor
+                db_cursor = conn.cursor()
+                db_cursor.execute(sql, params)
+                #Build column list from cursor:
+                # - exclude columns titled "None"
+                # - add an extra "id" for the xml_id
+                cols = [x[0] for x in db_cursor.description if x[0].upper() != 'NONE']
+                cols.append('id')
+                #Get destination object
+                model = obj.model_target.model
+                model_obj = self.pool.get(model)
+                #Setup prefix to use in xml_ids 
+                xml_prefix = model.replace('.', '_') + "_id_"
+                #Import each row:
+                for row in db_cursor:
+                    #Build data row; import only columns present in the "cols" list
+                    datarow = []
+                    for (i, col) in enumerate(row):
+                        if db_cursor.description[i][0] in cols:
+                            datarow.append( str(col).strip() )
+                            #TODO: Handle datetimes properly - convert from localtime to UTC!
+                    #Add "xml_id" column to row
+                    datarow.append( xml_prefix + str(row[0]).strip() )
+                    _logger.debug( datarow )
+                    #Import the row; on error, write line to the log
+                    log['last_record_count'] += 1
+                    err = safe_import(cr, uid, model_obj, cols, [datarow], obj.noupdate, raise_import_errors)
+                    #If error; retry ignoring many2one fields...
+                    if err and ignore_rel_errors:
+                        #Log a warning
+                        log_lines.append( text_to_log('WARN', datarow[-1], err ) )
+                        log['last_warn_count'] += 1
+                        #Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first)
+                        idcols = filter(is_id_field, cols)
+                        for idcol in idcols:
+                            c, d = remove_cols( [idcol], cols, datarow)
+                            err = safe_import(cr, uid, model_obj, c, [d], obj.noupdate, raise_import_errors)
+                            if not err: 
+                                break
+                        #If still error; retry ignoring all ".../id" fields
+                        if err:
+                            c, d = remove_cols( idcols, cols, datarow)
+                            err = safe_import(cr, uid, model_obj, c, [d], obj.noupdate, raise_import_errors)
+                    #If still error after all import tries, reject data row
+                    if err:
+                        log_lines.append( text_to_log('ERROR', datarow[-1], err ) )
+                        log['last_error_count'] += 1
+                    #Inform progress on long Imports, every 500 rows
+                    if log['last_record_count'] % 500 == 0:
+                        _logger.info('...%s rows processed...' % (log['last_record_count']) )
+
+                #Finished importing all rows
+                msg = 'Imported %s, %s rows, %s errors, %s warnings.' % (
+                    model,
+                    log['last_record_count'],
+                    log['last_error_count'],
+                    log['last_warn_count'] )
+                #Close the connection
+                conn.close()
+                #If no errors, write new sync date
+                if not (log['last_error_count'] or log['last_warn_count']):
+                    log['last_sync'] = log['start_run']
+                level = logging.DEBUG
+                if log['last_warn_count']: level = logging.WARN
+                if log['last_error_count']: level = logging.ERROR
+                _logger.log(level, msg)
+                #Write the run log, whether or not there were errors
+                if log_lines:
+                    log_lines.insert(0, text_to_log('LEVEL', '== Line ==    ', '== Message ==', '== Relationship =='))
+                    log.update( {'last_log': '\n'.join(log_lines)} )
+                log.update({ 'last_run': datetime.datetime.now().replace(microsecond=0) })
+                self.write(cr, uid, [obj.id], log)
+                #cr.commit() #Avoid conflicts with user actions on long running imports (?)
+        #Finished
+        _logger.debug('Import job FINISHED.')
+        return True
+
+    def import_schedule(self, cr, uid, ids, context=None):
+        cron_obj = self.pool.get('ir.cron')
+        new_create_id = cron_obj.create(cr, uid, {
+            'name': 'Import ODBC tables',
+            'interval_type': 'hours',
+            'interval_number': 1, 
+            'numbercall': -1,
+            'model': 'import.odbc.dbtable',
+            'function': 'import_run', 
+            'doall': False,
+            'active': True
+            })
+        return {
+            'name': 'Import ODBC tables',
+            'view_type': 'form',
+            'view_mode': 'form,tree',
+            'res_model': 'ir.cron',
+            'res_id': new_create_id,
+            'type': 'ir.actions.act_window',
+            }
+        
+import_odbc_dbtable()

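For completeness, a sketch of creating and running one import table from
server-side code; the target model, names and SQL are illustrative, and the
data source record is assumed to exist already:

    def _setup_demo_import(self, cr, uid, dbsource_id):
        model_obj = self.pool.get('ir.model')
        table_obj = self.pool.get('import.odbc.dbtable')
        # Look up the ir.model id of the (illustrative) target model
        target_id = model_obj.search(cr, uid,
            [('model', '=', 'res.partner')])[0]
        table_id = table_obj.create(cr, uid, {
            'name': 'Partners from legacy ERP',
            'dbsource_id': dbsource_id,
            'model_target': target_id,
            'sql_source': 'SELECT CODE as "ref", NAME as "name" FROM T_PARTNERS',
        })
        table_obj.import_run(cr, uid, [table_id])
        return table_id
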
=== added file 'import_odbc/import_odbc_view.xml'
--- import_odbc/import_odbc_view.xml	1970-01-01 00:00:00 +0000
+++ import_odbc/import_odbc_view.xml	2012-08-06 03:35:23 +0000
@@ -0,0 +1,84 @@
+<?xml version="1.0"?>
+<openerp>
+    <data>
+
+        <!-- Table form -->
+
+        <record model="ir.ui.view" id="view_import_dbtable_form">
+            <field name="name">import.odbc.dbtable.form</field>
+            <field name="model">import.odbc.dbtable</field>
+            <field name="type">form</field>
+            <field name="arch" type="xml">
+                <form>
+                    <field name="name" search="1"/>
+                    <field name="exec_order"/>
+                    <field name="model_target"/>
+                    <field name="dbsource_id" search="1"/>
+                    <field name="noupdate"/>
+                    <field name="enabled"/>
+                    <field name="ignore_rel_errors"/>
+                    <field name="raise_import_errors"/>
+                    <field name="last_sync"/>
+                    <group colspan="2">
+                        <button name="import_run" string="Run Import" type="object" icon="gtk-execute"/>
+                        <button name="import_schedule" string="Schedule Import" type="object" icon="gtk-paste"/>
+                    </group>
+                    <field name="sql_source" colspan="4"/>
+                    <separator string="Last execution" colspan="4"/>
+                    <field name="last_record_count"/>
+                    <field name="start_run"/>
+                    <field name="last_warn_count"/>
+                    <field name="last_run"/>
+                    <field name="last_error_count"/>
+                    <field name="last_log" colspan="4"/>
+                </form>
+            </field>
+        </record>
+
+        <!-- Table Tree -->
+
+        <record id="view_import_dbtable_tree" model="ir.ui.view">
+            <field name="name">import.odbc.dbtable.tree</field>
+            <field name="model">import.odbc.dbtable</field>
+            <field name="type">tree</field>
+            <field name="arch" type="xml">
+                <tree colors="grey: enabled==False; red:last_error_count&gt;0; blue:last_warn_count&gt;0">
+                    <field name="exec_order"/>
+                    <field name="name"/>
+                    <field name="model_target"/>
+                    <field name="dbsource_id"/>
+                    <field name="enabled"/>
+                    <field name="last_run"/>
+                    <field name="last_sync"/>
+                    <field name="last_record_count"/>
+                    <field name="last_error_count"/>
+                    <field name="last_warn_count"/>
+                </tree>
+            </field>
+        </record>
+
+
+        <!-- Tree Search -->
+        <record id="view_import_dbtable_filter" model="ir.ui.view">
+            <field name="name">import.odbc.dbtable.filter</field>
+            <field name="model">import.odbc.dbtable</field>
+            <field name="type">search</field>
+            <field name="arch" type="xml">
+                <search string="Search ODBC Imports">
+                    <field name="name"/>
+                    <field name="dbsource_id"/>
+                    <field name="model_target"/>
+                </search>
+            </field>
+        </record>
+
+        <!-- Menu -->
+        <record model="ir.actions.act_window" id="action_import_dbtable">
+            <field name="name">Import from ODBC</field>
+            <field name="res_model">import.odbc.dbtable</field>
+            <field name="view_type">form</field>
+        </record>
+        <menuitem name="Import from ODBC" id="menu_import_dbtable" parent="base.next_id_15" action="action_import_dbtable"/>
+    </data>
+</openerp>
+

=== added directory 'import_odbc/security'
=== added file 'import_odbc/security/ir.model.access.csv'
--- import_odbc/security/ir.model.access.csv	1970-01-01 00:00:00 +0000
+++ import_odbc/security/ir.model.access.csv	2012-08-06 03:35:23 +0000
@@ -0,0 +1,2 @@
+id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
+access_import_odbc_dbtable_group_system,import_odbc_dbtable_group_system,model_import_odbc_dbtable,base.group_system,1,1,1,1