File:  [Repository] / ZSQLExtend / importASCII.py
Revision 1.2
Wed Dec 5 18:42:46 2007 UTC by casties
Branches: MAIN
CVS tags: HEAD
try to improve performance

#!/usr/local/bin/python
#
"""Import a simple text file (separated values format) into a PostgreSQL table."""

import string
import logging
import sys
import types
import time

try:
    import psycopg2 as psycopg
    psyco = 2
except ImportError:
    import psycopg
    psyco = 1

version_string = "V0.1 ROC 4.12.2007"


def sql_quote(v):
    """quote a string value for direct use in an SQL statement"""
    # escape backslashes and single quotes, then wrap in single quotes
    v = v.replace("\\", "\\\\")
    v = v.replace("'", "''")
    return "'%s'"%v

def SimpleSearch(curs,query, args=None, ascii=False, result=True):
    """execute sql query and return data"""
    #logging.debug("executing: "+query+" "+repr(args))
    if ascii:
        # encode all in UTF-8
        query = query.encode("UTF-8")
        if args is not None:
            encargs = []
            for a in args:
                if a is not None:
                    a = a.encode("UTF-8")
                encargs.append(a)
            
            args = encargs

    curs.execute(query, args)
    #logger.debug("sql done")
    if result:
        try:
            return curs.fetchall()
        except Exception:
            # no result set to fetch (e.g. INSERT/UPDATE/DELETE)
            return None
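# Example use (hypothetical table and values): parameters are passed separately so
# the driver does the quoting; SELECTs return a list of row tuples, writes return None:
#   rows = SimpleSearch(cursor, "SELECT id, name FROM mytable WHERE id = %s", [42])
#   SimpleSearch(cursor, "DELETE FROM mytable WHERE id = %s", [42], result=False)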


class TableColumn:
    """simple type for storing sql column name and type"""
    
    def __init__(self, name, type=None):
        #print "new tablecolumn(%s,%s)"%(name, type)
        self.name = name
        self.type = type
        
    def getName(self):
        return self.name
    
    def getType(self):
        if self.type is not None:
            return self.type
        else:
            return "text"

    def __str__(self):
        return self.name
    
    
class ASCII_handler:
    def __init__(self,options):
        """Handler to import text format file (separated values format) into the table.
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.ascii_db: (optional) assume ascii encoding in db
        @param options.replace_table: (optional) delete and re-insert data
        @param options.backup_table: (optional) create backup of old table (breaks indices)
        @param options.use_logger_instance: (optional) use this instance of a logger
        """
        
        # set up logger
        if hasattr(options, 'use_logger_instance'):
            self.logger = options.use_logger_instance
        else:
            self.logger = logging.getLogger('db.import.ascii')

        
        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!"%options.dsn
    
        self.table = getattr(options,"table",None)
        self.update_fields = getattr(options,"update_fields",None)
        self.id_field = getattr(options,"id_field",None)
        self.sync_mode = getattr(options,"sync_mode",None)
        self.update_mode = getattr(options,"update_mode",None)
        self.ascii_db = getattr(options,"ascii_db",None)
        self.replace_table = getattr(options,"replace_table",None)
        self.backup_table = getattr(options,"backup_table",None)

        self.logger.debug("dsn: "+repr(getattr(options,"dsn",None)))
        self.logger.debug("table: "+repr(self.table))
        self.logger.debug("update_fields: "+repr(self.update_fields))
        self.logger.debug("id_field: "+repr(self.id_field))
        self.logger.debug("sync_mode: "+repr(self.sync_mode))
        self.logger.debug("update_mode: "+repr(self.update_mode))
        self.logger.debug("ascii_db: "+repr(self.ascii_db))
        self.logger.debug("replace_table: "+repr(self.replace_table))
        self.logger.debug("backup_table: "+repr(self.backup_table))
        
        self.dbIDs = {}
        self.rowcnt = 0
        
        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr="select %s from %s"%(self.id_field,self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0
                self.rowcnt += 1
                
            self.logger.info("%d entries in DB to sync"%self.rowcnt)
        
        # map text file column numbers to SQL columns
        self.xml_field_map = {}
        # and SQL column names back to column numbers
        self.sql_field_map = {}
        
        return


    def setup(self):
        """initialisation"""
        # rename table for backup
        if self.backup_table:
            self.orig_table = self.table
            self.table = self.table + "_tmp"
            # remove old temp table
            qstr = "DROP TABLE %s"%(self.table)
            try:
                self.db.execute(qstr)
            except Exception:
                # temp table may not exist yet
                pass
            
            self.dbCon.commit()
           
            if self.id_field:
                # sync mode -- copy table
                self.logger.info("copy table %s to %s"%(self.orig_table,self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(self.table,self.orig_table)

            else:
                # create new empty table with the same structure
                self.logger.info("create empty table %s"%(self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)
            
            self.db.execute(qstr)
            self.dbCon.commit()
        
        # delete data from table for replace
        if self.replace_table:
            self.logger.info("delete data from table %s"%(self.table))
            qstr = "TRUNCATE TABLE %s"%(self.table)
            self.db.execute(qstr)
            self.dbCon.commit()
           
        # try to match date style of the input data
        self.db.execute("set datestyle to 'german'")
        
        # get list of fields and types of db table
        qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields={}
        for f in SimpleSearch(self.db, qstr%self.table):
            n = f[0]
            t = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[n] = TableColumn(n,t)
        
        self.xml_update_list = []

        # map fields in text file
        for (k,v) in self.update_fields.items():
            # map to sql name and default text type
            self.xml_field_map[k] = v
            self.sql_field_map[v.getName()] = k
            # add to list of updateable fields (without id_field)
            if v.getName() != self.id_field:
                self.xml_update_list.append(k)
        
        if self.id_field:
            self.xml_id = self.sql_field_map[self.id_field]

        # order update columns by text file column number
        self.xml_update_list.sort()
        
        # prepare sql statements for update
        setStr=string.join(["%s = %%s"%self.xml_field_map[f].getName() for f in self.xml_update_list], ', ')
        self.updQuery="UPDATE %s SET %s WHERE %s = %%s"%(self.table,setStr,self.id_field)
        # and insert
        fields=string.join([self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values=string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
        self.logger.debug("update-query: "+self.updQuery)
        self.logger.debug("add-query: "+self.addQuery)
        return


    def parse(self, filename):
        """open file and read data"""
        self.logger.info("reading data...")
        self.rowcnt = 0

        fh = open(filename,"r")
        self.logger.debug("BEGIN RESULTSET")
        # parse line-wise
        for line in fh:
            self.handle_line(line)

        fh.close()
        # done. Wrap up
        self.logger.debug("END RESULTSET")
        self.dbCon.commit()
        
        if self.sync_mode:
            # delete unmatched entries in db
            self.logger.info("deleting unmatched rows from db")
            delQuery = "DELETE FROM %s WHERE %s = %%s"%(self.table,self.id_field)
            for id in self.dbIDs.keys():
                # delete all entries that were not updated
                if self.dbIDs[id] == 0:
                    self.logger.info(" delete: %s"%id)
                    SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db, result=False)
                    
                elif self.dbIDs[id] > 1:
                    self.logger.info(" sync: ID %s used more than once?"%id)
            
            self.dbCon.commit()
            
        # reinstate backup tables
        if self.backup_table:
            backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
            self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
            self.db.execute(qstr)
            self.logger.info("rename working table %s to %s"%(self.table,self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()
        
        return

    def handle_line(self, line):
        """process single line of text data"""
        self.logger.debug("START ROW")

        # split line into whitespace-separated fields
        content = line.split()
        self.xml_data = content
        self.rowcnt += 1
        # process collected row data
        update=False
        id_val=''
        # synchronize by id_field
        if self.id_field:
            id_val = self.xml_data[self.xml_id]
            if id_val in self.dbIDs:
                self.dbIDs[id_val] += 1
                update=True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            type = self.sql_fields[f.getName()].getType()
            if type == "date" and len(val) == 0: 
                # empty date field
                val = None
                
            elif type == "integer" and len(val) == 0: 
                # empty int field
                val = None
                
            args.append(val)
                    
        if update:
            # update existing row (by id_field)
            # last argument is ID match
            args.append(id_val)
            self.logger.debug("update: %s = %s"%(id_val, args))
            SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db, result=False)

        elif not self.update_mode:
            # create new row
            self.logger.debug("insert: %s"%args)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db, result=False)

        #self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        if (self.rowcnt % 100) == 0:
            self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val))
            self.dbCon.commit()
            
        self.logger.debug("END ROW")
        return



def importASCII(options):
    """import simple text file (separated values) into the table.     
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: textfile filename
        @param options.update_fields: list of fields to update
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.replace_table: (optional) delete and re-insert data
        @param options.backup_table: (optional) create backup of old table (breaks indices)
        """

    # process list of fields into hash indexed by column number in text file
    uf = {}
    i = 0
    for f in options.update_fields.split(','):
        if f.find(':') > 0:
            (n,t) = f.split(':')
        else:
            n = f
            t = None
        
        if n:
            uf[i] = TableColumn(n,t)

        i += 1

    options.update_fields = uf
    
    if getattr(options,'id_field',None) and getattr(options,'replace_table',None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        return
        
    if getattr(options,'sync_mode',None) and getattr(options,'update_mode',None):
        logging.error("ABORT: sorry, you can't do both sync-mode and update-mode")
        return
        
    if not getattr(options,'id_field',None) and getattr(options,'update_mode',None):
        logging.error("ABORT: sorry, you can't do update-mode without id-field")
        return
        
    # The "parser" is our own handler
    parser = ASCII_handler(options)
    # Initialize handler
    parser.setup()
    # run the file
    parser.parse(options.filename)  
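# Programmatic use is also possible: importASCII() only needs an object carrying the
# attributes documented above (the OptionParser result used below is one such object).
# A minimal sketch with hypothetical values:
#
#   class Options(object): pass
#   opts = Options()
#   opts.dsn = "dbname=mydb user=me"
#   opts.table = "mytable"
#   opts.filename = "data.txt"
#   opts.update_fields = "id:integer,name,birthdate:date"
#   opts.id_field = "id"
#   importASCII(opts)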
    

if __name__ == "__main__":
    from optparse import OptionParser

    opars = OptionParser()
    opars.add_option("-f", "--file", 
                     dest="filename",
                     help="text file name", metavar="FILE")
    opars.add_option("-c", "--dsn", 
                     dest="dsn", 
                     help="database connection string")
    opars.add_option("-t", "--table", 
                     dest="table", 
                     help="database table name")
    opars.add_option("--fields",
                     dest="update_fields", 
                     help="list of fields in the text file (comma separated, empty fields are not updated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field", 
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true", 
                     dest="sync_mode", 
                     help="do full sync based on id field (remove unmatched fields from db)")
    opars.add_option("--update-only", "--update-mode", default=False, action="store_true", 
                     dest="update_mode",
                     help="only update existing rows based on id field")
    opars.add_option("--ascii-db", default=False, action="store_true", 
                     dest="ascii_db", 
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true", 
                     dest="replace_table", 
                     help="replace table i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true", 
                     dest="backup_table", 
                     help="create backup of old table (breaks indices)")
    opars.add_option("-d", "--debug", default=False, action="store_true", 
                     dest="debug", 
                     help="debug mode (more output)")
    
    (options, args) = opars.parse_args()
    
    if (options.filename is None 
        or options.dsn is None 
        or options.update_fields is None
        or options.table is None):
        # not enough parameters
        print "importASCII "+version_string
        opars.print_help()
        sys.exit(1)
    
    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO
    
    logging.basicConfig(level=loglevel, 
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importASCII(options)
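    # Example invocation (hypothetical connection string, table and fields):
    #   importASCII.py -c "dbname=mydb user=me" -t mytable -f data.txt \
    #       --fields "id:integer,name,birthdate:date" --id-field id --sync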


    

