#!/usr/local/bin/python
#
import string
import logging
import sys
import types
import time

from xml import sax
from amara import saxtools

try:
    import psycopg2 as psycopg
    psyco = 2
except ImportError:
    import psycopg
    psyco = 1

fm_ns = 'http://www.filemaker.com/fmpxmlresult'

version_string = "V0.4 ROC 29.3.2007"


def getTextFromNode(nodename):
    """get the cdata content of a node"""
    if nodename is None:
        return ""
    nodelist = nodename.childNodes
    rc = ""
    for node in nodelist:
        if node.nodeType == node.TEXT_NODE:
            rc = rc + node.data
    return rc


def sql_quote(v):
    """quote and escape a string for use as an SQL literal"""
    # quote dictionary
    quote_dict = {"\'": "''", "\\": "\\\\"}
    for dkey in quote_dict.keys():
        if string.find(v, dkey) >= 0:
            v = string.join(string.split(v, dkey), quote_dict[dkey])
    return "'%s'" % v


def SimpleSearch(curs, query, args=None, ascii=False):
    """execute sql query and return data"""
    #logging.debug("executing: "+query)
    if ascii:
        # encode all in UTF-8
        query = query.encode("UTF-8")
        if args is not None:
            encargs = []
            for a in args:
                if a is not None:
                    a = a.encode("UTF-8")
                encargs.append(a)
            args = encargs

    curs.execute(query, args)
    #logging.debug("sql done")
    try:
        return curs.fetchall()
    except:
        return None


class TableColumn:
    """simple type for storing sql column name and type"""

    def __init__(self, name, type=None):
        #print "new tablecolumn(%s,%s)"%(name, type)
        self.name = name
        self.type = type

    def getName(self):
        return self.name

    def getType(self):
        if self.type is not None:
            return self.type
        else:
            return "text"

    def __str__(self):
        return self.name
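
# Illustrative sketch only (hypothetical values), showing how the helpers above
# behave: sql_quote() doubles single quotes and backslashes before wrapping the
# value in quotes, and TableColumn falls back to the "text" type when none is given:
#
#   sql_quote("it's")                         -> "'it''s'"
#   TableColumn("title").getType()            -> 'text'
#   TableColumn("year", "integer").getType()  -> 'integer'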


class xml_handler:

    def __init__(self, options):
        """SAX handler to import a FileMaker XML file (FMPXMLRESULT format) into an SQL table.

        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in the XML file
        @param options.lc_names: (optional) lower case and clean up field names from the XML
        @param options.keep_fields: (optional) don't add fields to the SQL database
        @param options.ascii_db: (optional) assume ascii encoding in the db
        @param options.replace_table: (optional) delete and re-insert data
        """
        # set up parser
        self.event = None
        self.top_dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'METADATA'):
                self.handle_meta_fields,
            (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'):
                self.handle_data_fields,
            }

        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!" % options.dsn

        self.table = getattr(options, "table", None)
        self.update_fields = getattr(options, "update_fields", None)
        self.id_field = getattr(options, "id_field", None)
        self.sync_mode = getattr(options, "sync_mode", None)
        self.lc_names = getattr(options, "lc_names", None)
        self.keep_fields = getattr(options, "keep_fields", None)
        self.ascii_db = getattr(options, "ascii_db", None)
        self.replace_table = getattr(options, "replace_table", None)
        self.backup_table = getattr(options, "backup_table", None)

        logging.debug("dsn: " + repr(getattr(options, "dsn", None)))
        logging.debug("table: " + repr(self.table))
        logging.debug("update_fields: " + repr(self.update_fields))
        logging.debug("id_field: " + repr(self.id_field))
        logging.debug("sync_mode: " + repr(self.sync_mode))
        logging.debug("lc_names: " + repr(self.lc_names))
        logging.debug("keep_fields: " + repr(self.keep_fields))
        logging.debug("ascii_db: " + repr(self.ascii_db))
        logging.debug("replace_table: " + repr(self.replace_table))

        self.dbIDs = {}
        self.rowcnt = 0

        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr = "select %s from %s" % (self.id_field, self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0
                self.rowcnt += 1

            logging.info("%d entries in DB to sync" % self.rowcnt)

        # names of fields in XML file
        self.xml_field_names = []
        # map XML field names to SQL field names
        self.xml_field_map = {}
        # and vice versa
        self.sql_field_map = {}
        return
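
    # Rough sketch of the FMPXMLRESULT input this handler expects (element names
    # match the dispatchers used below; attribute values shown are illustrative):
    #
    #   <FMPXMLRESULT xmlns="http://www.filemaker.com/fmpxmlresult">
    #     <METADATA>
    #       <FIELD NAME="Author" .../>
    #     </METADATA>
    #     <RESULTSET FOUND="...">
    #       <ROW ...><COL><DATA>Jane Doe</DATA></COL> ...</ROW>
    #     </RESULTSET>
    #   </FMPXMLRESULT>
    #
    # handle_meta_fields() consumes METADATA/FIELD, handle_data_fields() consumes
    # RESULTSET/ROW (see below).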

    def handle_meta_fields(self, end_condition):
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
                self.handle_meta_field,
            }
        # First round through the generator corresponds to the
        # start element event
        logging.debug("START METADATA")
        yield None

        # delegate is a generator that handles all the events "within"
        # this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        # Element closed. Wrap up
        logging.debug("END METADATA")

        # rename table for backup
        if self.backup_table:
            self.orig_table = self.table
            self.table = self.table + "_tmp"
            # remove old temp table
            qstr = "DROP TABLE %s" % (self.table)
            try:
                self.db.execute(qstr)
            except:
                pass

            self.dbCon.commit()

            if self.id_field:
                # sync mode -- copy table
                logging.info("copy table %s to %s" % (self.orig_table, self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)" % (self.table, self.orig_table)
            else:
                # rename table and create empty new one
                logging.info("create empty table %s" % (self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)" % (self.table, self.orig_table)

            self.db.execute(qstr)
            self.dbCon.commit()

        # delete data from table for replace
        if self.replace_table:
            logging.info("delete data from table %s" % (self.table))
            qstr = "TRUNCATE TABLE %s" % (self.table)
            self.db.execute(qstr)
            self.dbCon.commit()

        # try to match date style with XML
        self.db.execute("set datestyle to 'german'")

        # translate id_field (SQL-name) to XML-name
        self.xml_id = self.sql_field_map.get(self.id_field, None)
        #logging.debug("xml-fieldnames:"+repr(self.xml_field_names))

        # get list of fields and types of db table
        qstr = "select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields = {}
        for f in SimpleSearch(self.db, qstr % self.table):
            n = f[0]
            t = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[n] = TableColumn(n, t)

        # check fields to update
        if self.update_fields is None:
            if self.keep_fields:
                # update existing fields
                self.update_fields = self.sql_fields
            else:
                # update all fields
                if self.lc_names:
                    # create dict with sql names
                    self.update_fields = {}
                    for f in self.xml_field_map.values():
                        self.update_fields[f.getName()] = f
                else:
                    self.update_fields = self.xml_field_map

        # and translate to list of xml fields
        if self.lc_names:
            self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
        else:
            self.xml_update_list = self.update_fields.keys()

        if not self.keep_fields:
            # adjust db table to fields in XML and update_fields
            for f in self.xml_field_map.values():
                logging.debug("sync-fieldname: %s" % f.getName())
                sf = self.sql_fields.get(f.getName(), None)
                uf = self.update_fields.get(f.getName(), None)
                if sf is not None:
                    # name in db -- check type
                    if f.getType() != sf.getType():
                        logging.debug("field %s has different type (%s vs %s)" % (f, f.getType(), sf.getType()))
                elif uf is not None:
                    # add field to table
                    qstr = "alter table %s add %s %s" % (self.table, uf.getName(), uf.getType())
                    logging.info("db add field:" + qstr)
                    if self.ascii_db and type(qstr) == types.UnicodeType:
                        qstr = qstr.encode('utf-8')
                    self.db.execute(qstr)
                    self.dbCon.commit()

        # prepare sql statements for update
        setStr = string.join(["%s = %%s" % self.xml_field_map[f] for f in self.xml_update_list], ', ')
        self.updQuery = "UPDATE %s SET %s WHERE %s = %%s" % (self.table, setStr, self.id_field)
        # and insert
        fields = string.join([self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values = string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery = "INSERT INTO %s (%s) VALUES (%s)" % (self.table, fields, values)
        logging.debug("update-query: " + self.updQuery)
        logging.debug("add-query: " + self.addQuery)
        return
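
    # For illustration (hypothetical table "books" with update fields "author" and
    # "title" and id_field "id"), the statements prepared above come out as:
    #
    #   updQuery: UPDATE books SET author = %s, title = %s WHERE id = %s
    #   addQuery: INSERT INTO books (author,title) VALUES (%s,%s)
    #
    # The %s placeholders are filled in per row by handle_row() via SimpleSearch().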

    def handle_meta_field(self, end_condition):
        name = self.params.get((None, u'NAME'))
        yield None

        # Element closed. Wrap up
        if self.lc_names:
            # clean up name
            sqlname = name.replace(" ", "_").lower()
        else:
            sqlname = name

        self.xml_field_names.append(name)
        # map to sql name and default text type
        self.xml_field_map[name] = TableColumn(sqlname, 'text')
        self.sql_field_map[sqlname] = name
        logging.debug("FIELD name: " + name)
        return

    def handle_data_fields(self, end_condition):
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'ROW'):
                self.handle_row,
            }
        # First round through the generator corresponds to the
        # start element event
        logging.debug("START RESULTSET")
        self.rowcnt = 0
        yield None

        # delegate is a generator that handles all the events "within"
        # this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        # Element closed. Wrap up
        logging.debug("END RESULTSET")
        self.dbCon.commit()

        if self.sync_mode:
            # delete unmatched entries in db
            logging.info("deleting unmatched rows from db")
            delQuery = "DELETE FROM %s WHERE %s = %%s" % (self.table, self.id_field)
            for id in self.dbIDs.keys():
                if self.dbIDs[id] == 0:
                    # counter still 0, i.e. id was never seen in the XML -- delete it
                    logging.info(" delete: %s" % id)
                    SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)
                elif self.dbIDs[id] > 1:
                    logging.info(" sync: ID %s used more than once?" % id)

            self.dbCon.commit()

        # reinstate backup tables
        if self.backup_table:
            backup_name = "%s_%s" % (self.orig_table, time.strftime('%Y_%m_%d_%H_%M_%S'))
            logging.info("rename backup table %s to %s" % (self.orig_table, backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s" % (self.orig_table, backup_name)
            self.db.execute(qstr)
            logging.info("rename working table %s to %s" % (self.table, self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s" % (self.table, self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()

        return
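
    # Sync bookkeeping, for reference: __init__() loads every existing id into
    # self.dbIDs with a counter of 0, handle_row() increments the counter of each id
    # it finds in the XML, and the loop above deletes every id whose counter is still
    # 0 after the RESULTSET, i.e. every row that no longer occurs in the XML export.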

    def handle_row(self, end_condition):
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'COL'):
                self.handle_col,
            }
        logging.debug("START ROW")
        self.xml_data = {}
        self.colIdx = 0
        yield None

        # delegate is a generator that handles all the events "within"
        # this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        # Element closed. Wrap up
        logging.debug("END ROW")
        self.rowcnt += 1
        # process collected row data
        update = False
        id_val = ''
        # synchronize by id_field
        if self.id_field:
            id_val = self.xml_data[self.xml_id]
            if id_val in self.dbIDs:
                self.dbIDs[id_val] += 1
                update = True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            ftype = self.sql_fields[f.getName()].getType()
            if ftype == "date" and len(val) == 0:
                # empty date field -- store NULL
                val = None
            elif ftype == "integer" and len(val) == 0:
                # empty integer field -- store NULL
                val = None

            args.append(val)

        if update:
            # update existing row (by id_field); the last argument is the ID to match
            args.append(id_val)
            logging.debug("update: %s = %s" % (id_val, args))
            SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)
        else:
            # create new row
            logging.debug("insert: %s" % args)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)

        #logging.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        if (self.rowcnt % 10) == 0:
            logging.info(" row: %d (%s)" % (self.rowcnt, id_val))
            self.dbCon.commit()

        return

    def handle_col(self, end_condition):
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'DATA'):
                self.handle_data_tag,
            }
        #print "START COL"
        yield None

        # delegate is a generator that handles all the events "within"
        # this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        # Element closed. Wrap up
        #print "END COL"
        self.colIdx += 1
        return

    def handle_data_tag(self, end_condition):
        #print "START DATA"
        content = u''
        yield None

        # gather character data of child elements
        while not self.event == end_condition:
            if self.event[0] == saxtools.CHARACTER_DATA:
                content += self.params
            yield None

        # Element closed. Wrap up
        fn = self.xml_field_names[self.colIdx]
        self.xml_data[fn] = content
        return
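
# A note on parameter binding (sketch with hypothetical values): handle_row() hands
# the collected values to the db driver positionally, roughly
#
#   curs.execute("UPDATE books SET author = %s, title = %s WHERE id = %s",
#                ["Jane Doe", "Principia", "42"])
#
# so psycopg does the quoting, and None values (empty date/integer fields) end up
# as SQL NULL.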


def importFMPXML(options):
    """import a FileMaker XML file (FMPXMLRESULT format) into an SQL table.

    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in the XML file
    @param options.lc_names: (optional) lower case and clean up field names from the XML
    @param options.keep_fields: (optional) don't add fields to the SQL database
    @param options.ascii_db: (optional) assume ascii encoding in the db
    @param options.replace_table: (optional) delete and re-insert data
    """
    if getattr(options, 'update_fields', None):
        # parse "name" or "name:type" entries into TableColumns
        uf = {}
        for f in options.update_fields.split(','):
            if f.find(':') > 0:
                (n, t) = f.split(':')
            else:
                n = f
                t = None
            uf[n] = TableColumn(n, t)

        options.update_fields = uf

    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        sys.exit(1)

    parser = sax.make_parser()
    # The "consumer" is our own handler
    consumer = xml_handler(options)
    # Initialize Tenorsax with the handler
    handler = saxtools.tenorsax(consumer)
    # The resulting tenorsax instance is the SAX handler
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)


if __name__ == "__main__":
    from optparse import OptionParser

    opars = OptionParser()
    opars.add_option("-f", "--file",
                     dest="filename",
                     help="FMPXML file name", metavar="FILE")
    opars.add_option("-c", "--dsn",
                     dest="dsn",
                     help="database connection string")
    opars.add_option("-t", "--table",
                     dest="table",
                     help="database table name")
    opars.add_option("--fields", default=None,
                     dest="update_fields",
                     help="list of fields to update (comma separated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field",
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true",
                     dest="sync_mode",
                     help="do full sync based on id field (remove unmatched rows from db)")
    opars.add_option("--lc-names", default=False, action="store_true",
                     dest="lc_names",
                     help="clean and lower case field names from XML")
    opars.add_option("--keep-fields", default=False, action="store_true",
                     dest="keep_fields",
                     help="don't add fields from XML to SQL table")
    opars.add_option("--ascii-db", default=False, action="store_true",
                     dest="ascii_db",
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true",
                     dest="replace_table",
                     help="replace table, i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true",
                     dest="backup_table",
                     help="create backup of old table (breaks indices)")
    opars.add_option("-d", "--debug", default=False, action="store_true",
                     dest="debug",
                     help="debug mode (more output)")

    (options, args) = opars.parse_args()

    if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
        print "importFMPXML " + version_string
        opars.print_help()
        sys.exit(1)

    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importFMPXML(options)
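
# Example invocation (script, file, DSN and table names are illustrative):
#
#   python importFMPXML.py -f export.xml -c "dbname=mydb user=me" \
#       -t books --lc-names --id-field id --sync --debug
#
# This parses export.xml, adds any missing columns to the table "books", updates
# rows matched via the "id" column and, because of --sync, deletes rows whose id
# does not occur in the XML export.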