Annotation of ZSQLExtend/importFMPXML.py, revision 1.17
1.1 casties 1: #!/usr/local/bin/python
2: #
3:
4: import string
5: import logging
6: import sys
1.7 dwinter 7: import types
1.8 casties 8: import time
1.1 casties 9:
1.5 casties 10: from xml import sax
11: from amara import saxtools
12:
1.2 casties 13: try:
14: import psycopg2 as psycopg
15: psyco = 2
16: except:
17: import psycopg
18: psyco = 1
19:
1.5 casties 20: fm_ns = 'http://www.filemaker.com/fmpxmlresult'
1.1 casties 21:
1.16 casties 22: version_string = "V0.5 ROC 11.12.2007"
1.8 casties 23:
def getTextFromNode(nodename):
    """Return the concatenated text (CDATA) content of a DOM node.

    Returns the empty string when the node is None or has no text children.
    """
    if nodename is None:
        return ""
    # concatenate the data of all direct TEXT_NODE children
    return "".join(child.data for child in nodename.childNodes
                   if child.nodeType == child.TEXT_NODE)
34:
def sql_quote(v):
    """Quote and escape a string for literal inclusion in an SQL statement.

    Doubles backslashes and single quotes and wraps the result in single
    quotes, e.g. "it's" -> "'it''s'".
    """
    # the two replacements are independent (neither replacement text contains
    # the other's search character), so the order does not matter -- the
    # original dict-driven loop relied on that, too
    v = v.replace("\\", "\\\\")
    v = v.replace("'", "''")
    return "'%s'" % v
1.1 casties 42:
def SimpleSearch(curs, query, args=None, ascii=False):
    """Execute an SQL query on cursor curs and return all result rows.

    @param curs: database cursor
    @param query: SQL query string (may contain %s placeholders)
    @param args: (optional) sequence of query arguments
    @param ascii: (optional) UTF-8-encode query and string arguments before
        executing (for databases operating on byte strings)
    @returns: list of result rows, or None when the statement produced no
        result set (e.g. INSERT/UPDATE)
    """
    #logger.debug("executing: "+query)
    if ascii:
        # encode all in UTF-8
        query = query.encode("UTF-8")
        if args is not None:
            encargs = []
            for a in args:
                if a is not None and isinstance(a, str):
                    a = a.encode("UTF-8")
                encargs.append(a)

            args = encargs

    curs.execute(query, args)
    #logger.debug("sql done")
    try:
        return curs.fetchall()
    except Exception:
        # fetchall() raises when the statement returned no result set;
        # treat that as "no data" (was a bare except: -- narrowed so that
        # KeyboardInterrupt/SystemExit are not swallowed)
        return None
1.1 casties 64:
65:
class TableColumn:
    """Lightweight holder for an SQL column name and type."""

    def __init__(self, name, type=None):
        """Create a column description; type may be None (treated as text)."""
        #print "new tablecolumn(%s,%s)"%(name, type)
        self.name = name
        self.type = type

    def getName(self):
        """Return the column name."""
        return self.name

    def getType(self):
        """Return the column type, falling back to 'text' when unset."""
        return self.type if self.type is not None else "text"

    def __str__(self):
        return self.name
85:
86:
class xml_handler:
    """SAX "consumer" for amara's tenorsax that imports a FileMaker
    FMPXMLRESULT stream into a PostgreSQL table.

    The handle_* methods are generators driven by tenorsax: the first
    yield corresponds to the element's start event, the loop consumes
    all events inside the element, and the code after the loop runs on
    the element's end event.
    """

    def __init__(self,options):
        """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.lc_names: (optional) lower case and clean up field names from XML
        @param options.keep_fields: (optional) don't add fields to SQL database
        @param options.ascii_db: (optional) assume ascii encoding in db
        @param options.replace_table: (optional) delete and re-insert data
        @param options.backup_table: (optional) create backup of old table (breaks indices)
        @param options.use_logger_instance: (optional) use this instance of a logger
        """

        # set up logger
        if hasattr(options, 'use_logger_instance'):
            self.logger = options.use_logger_instance
        else:
            self.logger = logging.getLogger('db.import.fmpxml')


        # set up parser: dispatch METADATA and RESULTSET elements to
        # their generator handlers
        self.event = None
        self.top_dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'METADATA'):
            self.handle_meta_fields,
            (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'):
            self.handle_data_fields,
            }

        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!"%options.dsn

        # copy options into attributes (None when not given)
        self.table = getattr(options,"table",None)
        self.update_fields = getattr(options,"update_fields",None)
        self.id_field = getattr(options,"id_field",None)
        self.sync_mode = getattr(options,"sync_mode",None)
        self.lc_names = getattr(options,"lc_names",None)
        self.keep_fields = getattr(options,"keep_fields",None)
        self.ascii_db = getattr(options,"ascii_db",None)
        self.replace_table = getattr(options,"replace_table",None)
        self.backup_table = getattr(options,"backup_table",None)

        self.logger.debug("dsn: "+repr(getattr(options,"dsn",None)))
        self.logger.debug("table: "+repr(self.table))
        self.logger.debug("update_fields: "+repr(self.update_fields))
        self.logger.debug("id_field: "+repr(self.id_field))
        self.logger.debug("sync_mode: "+repr(self.sync_mode))
        self.logger.debug("lc_names: "+repr(self.lc_names))
        self.logger.debug("keep_fields: "+repr(self.keep_fields))
        self.logger.debug("ascii_db: "+repr(self.ascii_db))
        self.logger.debug("replace_table: "+repr(self.replace_table))
        self.logger.debug("backup_table: "+repr(self.backup_table))

        # map: id value -> number of rows in the XML matching that id
        # (0 = present in DB but not seen in XML yet)
        self.dbIDs = {}
        self.rowcnt = 0

        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr="select %s from %s"%(self.id_field,self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0;
                self.rowcnt += 1

            self.logger.info("%d entries in DB to sync"%self.rowcnt)

        # names of fields in XML file
        self.xml_field_names = []
        # map XML field names to SQL field names
        self.xml_field_map = {}
        # and vice versa
        self.sql_field_map = {}

        return

    def handle_meta_fields(self, end_condition):
        """Process the METADATA element: read field definitions, prepare
        backup/replace tables, align DB columns with the XML fields and
        build the UPDATE/INSERT statement templates."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
            self.handle_meta_field,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading metadata...")
        self.logger.debug("START METADATA")
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END METADATA")

        # rename table for backup
        if self.backup_table:
            self.orig_table = self.table
            self.tmp_table = self.table + "_tmp"
            backup_name = "%s_%s"%(self.table,time.strftime('%Y_%m_%d_%H_%M_%S'))

            # remove old temp table
            qstr = "DROP TABLE %s"%(self.tmp_table)
            try:
                self.db.execute(qstr)
            except:
                # NOTE(review): bare except -- intended as "ignore if the
                # temp table does not exist", but it hides any other error
                pass

            self.dbCon.commit()

            if self.id_field:
                # sync mode -- copy backup table, update current table
                self.logger.info("copy table %s to %s"%(self.table,backup_name))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(backup_name,self.table)

            else:
                # replace mode -- create empty tmp table, insert into tmp table
                self.table = self.tmp_table
                self.logger.info("create empty table %s"%(self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)

            self.db.execute(qstr)
            self.dbCon.commit()

        # delete data from table for replace
        if self.replace_table:
            self.logger.info("delete data from table %s"%(self.table))
            qstr = "TRUNCATE TABLE %s"%(self.table)
            self.db.execute(qstr)
            self.dbCon.commit()

        # try to match date style with XML
        self.db.execute("set datestyle to 'german'")

        #self.logger.debug("xml-fieldnames:"+repr(self.xml_field_names))
        # get list of fields and types of db table (from pg catalogs)
        qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields={}
        for f in SimpleSearch(self.db, qstr%self.table):
            n = f[0]
            t = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[n] = TableColumn(n,t)

        # translate id_field (SQL-name) to XML-name
        self.xml_id = self.sql_field_map.get(self.id_field, None)
        # get type of id_field
        if self.id_field:
            self.id_type = self.sql_fields[self.id_field].getType()
        else:
            self.id_type = None

        # check fields to update
        if self.update_fields is None:
            if self.keep_fields:
                # update all existing fields from sql (when they are in the xml file)
                self.update_fields = {}
                for f in self.sql_fields.keys():
                    if self.sql_field_map.has_key(f):
                        xf = self.sql_field_map[f]
                        self.update_fields[f] = self.xml_field_map[xf]

            else:
                # update all fields
                if self.lc_names:
                    # create dict with sql names
                    self.update_fields = {}
                    for f in self.xml_field_map.values():
                        self.update_fields[f.getName()] = f

                else:
                    self.update_fields = self.xml_field_map

        # and translate to list of xml fields
        if self.lc_names:
            self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
        else:
            self.xml_update_list = self.update_fields.keys()

        if not self.keep_fields:
            # adjust db table to fields in XML and update_fields
            for f in self.xml_field_map.values():
                self.logger.debug("sync-fieldname: %s"%f.getName())
                sf = self.sql_fields.get(f.getName(), None)
                uf = self.update_fields.get(f.getName(), None)
                if sf is not None:
                    # name in db -- check type
                    if f.getType() != sf.getType():
                        self.logger.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType()))
                elif uf is not None:
                    # add field to table
                    qstr="alter table %s add %s %s"%(self.table,uf.getName(),uf.getType())
                    self.logger.info("db add field:"+qstr)

                    if self.ascii_db and type(qstr)==types.UnicodeType:
                        qstr=qstr.encode('utf-8')

                    self.db.execute(qstr)
                    self.dbCon.commit()

        # prepare sql statements for update (do not update id_field)
        setStr=string.join(["%s = %%s"%self.xml_field_map[f] for f in self.xml_update_list if f != self.xml_id], ', ')
        self.updQuery="UPDATE %s SET %s WHERE %s = %%s"%(self.table,setStr,self.id_field)
        # and insert
        fields=string.join([self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values=string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
        self.logger.debug("update-query: "+self.updQuery)
        self.logger.debug("add-query: "+self.addQuery)
        return

    def handle_meta_field(self, end_condition):
        """Process one FIELD element: record the XML field name and its
        (optionally cleaned/lower-cased) SQL name in the name maps."""
        name = self.params.get((None, u'NAME'))
        yield None
        #Element closed. Wrap up
        if self.lc_names:
            # clean name
            sqlname = name.replace(" ","_").lower()
        else:
            sqlname = name
        self.xml_field_names.append(name)
        # map to sql name and default text type
        self.xml_field_map[name] = TableColumn(sqlname, 'text')
        self.sql_field_map[sqlname] = name
        self.logger.debug("FIELD name: "+name)
        return

    def handle_data_fields(self, end_condition):
        """Process the RESULTSET element: import all ROWs, then delete
        unmatched rows (sync mode) and swap in backup tables."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'ROW'):
            self.handle_row,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading data...")
        self.logger.debug("START RESULTSET")
        self.rowcnt = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END RESULTSET")
        self.dbCon.commit()

        if self.sync_mode:
            # delete unmatched entries in db
            self.logger.info("deleting unmatched rows from db")
            delQuery = "DELETE FROM %s WHERE %s = %%s"%(self.table,self.id_field)
            for id in self.dbIDs.keys():
                # find all not-updated fields
                if self.dbIDs[id] == 0:
                    self.logger.info(" delete:"+id)
                    SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)
                    # NOTE(review): sys.exit(1) aborts the whole process after
                    # the FIRST delete -- looks like leftover debug code; the
                    # commit below is never reached. TODO confirm and remove.
                    sys.exit(1)

                elif self.dbIDs[id] > 1:
                    self.logger.info(" sync: ID %s used more than once?"%id)

            self.dbCon.commit()

        # reinstate backup tables (only in replace mode, where self.table
        # points at the tmp table)
        if self.backup_table and not self.id_field:
            backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
            self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
            self.db.execute(qstr)
            self.logger.info("rename working table %s to %s"%(self.table,self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()

        return

    def handle_row(self, end_condition):
        """Process one ROW element: collect the column values and either
        UPDATE an existing row (matched by id_field) or INSERT a new one."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'COL'):
            self.handle_col,
            }
        self.logger.debug("START ROW")
        self.xml_data = {}
        self.colIdx = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END ROW")
        self.rowcnt += 1
        # process collected row data
        update=False
        id_val=''
        # synchronize by id_field
        if self.id_field:
            if self.id_type == 'integer':
                id_val = int(self.xml_data[self.xml_id])
            else:
                id_val = self.xml_data[self.xml_id]

            if id_val in self.dbIDs:
                self.dbIDs[id_val] += 1
                update=True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            # do not update id_field
            if self.id_field and fn == self.xml_id:
                continue

            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            # NOTE(review): "type" shadows the builtin here
            type = self.sql_fields[f.getName()].getType()
            if type == "date" and len(val) == 0:
                # empty date field
                val = None

            elif type == "integer" and len(val) == 0:
                # empty int field
                val = None

            args.append(val)

        if update:
            # update existing row (by id_field)
            # last argument is ID match
            args.append(id_val)
            self.logger.debug("update: %s = %s"%(id_val, args))
            SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

        else:
            # create new row
            self.logger.debug("insert: %s"%args)
            # NOTE(review): sys.exit(1) aborts the process before the first
            # INSERT ever runs -- the SimpleSearch below is unreachable.
            # Almost certainly leftover debug code; TODO confirm and remove.
            sys.exit(1)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)

        #self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        if (self.rowcnt % 100) == 0:
            # log progress and commit every 100 rows
            self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val))
            self.dbCon.commit()

        return

    def handle_col(self, end_condition):
        """Process one COL element: delegate to DATA and advance the
        column index."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'DATA'):
            self.handle_data_tag,
            }
        #print "START COL"
        yield None
        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None
        #Element closed. Wrap up
        #print "END COL"
        self.colIdx += 1
        return

    def handle_data_tag(self, end_condition):
        """Process one DATA element: gather its character data and store
        it in xml_data under the current column's XML field name."""
        #print "START DATA"
        content = u''
        yield None
        # gather child elements
        while not self.event == end_condition:
            if self.event[0] == saxtools.CHARACTER_DATA:
                content += self.params
            yield None
        #Element closed. Wrap up
        fn = self.xml_field_names[self.colIdx]
        self.xml_data[fn] = content
        return
483:
484:
def importFMPXML(options):
    """import FileMaker XML file (FMPXMLRESULT format) into the table.
    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) comma separated list of fields
        to update, each entry "name" or "name:type"; default is to create
        all fields
    @param options.id_field: (optional) field which uniquely identifies an
        entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete
        entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names
        from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    @param options.backup_table: (optional) create backup of old table
    """

    # turn the comma separated field spec into a name -> TableColumn map
    if getattr(options, 'update_fields', None):
        field_map = {}
        for spec in options.update_fields.split(','):
            if spec.find(':') > 0:
                (col_name, col_type) = spec.split(':')
            else:
                col_name = spec
                col_type = None
            field_map[col_name] = TableColumn(col_name, col_type)

        options.update_fields = field_map

    # sync (id_field) and replace are mutually exclusive
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        return

    parser = sax.make_parser()
    #The "consumer" is our own handler
    consumer = xml_handler(options)
    #Initialize Tenorsax with handler
    handler = saxtools.tenorsax(consumer)
    #Resulting tenorsax instance is the SAX handler
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
526:
1.1 casties 527:
if __name__ == "__main__":
    from optparse import OptionParser

    # command line interface: all options map onto importFMPXML's options
    opars = OptionParser()
    opars.add_option("-f", "--file",
                     dest="filename",
                     help="FMPXML file name", metavar="FILE")
    opars.add_option("-c", "--dsn",
                     dest="dsn",
                     help="database connection string")
    opars.add_option("-t", "--table",
                     dest="table",
                     help="database table name")
    opars.add_option("--fields", default=None,
                     dest="update_fields",
                     help="list of fields to update (comma separated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field",
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true",
                     dest="sync_mode",
                     help="do full sync based on id field (remove unmatched fields from db)")
    opars.add_option("--lc-names", default=False, action="store_true",
                     dest="lc_names",
                     help="clean and lower case field names from XML")
    opars.add_option("--keep-fields", default=False, action="store_true",
                     dest="keep_fields",
                     help="don't add fields from XML to SQL table")
    opars.add_option("--ascii-db", default=False, action="store_true",
                     dest="ascii_db",
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true",
                     dest="replace_table",
                     help="replace table i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true",
                     dest="backup_table",
                     help="create backup of old table")
    opars.add_option("-d", "--debug", default=False, action="store_true",
                     dest="debug",
                     help="debug mode (more output)")

    (options, args) = opars.parse_args()

    # filename and dsn are mandatory; print usage and bail out otherwise
    if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
        print "importFMPXML "+version_string
        opars.print_help()
        sys.exit(1)

    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    # log to stderr with timestamps
    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importFMPXML(options)
586:
1.9 dwinter 587:
588:
1.1 casties 589:
FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>