Annotation of ZSQLExtend/importFMPXML.py, revision 1.13
1.1 casties 1: #!/usr/local/bin/python
2: #
3:
4: import string
5: import logging
6: import sys
1.7 dwinter 7: import types
1.8 casties 8: import time
1.1 casties 9:
1.5 casties 10: from xml import sax
11: from amara import saxtools
12:
1.2 casties 13: try:
14: import psycopg2 as psycopg
15: psyco = 2
16: except:
17: import psycopg
18: psyco = 1
19:
1.5 casties 20: fm_ns = 'http://www.filemaker.com/fmpxmlresult'
1.1 casties 21:
1.8 casties 22: version_string = "V0.4 ROC 29.3.2007"
23:
def getTextFromNode(nodename):
    """Return the concatenated character data of a DOM node's direct text children.

    Returns the empty string for None (missing node).
    """
    if nodename is None:
        return ""
    # join only the immediate TEXT_NODE children; element children are skipped
    return "".join(child.data for child in nodename.childNodes
                   if child.nodeType == child.TEXT_NODE)
34:
def sql_quote(v):
    """Quote and escape a string value for literal inclusion in an SQL statement.

    Doubles single quotes and backslashes, then wraps the result in
    single quotes.
    @param v: string value to quote
    @return: quoted string, e.g. "it's" -> "'it''s'"
    """
    # escape characters that are special inside SQL string literals;
    # the two replacements do not interact (neither output contains the
    # other's search key), so iteration order is irrelevant.
    # str.replace is the direct equivalent of the old
    # string.join(string.split(...)) idiom and needs no pre-check.
    quote_dict = {"\'": "''", "\\": "\\\\"}
    for dkey, dval in quote_dict.items():
        v = v.replace(dkey, dval)
    return "'%s'" % v
1.1 casties 42:
def SimpleSearch(curs, query, args=None, ascii=False):
    """Execute an SQL query on the given cursor and return the result rows.

    @param curs: DB cursor to execute the query on
    @param query: SQL query string (may contain %s parameter placeholders)
    @param args: (optional) sequence of parameters for the query
    @param ascii: (optional) if true, encode query and string args as UTF-8
                  byte strings before executing (for non-unicode databases)
    @return: list of result rows, or None when the statement produced no
             result set (e.g. INSERT/UPDATE/DELETE)
    """
    #logging.debug("executing: "+query)
    if ascii:
        # encode all in UTF-8
        query = query.encode("UTF-8")
        if args is not None:
            encargs = []
            for a in args:
                if a is not None:
                    # None (SQL NULL) is passed through unencoded
                    a = a.encode("UTF-8")
                encargs.append(a)
            args = encargs

    curs.execute(query, args)
    #logging.debug("sql done")
    try:
        return curs.fetchall()
    except Exception:
        # narrowed from a bare except: fetchall() raises on statements
        # without a result set -- treat that as "no data", but do not
        # swallow SystemExit/KeyboardInterrupt
        return None
1.1 casties 64:
65:
class TableColumn:
    """Container for an SQL column's name and (optional) type.

    The type defaults to "text" when none was given.
    """

    def __init__(self, name, type=None):
        #print "new tablecolumn(%s,%s)"%(name, type)
        self.name = name
        self.type = type

    def getName(self):
        """Return the column name."""
        return self.name

    def getType(self):
        """Return the column type; falls back to "text" if unset."""
        if self.type is None:
            return "text"
        return self.type

    def __str__(self):
        # string form is just the column name (used in SQL statement building)
        return self.name
86:
class xml_handler:
    """SAX content handler (tenorsax generator style) that imports a
    FileMaker FMPXMLRESULT XML file into a PostgreSQL table, optionally
    syncing via an id field, replacing the data, or keeping a backup table."""

    def __init__(self, options):
        """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.lc_names: (optional) lower case and clean up field names from XML
        @param options.keep_fields: (optional) don't add fields to SQL database
        @param options.ascii_db: (optional) assume ascii encoding in db
        @param options.replace_table: (optional) delete and re-insert data
        """
        # set up parser
        self.event = None
        self.top_dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'METADATA'):
            self.handle_meta_fields,
            (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'):
            self.handle_data_fields,
            }

        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!"%options.dsn

        self.table = getattr(options,"table",None)
        self.update_fields = getattr(options,"update_fields",None)
        self.id_field = getattr(options,"id_field",None)
        self.sync_mode = getattr(options,"sync_mode",None)
        self.lc_names = getattr(options,"lc_names",None)
        self.keep_fields = getattr(options,"keep_fields",None)
        self.ascii_db = getattr(options,"ascii_db",None)
        self.replace_table = getattr(options,"replace_table",None)
        self.backup_table = getattr(options,"backup_table",None)

        logging.debug("dsn: "+repr(getattr(options,"dsn",None)))
        logging.debug("table: "+repr(self.table))
        logging.debug("update_fields: "+repr(self.update_fields))
        logging.debug("id_field: "+repr(self.id_field))
        logging.debug("sync_mode: "+repr(self.sync_mode))
        logging.debug("lc_names: "+repr(self.lc_names))
        logging.debug("keep_fields: "+repr(self.keep_fields))
        logging.debug("ascii_db: "+repr(self.ascii_db))
        logging.debug("replace_table: "+repr(self.replace_table))

        # map id value -> number of times seen in the XML (0 = not updated)
        self.dbIDs = {}
        self.rowcnt = 0

        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr="select %s from %s"%(self.id_field,self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0
                self.rowcnt += 1

            logging.info("%d entries in DB to sync"%self.rowcnt)

        # names of fields in XML file
        self.xml_field_names = []
        # map XML field names to SQL field names
        self.xml_field_map = {}
        # and vice versa
        self.sql_field_map = {}

        return

    def handle_meta_fields(self, end_condition):
        """Generator handling the METADATA element: collects the FIELD
        definitions, then prepares the table (backup copy / truncate),
        adjusts columns, and builds the INSERT and UPDATE statements."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
            self.handle_meta_field,
            }
        #First round through the generator corresponds to the
        #start element event
        logging.debug("START METADATA")
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        logging.debug("END METADATA")

        # rename table for backup
        if self.backup_table:
            self.orig_table = self.table
            # work on a temporary copy; it is swapped back in at the end
            # of handle_data_fields
            self.table = self.table + "_tmp"
            # remove old temp table
            qstr = "DROP TABLE %s"%(self.table)
            try:
                self.db.execute(qstr)
            except Exception:
                # temp table may not exist -- best effort cleanup
                pass

            self.dbCon.commit()

            if self.id_field:
                # sync mode -- copy table
                logging.info("copy table %s to %s"%(self.orig_table,self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(self.table,self.orig_table)

            else:
                # rename table and create empty new one
                logging.info("create empty table %s"%(self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)

            self.db.execute(qstr)
            self.dbCon.commit()

        # delete data from table for replace
        if self.replace_table:
            logging.info("delete data from table %s"%(self.table))
            qstr = "TRUNCATE TABLE %s"%(self.table)
            self.db.execute(qstr)
            self.dbCon.commit()

        # try to match date style with XML
        self.db.execute("set datestyle to 'german'")

        # translate id_field (SQL-name) to XML-name
        self.xml_id = self.sql_field_map.get(self.id_field, None)

        #logging.debug("xml-fieldnames:"+repr(self.xml_field_names))
        # get list of fields and types of db table
        qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields={}
        for f in SimpleSearch(self.db, qstr%self.table):
            n = f[0]
            t = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[n] = TableColumn(n,t)

        # check fields to update
        if self.update_fields is None:
            if self.keep_fields:
                # update all existing fields from sql (when they are in the xml file)
                self.update_fields = {}
                for f in self.sql_fields.keys():
                    if self.sql_field_map.has_key(f):
                        xf = self.sql_field_map[f]
                        self.update_fields[f] = self.xml_field_map[xf]

            else:
                # update all fields
                if self.lc_names:
                    # create dict with sql names
                    self.update_fields = {}
                    for f in self.xml_field_map.values():
                        self.update_fields[f.getName()] = f

                else:
                    self.update_fields = self.xml_field_map

        # and translate to list of xml fields
        if self.lc_names:
            self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
        else:
            self.xml_update_list = self.update_fields.keys()

        if not self.keep_fields:
            # adjust db table to fields in XML and update_fields
            for f in self.xml_field_map.values():
                logging.debug("sync-fieldname: %s"%f.getName())
                sf = self.sql_fields.get(f.getName(), None)
                uf = self.update_fields.get(f.getName(), None)
                if sf is not None:
                    # name in db -- check type
                    if f.getType() != sf.getType():
                        logging.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType()))
                elif uf is not None:
                    # add field to table
                    qstr="alter table %s add %s %s"%(self.table,uf.getName(),uf.getType())
                    logging.info("db add field:"+qstr)

                    if self.ascii_db and type(qstr)==types.UnicodeType:
                        qstr=qstr.encode('utf-8')

                    self.db.execute(qstr)
                    self.dbCon.commit()

        # prepare sql statements for update
        setStr=string.join(["%s = %%s"%self.xml_field_map[f] for f in self.xml_update_list], ', ')
        self.updQuery="UPDATE %s SET %s WHERE %s = %%s"%(self.table,setStr,self.id_field)
        # and insert
        fields=string.join([self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values=string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
        logging.debug("update-query: "+self.updQuery)
        logging.debug("add-query: "+self.addQuery)
        return

    def handle_meta_field(self, end_condition):
        """Generator handling one FIELD element: records the XML field name
        and its (optionally cleaned/lowercased) SQL column name."""
        name = self.params.get((None, u'NAME'))
        yield None
        #Element closed. Wrap up
        if self.lc_names:
            # clean name
            sqlname = name.replace(" ","_").lower()
        else:
            sqlname = name
        self.xml_field_names.append(name)
        # map to sql name and default text type
        self.xml_field_map[name] = TableColumn(sqlname, 'text')
        self.sql_field_map[sqlname] = name
        logging.debug("FIELD name: "+name)
        return

    def handle_data_fields(self, end_condition):
        """Generator handling the RESULTSET element: processes all ROWs,
        then (in sync mode) deletes DB rows missing from the XML and
        finally swaps backup tables back into place."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'ROW'):
            self.handle_row,
            }
        #First round through the generator corresponds to the
        #start element event
        logging.debug("START RESULTSET")
        self.rowcnt = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        logging.debug("END RESULTSET")
        self.dbCon.commit()

        if self.sync_mode:
            # delete unmatched entries in db
            logging.info("deleting unmatched rows from db")
            delQuery = "DELETE FROM %s WHERE %s = %%s"%(self.table,self.id_field)
            for id in self.dbIDs.keys():
                # find all not-updated fields
                if self.dbIDs[id] == 0:
                    # BUGFIX: a stray sys.exit(1) here aborted the whole
                    # import after the first delete, before commit, so
                    # sync deletion could never complete -- removed.
                    logging.info(" delete:%s"%id)
                    SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)

                elif self.dbIDs[id] > 1:
                    logging.info(" sync: ID %s used more than once?"%id)

            self.dbCon.commit()

        # reinstate backup tables
        if self.backup_table:
            backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
            logging.info("rename backup table %s to %s"%(self.orig_table,backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
            self.db.execute(qstr)
            logging.info("rename working table %s to %s"%(self.table,self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()

        return

    def handle_row(self, end_condition):
        """Generator handling one ROW element: collects the column values,
        then UPDATEs an existing row (matched via id_field) or INSERTs a
        new one. Commits every 10 rows."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'COL'):
            self.handle_col,
            }
        logging.debug("START ROW")
        self.xml_data = {}
        self.colIdx = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        logging.debug("END ROW")
        self.rowcnt += 1
        # process collected row data
        update=False
        id_val=''
        # synchronize by id_field
        if self.id_field:
            id_val = self.xml_data[self.xml_id]
            if id_val in self.dbIDs:
                # mark as seen; >1 means duplicate ID in the XML
                self.dbIDs[id_val] += 1
                update=True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            # renamed from 'type' to avoid shadowing the builtin
            ftype = self.sql_fields[f.getName()].getType()
            if ftype == "date" and len(val) == 0:
                # empty date field
                val = None

            elif ftype == "integer" and len(val) == 0:
                # empty int field
                val = None

            args.append(val)

        if update:
            # update existing row (by id_field)
            # last argument is ID match
            args.append(id_val)
            logging.debug("update: %s = %s"%(id_val, args))
            SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

        else:
            # create new row
            logging.debug("insert: %s"%args)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)

        #logging.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        if (self.rowcnt % 10) == 0:
            logging.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
            self.dbCon.commit()

        return

    def handle_col(self, end_condition):
        """Generator handling one COL element: delegates DATA handling and
        advances the column counter afterwards."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'DATA'):
            self.handle_data_tag,
            }
        #print "START COL"
        yield None
        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None
        #Element closed. Wrap up
        #print "END COL"
        self.colIdx += 1
        return

    def handle_data_tag(self, end_condition):
        """Generator handling one DATA element: gathers its character data
        and stores it under the XML field name of the current column."""
        #print "START DATA"
        content = u''
        yield None
        # gather child elements
        while not self.event == end_condition:
            if self.event[0] == saxtools.CHARACTER_DATA:
                content += self.params
            yield None
        #Element closed. Wrap up
        fn = self.xml_field_names[self.colIdx]
        self.xml_data[fn] = content
        return
454:
455:
def importFMPXML(options):
    """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    """
    # turn a "--fields" option string of the form "name[:type],..." into a
    # dict of TableColumn objects keyed by column name
    if getattr(options, 'update_fields', None):
        field_map = {}
        for spec in options.update_fields.split(','):
            colname = spec
            coltype = None
            if spec.find(':') > 0:
                (colname, coltype) = spec.split(':')
            field_map[colname] = TableColumn(colname, coltype)

        options.update_fields = field_map

    # sync (id_field) and replace are mutually exclusive modes
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        sys.exit(1)

    # wire up: SAX parser -> tenorsax adapter -> our handler, then parse
    consumer = xml_handler(options)
    handler = saxtools.tenorsax(consumer)
    parser = sax.make_parser()
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
497:
1.1 casties 498:
if __name__ == "__main__":
    # command line interface: collect options, configure logging, run import
    from optparse import OptionParser

    opars = OptionParser()
    opars.add_option("-f", "--file",
                     dest="filename",
                     help="FMPXML file name", metavar="FILE")
    opars.add_option("-c", "--dsn",
                     dest="dsn",
                     help="database connection string")
    opars.add_option("-t", "--table",
                     dest="table",
                     help="database table name")
    opars.add_option("--fields", default=None,
                     dest="update_fields",
                     help="list of fields to update (comma separated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field",
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true",
                     dest="sync_mode",
                     help="do full sync based on id field (remove unmatched fields from db)")
    opars.add_option("--lc-names", default=False, action="store_true",
                     dest="lc_names",
                     help="clean and lower case field names from XML")
    opars.add_option("--keep-fields", default=False, action="store_true",
                     dest="keep_fields",
                     help="don't add fields from XML to SQL table")
    opars.add_option("--ascii-db", default=False, action="store_true",
                     dest="ascii_db",
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true",
                     dest="replace_table",
                     help="replace table i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true",
                     dest="backup_table",
                     help="create backup of old table (breaks indices)")
    opars.add_option("-d", "--debug", default=False, action="store_true",
                     dest="debug",
                     help="debug mode (more output)")

    (options, args) = opars.parse_args()

    # filename and dsn are mandatory; print version + usage and exit otherwise
    if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
        print "importFMPXML "+version_string
        opars.print_help()
        sys.exit(1)

    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    # log to stderr with timestamps
    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importFMPXML(options)
557:
1.9 dwinter 558:
559:
1.1 casties 560:
FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>