1: #!/usr/local/bin/python
2: #
3:
4: import string
5: import logging
6: import sys
7: import types
8: import time
9:
10: from xml import sax
11: from amara import saxtools
12:
13: try:
14: import psycopg2 as psycopg
15: psyco = 2
16: except:
17: import psycopg
18: psyco = 1
19:
20: fm_ns = 'http://www.filemaker.com/fmpxmlresult'
21:
22: version_string = "V0.4.1 ROC 9.8.2007"
23:
def getTextFromNode(nodename):
    """Return the concatenated character data of a DOM node.

    Collects the data of all direct TEXT_NODE children of nodename;
    returns the empty string when nodename is None or has no text.
    """
    if nodename is None:
        return ""
    parts = []
    for child in nodename.childNodes:
        if child.nodeType == child.TEXT_NODE:
            parts.append(child.data)
    return "".join(parts)
34:
def sql_quote(v):
    """Return string v as a single-quoted SQL string literal.

    Backslashes are doubled first, then single quotes, so the two
    escape passes cannot interfere with each other.

    NOTE: prefer parameterized queries (cursor.execute(query, args))
    for untrusted input; this helper only escapes ' and backslash.
    """
    # str.replace instead of the removed string.find/split/join functions
    v = v.replace("\\", "\\\\")
    v = v.replace("'", "''")
    return "'%s'" % v
42:
def SimpleSearch(curs, query, args=None, ascii=False):
    """Execute an SQL query on cursor curs and return its result rows.

    @param curs: DB-API cursor
    @param query: SQL statement (may contain %s parameter placeholders)
    @param args: optional sequence of query parameters
    @param ascii: if True, encode query and all non-None parameters as
        UTF-8 byte strings (for databases assumed to store byte strings)
    @return: result of curs.fetchall(), or None when the statement
        produced no result set (e.g. INSERT/UPDATE/DELETE)
    """
    #logger.debug("executing: "+query)
    if ascii:
        # encode all in UTF-8
        query = query.encode("UTF-8")
        if args is not None:
            encargs = []
            for a in args:
                if a is not None:
                    a = a.encode("UTF-8")
                encargs.append(a)

            args = encargs

    curs.execute(query, args)
    #logger.debug("sql done")
    try:
        return curs.fetchall()
    except Exception:
        # fetchall() raises for statements without a result set; a bare
        # except here would also swallow KeyboardInterrupt/SystemExit
        return None
64:
65:
class TableColumn:
    """Lightweight record of an SQL column: a name plus an optional type.

    When no type was supplied, getType() reports the default "text".
    """

    def __init__(self, name, type=None):
        # type may legitimately stay None; getType() supplies the default
        self.name = name
        self.type = type

    def getName(self):
        """Return the column name."""
        return self.name

    def getType(self):
        """Return the column type; "text" when none was given."""
        if self.type is None:
            return "text"
        return self.type

    def __str__(self):
        # columns render as their bare name (used when building SQL)
        return self.name
85:
86:
class xml_handler:
    """SAX 'consumer' for amara.saxtools.tenorsax that imports a FileMaker
    FMPXMLRESULT XML document into a PostgreSQL table via psycopg.

    The handle_* methods are generators driven by tenorsax's event loop:
    each yields once per SAX event until its element's end condition.

    NOTE(review): table and field names are interpolated directly into SQL
    strings throughout; they come from command-line options and the XML
    file, so this is only safe for trusted input -- confirm.
    """

    def __init__(self, options):
        """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.lc_names: (optional) lower case and clean up field names from XML
        @param options.keep_fields: (optional) don't add fields to SQL database
        @param options.ascii_db: (optional) assume ascii encoding in db
        @param options.replace_table: (optional) delete and re-insert data
        @param options.backup_table: (optional) create backup of old table (breaks indices)
        @param options.use_logger_instance: (optional) use this instance of a logger
        """

        # set up logger
        if hasattr(options, 'use_logger_instance'):
            self.logger = options.use_logger_instance
        else:
            self.logger = logging.getLogger('db.import.fmpxml')

        # set up parser
        self.event = None
        # tenorsax dispatch table: top-level elements we handle and the
        # generator method that consumes each one
        self.top_dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'METADATA'):
            self.handle_meta_fields,
            (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'):
            self.handle_data_fields,
            }

        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!" % options.dsn

        # copy options to attributes; all optional ones default to None
        self.table = getattr(options, "table", None)
        self.update_fields = getattr(options, "update_fields", None)
        self.id_field = getattr(options, "id_field", None)
        self.sync_mode = getattr(options, "sync_mode", None)
        self.lc_names = getattr(options, "lc_names", None)
        self.keep_fields = getattr(options, "keep_fields", None)
        self.ascii_db = getattr(options, "ascii_db", None)
        self.replace_table = getattr(options, "replace_table", None)
        self.backup_table = getattr(options, "backup_table", None)

        self.logger.debug("dsn: "+repr(getattr(options, "dsn", None)))
        self.logger.debug("table: "+repr(self.table))
        self.logger.debug("update_fields: "+repr(self.update_fields))
        self.logger.debug("id_field: "+repr(self.id_field))
        self.logger.debug("sync_mode: "+repr(self.sync_mode))
        self.logger.debug("lc_names: "+repr(self.lc_names))
        self.logger.debug("keep_fields: "+repr(self.keep_fields))
        self.logger.debug("ascii_db: "+repr(self.ascii_db))
        self.logger.debug("replace_table: "+repr(self.replace_table))
        self.logger.debug("backup_table: "+repr(self.backup_table))

        # map of existing row ids -> number of times matched during sync
        self.dbIDs = {}
        self.rowcnt = 0

        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr = "select %s from %s"%(self.id_field,self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0;
                self.rowcnt += 1

            self.logger.info("%d entries in DB to sync"%self.rowcnt)

        # names of fields in XML file
        self.xml_field_names = []
        # map XML field names to SQL field names
        self.xml_field_map = {}
        # and vice versa
        self.sql_field_map = {}

        return

    def handle_meta_fields(self, end_condition):
        """Generator consuming the METADATA element.

        Collects FIELD definitions via handle_meta_field, then prepares
        the target table: optional backup copy/empty clone, optional
        truncate for --replace, syncs the column list with the XML
        fields, and precomputes the UPDATE and INSERT statements.
        """
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
            self.handle_meta_field,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading metadata...")
        self.logger.debug("START METADATA")
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END METADATA")

        # rename table for backup
        if self.backup_table:
            # work on a temp copy; handle_data_fields swaps names at the end
            self.orig_table = self.table
            self.table = self.table + "_tmp"
            # remove old temp table
            qstr = "DROP TABLE %s"%(self.table)
            try:
                self.db.execute(qstr)
            except:
                # temp table may not exist yet -- ignore
                pass

            self.dbCon.commit()

            if self.id_field:
                # sync mode -- copy table
                self.logger.info("copy table %s to %s"%(self.orig_table,self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(self.table,self.orig_table)

            else:
                # rename table and create empty new one
                self.logger.info("create empty table %s"%(self.table))
                # WHERE 1=0 clones the column structure without any rows
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)

            self.db.execute(qstr)
            self.dbCon.commit()

        # delete data from table for replace
        if self.replace_table:
            self.logger.info("delete data from table %s"%(self.table))
            qstr = "TRUNCATE TABLE %s"%(self.table)
            self.db.execute(qstr)
            self.dbCon.commit()

        # try to match date style with XML
        self.db.execute("set datestyle to 'german'")

        # translate id_field (SQL-name) to XML-name
        self.xml_id = self.sql_field_map.get(self.id_field, None)

        #self.logger.debug("xml-fieldnames:"+repr(self.xml_field_names))
        # get list of fields and types of db table (from pg catalogs)
        qstr = "select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields = {}
        for f in SimpleSearch(self.db, qstr%self.table):
            n = f[0]
            t = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[n] = TableColumn(n,t)

        # check fields to update
        if self.update_fields is None:
            if self.keep_fields:
                # update all existing fields from sql (when they are in the xml file)
                self.update_fields = {}
                for f in self.sql_fields.keys():
                    if self.sql_field_map.has_key(f):
                        xf = self.sql_field_map[f]
                        self.update_fields[f] = self.xml_field_map[xf]

            else:
                # update all fields
                if self.lc_names:
                    # create dict with sql names
                    self.update_fields = {}
                    for f in self.xml_field_map.values():
                        self.update_fields[f.getName()] = f

                else:
                    self.update_fields = self.xml_field_map

        # and translate to list of xml fields
        if self.lc_names:
            self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
        else:
            self.xml_update_list = self.update_fields.keys()

        if not self.keep_fields:
            # adjust db table to fields in XML and update_fields
            for f in self.xml_field_map.values():
                self.logger.debug("sync-fieldname: %s"%f.getName())
                sf = self.sql_fields.get(f.getName(), None)
                uf = self.update_fields.get(f.getName(), None)
                if sf is not None:
                    # name in db -- check type
                    if f.getType() != sf.getType():
                        # NOTE(review): type mismatch is only logged, not fixed
                        self.logger.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType()))
                elif uf is not None:
                    # add field to table
                    qstr = "alter table %s add %s %s"%(self.table,uf.getName(),uf.getType())
                    self.logger.info("db add field:"+qstr)

                    if self.ascii_db and type(qstr)==types.UnicodeType:
                        qstr = qstr.encode('utf-8')

                    self.db.execute(qstr)
                    self.dbCon.commit()

        # prepare sql statements for update
        # (TableColumn.__str__ renders as the bare column name)
        setStr = string.join(["%s = %%s"%self.xml_field_map[f] for f in self.xml_update_list], ', ')
        self.updQuery = "UPDATE %s SET %s WHERE %s = %%s"%(self.table,setStr,self.id_field)
        # and insert
        fields = string.join([self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values = string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery = "INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
        self.logger.debug("update-query: "+self.updQuery)
        self.logger.debug("add-query: "+self.addQuery)
        return

    def handle_meta_field(self, end_condition):
        """Generator consuming one FIELD element: records the XML field
        name and its mapping to a (possibly cleaned-up) SQL column name."""
        name = self.params.get((None, u'NAME'))
        yield None
        #Element closed. Wrap up
        if self.lc_names:
            # clean name
            sqlname = name.replace(" ","_").lower()
        else:
            sqlname = name
        self.xml_field_names.append(name)
        # map to sql name and default text type
        self.xml_field_map[name] = TableColumn(sqlname, 'text')
        self.sql_field_map[sqlname] = name
        self.logger.debug("FIELD name: "+name)
        return

    def handle_data_fields(self, end_condition):
        """Generator consuming the RESULTSET element.

        Delegates each ROW to handle_row, then (in sync mode) deletes
        db rows that were never matched, and finally swaps in the
        backup/working tables when --backup was requested.
        """
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'ROW'):
            self.handle_row,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading data...")
        self.logger.debug("START RESULTSET")
        self.rowcnt = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END RESULTSET")
        self.dbCon.commit()

        if self.sync_mode:
            # delete unmatched entries in db
            self.logger.info("deleting unmatched rows from db")
            delQuery = "DELETE FROM %s WHERE %s = %%s"%(self.table,self.id_field)
            for id in self.dbIDs.keys():
                # find all not-updated fields
                if self.dbIDs[id] == 0:
                    self.logger.info(" delete:"+id)
                    SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)
                    # NOTE(review): this aborts the whole program after the
                    # FIRST delete (before commit) -- looks like leftover
                    # debug/safety code; confirm intended behaviour
                    sys.exit(1)

                elif self.dbIDs[id] > 1:
                    self.logger.info(" sync: ID %s used more than once?"%id)

            self.dbCon.commit()

        # reinstate backup tables
        if self.backup_table:
            # original table becomes a timestamped backup, the working
            # (_tmp) table takes over the original name
            backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
            self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
            self.db.execute(qstr)
            self.logger.info("rename working table %s to %s"%(self.table,self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()

        return

    def handle_row(self, end_condition):
        """Generator consuming one ROW element: collects column data via
        handle_col, then UPDATEs (if the id matches an existing row in
        sync mode) or INSERTs the row."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'COL'):
            self.handle_col,
            }
        self.logger.debug("START ROW")
        self.xml_data = {}
        self.colIdx = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END ROW")
        self.rowcnt += 1
        # process collected row data
        update = False
        id_val = ''
        # synchronize by id_field
        if self.id_field:
            id_val = self.xml_data[self.xml_id]
            if id_val in self.dbIDs:
                # count the match so unmatched rows can be deleted later
                self.dbIDs[id_val] += 1
                update = True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            type = self.sql_fields[f.getName()].getType()
            if type == "date" and len(val) == 0:
                # empty date field -- insert NULL instead of ''
                val = None

            elif type == "integer" and len(val) == 0:
                # empty int field
                val = None

            args.append(val)

        if update:
            # update existing row (by id_field)
            # last argument is ID match
            args.append(id_val)
            self.logger.debug("update: %s = %s"%(id_val, args))
            SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

        else:
            # create new row
            self.logger.debug("insert: %s"%args)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)

        #self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        if (self.rowcnt % 100) == 0:
            # commit every 100 rows and log progress
            self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val))
            self.dbCon.commit()

        return

    def handle_col(self, end_condition):
        """Generator consuming one COL element; its DATA child is read by
        handle_data_tag, and the column index is advanced afterwards."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'DATA'):
            self.handle_data_tag,
            }
        #print "START COL"
        yield None
        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None
        #Element closed. Wrap up
        #print "END COL"
        self.colIdx += 1
        return

    def handle_data_tag(self, end_condition):
        """Generator consuming one DATA element: accumulates its character
        data and stores it under the current column's XML field name."""
        #print "START DATA"
        content = u''
        yield None
        # gather child elements
        while not self.event == end_condition:
            if self.event[0] == saxtools.CHARACTER_DATA:
                content += self.params
            yield None
        #Element closed. Wrap up
        fn = self.xml_field_names[self.colIdx]
        self.xml_data[fn] = content
        return
466:
467:
def importFMPXML(options):
    """import FileMaker XML file (FMPXMLRESULT format) into the table.
    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    @param options.backup_table: (optional) create backup of old table (breaks indices)
    """

    # turn the comma-separated "name" / "name:type" option string into a
    # dict of TableColumn objects keyed by field name
    if getattr(options, 'update_fields', None):
        parsed = {}
        for spec in options.update_fields.split(','):
            if spec.find(':') > 0:
                (fname, ftype) = spec.split(':')
            else:
                fname = spec
                ftype = None
            parsed[fname] = TableColumn(fname, ftype)

        options.update_fields = parsed

    # sync mode (id_field) and table replacement are mutually exclusive
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        return

    parser = sax.make_parser()
    # the "consumer" is our own handler; tenorsax drives its
    # generator-based dispatchers and acts as the actual SAX handler
    consumer = xml_handler(options)
    handler = saxtools.tenorsax(consumer)
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
509:
510:
if __name__ == "__main__":
    # command-line entry point: build option parser, validate required
    # arguments, configure logging, then run the import
    from optparse import OptionParser

    opars = OptionParser()
    opars.add_option("-f", "--file",
                     dest="filename",
                     help="FMPXML file name", metavar="FILE")
    opars.add_option("-c", "--dsn",
                     dest="dsn",
                     help="database connection string")
    opars.add_option("-t", "--table",
                     dest="table",
                     help="database table name")
    opars.add_option("--fields", default=None,
                     dest="update_fields",
                     help="list of fields to update (comma separated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field",
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true",
                     dest="sync_mode",
                     help="do full sync based on id field (remove unmatched fields from db)")
    opars.add_option("--lc-names", default=False, action="store_true",
                     dest="lc_names",
                     help="clean and lower case field names from XML")
    opars.add_option("--keep-fields", default=False, action="store_true",
                     dest="keep_fields",
                     help="don't add fields from XML to SQL table")
    opars.add_option("--ascii-db", default=False, action="store_true",
                     dest="ascii_db",
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true",
                     dest="replace_table",
                     help="replace table i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true",
                     dest="backup_table",
                     help="create backup of old table (breaks indices)")
    opars.add_option("-d", "--debug", default=False, action="store_true",
                     dest="debug",
                     help="debug mode (more output)")

    (options, args) = opars.parse_args()

    # file name and dsn are mandatory; show usage and bail out otherwise
    if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
        print "importFMPXML "+version_string
        opars.print_help()
        sys.exit(1)

    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importFMPXML(options)
569:
570:
571:
572:
# FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>