1: #!/usr/local/bin/python
2: #
3:
4: import string
5: import logging
6: import sys
7: import types
8: import time
9:
10: from xml import sax
11: from amara import saxtools
12:
13: try:
14: import psycopg2 as psycopg
15: psyco = 2
16: except:
17: import psycopg
18: psyco = 1
19:
20: fm_ns = 'http://www.filemaker.com/fmpxmlresult'
21:
22: version_string = "V0.5.2 ROC 4.2.2008"
23:
def unicodify(text, withNone=False):
    """decode str (utf-8 or latin-1 representation) into unicode object

    @param text: value to decode; non-string values are passed through
    @param withNone: if True, None is returned as None instead of u""
    """
    if withNone and text is None:
        return None
    if not text:
        return u""
    if isinstance(text, str):
        try:
            return text.decode('utf-8')
        except UnicodeDecodeError:
            # not valid utf-8 -- latin-1 maps every byte, so this cannot fail
            return text.decode('latin-1')
    else:
        return text
37:
def utf8ify(text, withNone=False):
    """encode unicode object (or pass through a byte string) as utf-8

    @param text: value to encode; non-unicode values are passed through
    @param withNone: if True, None is returned as None instead of ""
    """
    if text is None and withNone:
        return None
    if not text:
        return ""
    if not isinstance(text, unicode):
        # already a byte string (or other object) -- leave untouched
        return text
    return text.encode('utf-8')
48:
def getTextFromNode(nodename):
    """get the cdata content of a DOM node

    Returns the concatenated text of all direct TEXT_NODE children,
    or "" when the node is None or has no text children.
    """
    if nodename is None:
        return ""
    parts = []
    for child in nodename.childNodes:
        if child.nodeType == child.TEXT_NODE:
            parts.append(child.data)
    return "".join(parts)
59:
def sql_quote(v):
    """quote string value v for use inside an SQL statement

    Doubles backslashes and single quotes, then wraps the result in
    single quotes.  (Uses str methods instead of the deprecated
    string-module functions; the two replacements are independent, so
    their order does not matter.)
    """
    v = v.replace("\\", "\\\\")
    v = v.replace("'", "''")
    return "'%s'" % v
67:
def SimpleSearch(curs, query, args=None, ascii=False):
    """execute sql query and return data

    @param curs: DB-API cursor to execute on
    @param query: sql query string
    @param args: (optional) sequence of query parameters
    @param ascii: (optional) if True, encode query and args to utf-8
        byte strings before executing (for byte-string databases)
    @returns list of result rows, or None when the statement produced
        no result set (e.g. INSERT/UPDATE/DELETE)
    """
    #logger.debug("executing: "+query)
    if ascii:
        # encode query and all arguments in UTF-8
        query = utf8ify(query)
        if args is not None:
            args = [utf8ify(a, withNone=True) for a in args]

    curs.execute(query, args)
    #logger.debug("sql done")
    try:
        return curs.fetchall()
    except Exception:
        # no result set to fetch (DML statement) -- deliberate best-effort
        return None
87:
88:
class TableColumn:
    """simple container for an sql column name and type"""

    def __init__(self, name, type=None):
        # column name and optional sql type string (None means unknown)
        self.name = name
        self.type = type

    def getName(self):
        """return the column name"""
        return self.name

    def getType(self):
        """return the sql type, defaulting to "text" when unset"""
        if self.type is None:
            return "text"
        return self.type

    def __str__(self):
        return self.name
108:
109:
class xml_handler:
    """SAX event consumer (driven by amara.saxtools.tenorsax) that imports
    a FileMaker FMPXMLRESULT stream into a PostgreSQL table."""

    def __init__(self,options):
        """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.lc_names: (optional) lower case and clean up field names from XML
        @param options.keep_fields: (optional) don't add fields to SQL database
        @param options.ascii_db: (optional) assume ascii encoding in db
        @param options.replace_table: (optional) delete and re-insert data
        @param options.backup_table: (optional) create backup of old table (breaks indices)
        @param options.use_logger_instance: (optional) use this instance of a logger
        """

        # set up logger
        if hasattr(options, 'use_logger_instance'):
            self.logger = options.use_logger_instance
        else:
            self.logger = logging.getLogger('db.import.fmpxml')

        # set up parser
        self.event = None
        # tenorsax dispatch table for the two top-level elements we handle
        self.top_dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'METADATA'):
                self.handle_meta_fields,
            (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'):
                self.handle_data_fields,
        }

        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!"%options.dsn

        # copy options onto the instance (None when not given)
        self.table = getattr(options,"table",None)
        self.update_fields = getattr(options,"update_fields",None)
        self.id_field = getattr(options,"id_field",None)
        self.sync_mode = getattr(options,"sync_mode",None)
        self.lc_names = getattr(options,"lc_names",None)
        self.keep_fields = getattr(options,"keep_fields",None)
        self.ascii_db = getattr(options,"ascii_db",None)
        self.replace_table = getattr(options,"replace_table",None)
        self.backup_table = getattr(options,"backup_table",None)

        self.logger.debug("dsn: "+repr(getattr(options,"dsn",None)))
        self.logger.debug("table: "+repr(self.table))
        self.logger.debug("update_fields: "+repr(self.update_fields))
        self.logger.debug("id_field: "+repr(self.id_field))
        self.logger.debug("sync_mode: "+repr(self.sync_mode))
        self.logger.debug("lc_names: "+repr(self.lc_names))
        self.logger.debug("keep_fields: "+repr(self.keep_fields))
        self.logger.debug("ascii_db: "+repr(self.ascii_db))
        self.logger.debug("replace_table: "+repr(self.replace_table))
        self.logger.debug("backup_table: "+repr(self.backup_table))

        # sync bookkeeping: db id -> number of times seen in the XML file
        self.dbIDs = {}
        self.rowcnt = 0

        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr="select %s from %s"%(self.id_field,self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0;
                self.rowcnt += 1

            self.logger.info("%d entries in DB to sync"%self.rowcnt)

        # names of fields in XML file
        self.xml_field_names = []
        # map XML field names to SQL field names
        self.xml_field_map = {}
        # and vice versa
        self.sql_field_map = {}

        return

    def handle_meta_fields(self, end_condition):
        """Generator handling the METADATA element.

        Collects all FIELD definitions via handle_meta_field, then sets up
        the target table (backup/replace handling, adding missing columns)
        and prepares the UPDATE and INSERT statements used by handle_row.
        """
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
                self.handle_meta_field,
        }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading metadata...")
        self.logger.debug("START METADATA")
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END METADATA")

        # rename table for backup
        if self.backup_table:
            self.orig_table = self.table
            self.tmp_table = self.table + "_tmp"
            backup_name = "%s_%s"%(self.table,time.strftime('%Y_%m_%d_%H_%M_%S'))

            # remove old temp table
            qstr = "DROP TABLE %s"%(self.tmp_table)
            try:
                self.db.execute(qstr)
            except:
                # temp table may not exist yet -- ignore the error
                pass

            self.dbCon.commit()

            if self.id_field:
                # sync mode -- copy backup table, update current table
                self.logger.info("copy table %s to %s"%(self.table,backup_name))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(backup_name,self.table)

            else:
                # replace mode -- create empty tmp table, insert into tmp table
                self.table = self.tmp_table
                self.logger.info("create empty table %s"%(self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)

            self.db.execute(qstr)
            self.dbCon.commit()

        # delete data from table for replace
        if self.replace_table:
            self.logger.info("delete data from table %s"%(self.table))
            qstr = "TRUNCATE TABLE %s"%(self.table)
            self.db.execute(qstr)
            self.dbCon.commit()

        # try to match date style with XML
        self.db.execute("set datestyle to 'german'")

        #self.logger.debug("xml-fieldnames:"+repr(self.xml_field_names))
        # get list of fields and types of db table
        # (the %s placeholder is filled with the table name below)
        qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields={}
        for f in SimpleSearch(self.db, qstr%self.table):
            n = f[0]
            t = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[n] = TableColumn(n,t)

        # translate id_field (SQL-name) to XML-name
        self.xml_id = self.sql_field_map.get(self.id_field, None)
        # get type of id_field
        if self.id_field:
            self.id_type = self.sql_fields[self.id_field].getType()
        else:
            self.id_type = None

        # check fields to update
        if self.update_fields is None:
            if self.keep_fields:
                # update all existing fields from sql (when they are in the xml file)
                self.update_fields = {}
                for f in self.sql_fields.keys():
                    if self.sql_field_map.has_key(f):
                        xf = self.sql_field_map[f]
                        self.update_fields[f] = self.xml_field_map[xf]

            else:
                # update all fields
                if self.lc_names:
                    # create dict with sql names
                    self.update_fields = {}
                    for f in self.xml_field_map.values():
                        self.update_fields[f.getName()] = f

                else:
                    self.update_fields = self.xml_field_map

        # and translate to list of xml fields
        if self.lc_names:
            self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
        else:
            self.xml_update_list = self.update_fields.keys()

        if not self.keep_fields:
            # adjust db table to fields in XML and update_fields
            for f in self.xml_field_map.values():
                self.logger.debug("sync-fieldname: %s"%f.getName())
                sf = self.sql_fields.get(f.getName(), None)
                uf = self.update_fields.get(f.getName(), None)
                if sf is not None:
                    # name in db -- check type
                    if f.getType() != sf.getType():
                        # type mismatch is only logged, not fixed
                        self.logger.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType()))
                elif uf is not None:
                    # add field to table
                    qstr="alter table %s add %s %s"%(self.table,uf.getName(),uf.getType())
                    self.logger.info("db add field:"+qstr)

                    if self.ascii_db and type(qstr)==types.UnicodeType:
                        qstr=qstr.encode('utf-8')

                    self.db.execute(qstr)
                    self.dbCon.commit()

        # prepare sql statements for update (do not update id_field)
        setStr=string.join(["%s = %%s"%self.xml_field_map[f] for f in self.xml_update_list if f != self.xml_id], ', ')
        self.updQuery="UPDATE %s SET %s WHERE %s = %%s"%(self.table,setStr,self.id_field)
        # and insert
        fields=string.join([self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values=string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
        self.logger.debug("update-query: "+self.updQuery)
        self.logger.debug("add-query: "+self.addQuery)
        return

    def handle_meta_field(self, end_condition):
        """Generator handling a single FIELD element: registers the XML
        field name and its (possibly cleaned-up) SQL column name."""
        name = self.params.get((None, u'NAME'))
        yield None
        #Element closed. Wrap up
        if self.lc_names:
            # clean name
            sqlname = name.replace(" ","_").lower()
        else:
            sqlname = name
        self.xml_field_names.append(name)
        # map to sql name and default text type
        self.xml_field_map[name] = TableColumn(sqlname, 'text')
        self.sql_field_map[sqlname] = name
        self.logger.debug("FIELD name: "+name)
        return

    def handle_data_fields(self, end_condition):
        """Generator handling the RESULTSET element: processes all ROW
        elements, then finishes sync mode (deleting unmatched rows) and
        reinstates the backup table in replace mode."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'ROW'):
                self.handle_row,
        }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading data...")
        self.logger.debug("START RESULTSET")
        self.rowcnt = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END RESULTSET")
        self.dbCon.commit()

        if self.sync_mode:
            # delete unmatched entries in db
            self.logger.info("deleting unmatched rows from db")
            delQuery = "DELETE FROM %s WHERE %s = %%s"%(self.table,self.id_field)
            for id in self.dbIDs.keys():
                # find all not-updated fields
                if self.dbIDs[id] == 0:
                    # NOTE(review): "+id" raises TypeError when the id is
                    # not a string (e.g. integer id_field) -- confirm
                    self.logger.info(" delete:"+id)
                    SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)
                    # NOTE(review): sys.exit(1) aborts the whole program
                    # right after the first deletion (before commit) --
                    # looks like leftover debug/safety code; confirm intent
                    sys.exit(1)

                elif self.dbIDs[id] > 1:
                    self.logger.info(" sync: ID %s used more than once?"%id)

            self.dbCon.commit()

        # reinstate backup tables
        if self.backup_table and not self.id_field:
            backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
            self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
            self.db.execute(qstr)
            self.logger.info("rename working table %s to %s"%(self.table,self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()

        return

    def handle_row(self, end_condition):
        """Generator handling a ROW element: collects all column values
        via handle_col, then updates or inserts the row in the db."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'COL'):
                self.handle_col,
        }
        self.logger.debug("START ROW")
        self.xml_data = {}
        self.colIdx = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END ROW")
        self.rowcnt += 1
        # process collected row data
        update=False
        id_val=''
        # synchronize by id_field
        if self.id_field:
            if self.id_type == 'integer':
                id_val = int(self.xml_data[self.xml_id])
            else:
                id_val = self.xml_data[self.xml_id]

            if id_val in self.dbIDs:
                # id already in db -- count the match and update the row
                self.dbIDs[id_val] += 1
                update=True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            # do not update id_field
            if update and fn == self.xml_id:
                continue

            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            type = self.sql_fields[f.getName()].getType()
            if type == "date" and len(val) == 0:
                # empty date field
                val = None

            elif type == "integer" and len(val) == 0:
                # empty int field
                val = None

            args.append(val)

        if update:
            # update existing row (by id_field)
            # last argument is ID match
            args.append(id_val)
            self.logger.debug("update: %s = %s"%(id_val, args))
            SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

        else:
            # create new row
            self.logger.debug("insert: %s"%args)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)

        #self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        # commit and log progress every 100 rows
        if (self.rowcnt % 100) == 0:
            self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val))
            self.dbCon.commit()

        return

    def handle_col(self, end_condition):
        """Generator handling a COL element: reads the contained DATA
        element, then advances the current column index."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'DATA'):
                self.handle_data_tag,
        }
        #print "START COL"
        yield None
        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None
        #Element closed. Wrap up
        #print "END COL"
        self.colIdx += 1
        return

    def handle_data_tag(self, end_condition):
        """Generator handling a DATA element: gathers its character data
        and stores it under the current column's XML field name."""
        #print "START DATA"
        content = u''
        yield None
        # gather child elements
        while not self.event == end_condition:
            if self.event[0] == saxtools.CHARACTER_DATA:
                content += self.params
            yield None
        #Element closed. Wrap up
        fn = self.xml_field_names[self.colIdx]
        self.xml_data[fn] = content
        return
505:
506:
def importFMPXML(options):
    """import FileMaker XML file (FMPXMLRESULT format) into the table.
    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    @param options.backup_table: (optional) create backup of old table
    """

    if getattr(options, 'update_fields', None):
        # parse comma-separated "name" or "name:type" specs into a
        # name -> TableColumn map
        uf = {}
        for f in options.update_fields.split(','):
            if f.find(':') > 0:
                # split only on the first colon so a type containing
                # ':' does not raise a ValueError
                (n, t) = f.split(':', 1)
            else:
                n = f
                t = None
            uf[n] = TableColumn(n, t)

        options.update_fields = uf

    # sync (id_field) and replace mode are mutually exclusive
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        return

    parser = sax.make_parser()
    #The "consumer" is our own handler
    consumer = xml_handler(options)
    #Initialize Tenorsax with handler
    handler = saxtools.tenorsax(consumer)
    #Resulting tenorsax instance is the SAX handler
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
548:
549:
if __name__ == "__main__":
    from optparse import OptionParser

    # command line interface
    opars = OptionParser()
    opars.add_option("-f", "--file",
                     dest="filename",
                     help="FMPXML file name", metavar="FILE")
    opars.add_option("-c", "--dsn",
                     dest="dsn",
                     help="database connection string")
    opars.add_option("-t", "--table",
                     dest="table",
                     help="database table name")
    opars.add_option("--fields", default=None,
                     dest="update_fields",
                     help="list of fields to update (comma separated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field",
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true",
                     dest="sync_mode",
                     help="do full sync based on id field (remove unmatched fields from db)")
    opars.add_option("--lc-names", default=False, action="store_true",
                     dest="lc_names",
                     help="clean and lower case field names from XML")
    opars.add_option("--keep-fields", default=False, action="store_true",
                     dest="keep_fields",
                     help="don't add fields from XML to SQL table")
    opars.add_option("--ascii-db", default=False, action="store_true",
                     dest="ascii_db",
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true",
                     dest="replace_table",
                     help="replace table i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true",
                     dest="backup_table",
                     help="create backup of old table")
    opars.add_option("-d", "--debug", default=False, action="store_true",
                     dest="debug",
                     help="debug mode (more output)")

    (options, args) = opars.parse_args()

    # filename and dsn are required -- show version and usage otherwise
    if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
        print "importFMPXML "+version_string
        opars.print_help()
        sys.exit(1)

    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    # log to console with timestamps
    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importFMPXML(options)
608:
609:
610:
611:
# FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>