Annotation of ZSQLExtend/importFMPXML.py, revision 1.29
1.1 casties 1: #!/usr/local/bin/python
2: #
3:
4: import string
5: import logging
6: import sys
1.7 dwinter 7: import types
1.8 casties 8: import time
1.1 casties 9:
1.5 casties 10: from xml import sax
11: from amara import saxtools
12:
1.2 casties 13: try:
14: import psycopg2 as psycopg
1.23 casties 15: import psycopg2.extensions
16: # switch to unicode
17: psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
1.2 casties 18: psyco = 2
19: except:
20: import psycopg
21: psyco = 1
22:
# XML namespace used by FileMaker's FMPXMLRESULT export format
fm_ns = 'http://www.filemaker.com/fmpxmlresult'

# human-readable version tag of this importer (shown in the CLI usage message)
version_string = "V0.6.3 ROC 10.2.2009"
1.20 casties 26:
def unicodify(text, withNone=False):
    """decode str (utf-8 or latin-1 representation) into unicode object.

    @param text: byte string (or unicode, passed through unchanged)
    @param withNone: if True, None is returned as None instead of u""
    @returns: unicode object (or None when withNone applies)
    """
    if withNone and text is None:
        return None
    if not text:
        # None (without withNone), empty string, 0 etc. all map to u""
        return u""
    if isinstance(text, str):
        try:
            return text.decode('utf-8')
        # only catch decode failures -- the old bare except also hid
        # unrelated errors such as KeyboardInterrupt or LookupError
        except UnicodeDecodeError:
            # not valid utf-8 -- fall back to latin-1 (cannot fail)
            return text.decode('latin-1')
    else:
        return text
1.20 casties 40:
def utf8ify(text, withNone=False):
    """encode unicode object or string into byte string in utf-8 representation.

    @param text: unicode object or string (or None)
    @param withNone: if True, None is returned as None instead of ""
    @returns: utf-8 encoded byte string (or None when withNone applies)
    """
    if text is None:
        # None either passes through or becomes an empty string
        return None if withNone else ""
    if not text:
        # any other false-ish value maps to the empty string
        return ""
    if isinstance(text, unicode):
        return text.encode('utf-8')
    # already a byte string -- leave untouched
    return text
1.8 casties 51:
def getTextFromNode(nodename):
    """return the concatenated cdata (text) content of a DOM node.

    @param nodename: DOM node (or None)
    @returns: string with the joined text of all TEXT_NODE children
    """
    if nodename is None:
        return ""
    # collect the data of every plain-text child and join once
    text_parts = []
    for child in nodename.childNodes:
        if child.nodeType == child.TEXT_NODE:
            text_parts.append(child.data)
    return "".join(text_parts)
62:
def sql_quote(v):
    """quote and escape a string value for interpolation into an SQL statement.

    Escapes single quotes (SQL style: ' -> '') and backslashes (\ -> \\),
    then wraps the result in single quotes.

    @param v: string to quote
    @returns: quoted string literal
    """
    # str methods replace the deprecated string.find/split/join module
    # functions; the two replacements are independent (neither produces
    # the other's search key), so the order does not matter
    for dkey, repl in (("\'", "''"), ("\\", "\\\\")):
        if dkey in v:
            v = v.replace(dkey, repl)
    return "'%s'"%v
1.1 casties 70:
def SimpleSearch(curs, query, args=None, ascii=False):
    """execute sql query and return all result rows.

    @param curs: DB cursor to execute on
    @param query: SQL query string (with %s-style parameter placeholders)
    @param args: (optional) sequence of query parameters
    @param ascii: if True, encode query and arguments as utf-8 byte strings
        before executing (for databases assumed to store ascii/bytes)
    @returns: list of result rows, or None when there is no result set to fetch
    """
    #logger.debug("executing: "+query)
    if ascii:
        # encode all in UTF-8
        query = utf8ify(query)
        if args is not None:
            args = [utf8ify(a, withNone=True) for a in args]

    curs.execute(query, args)
    #logger.debug("sql done")
    try:
        return curs.fetchall()
    # narrowed from a bare except: non-SELECT statements raise here because
    # there is nothing to fetch; KeyboardInterrupt etc. now propagate
    except Exception:
        return None
1.1 casties 90:
91:
class TableColumn:
    """simple type for storing sql column name and type"""

    def __init__(self, name, type=None):
        # NOTE: parameter name "type" shadows the builtin but is part of
        # the public interface, so it is kept
        self.name = name
        self.type = type

    def getName(self):
        """return the column name"""
        return self.name

    def getType(self):
        """return the column type, defaulting to 'text' when unset"""
        return self.type if self.type is not None else "text"

    def __str__(self):
        return self.name
111:
112:
class xml_handler:
    """Generator-based SAX content handler (amara tenorsax protocol) that
    imports a FileMaker FMPXMLRESULT document into a PostgreSQL table.

    Each handle_* method is a coroutine: the first yield corresponds to the
    element's start event, subsequent yields consume child events until the
    end_condition event arrives, and the code after the loop runs at the
    element's end tag.
    """

    def __init__(self,options):
        """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.lc_names: (optional) lower case and clean up field names from XML
        @param options.keep_fields: (optional) don't add fields to SQL database
        @param options.ascii_db: (optional) assume ascii encoding in db
        @param options.replace_table: (optional) delete and re-insert data
        @param options.backup_table: (optional) create backup of old table (breaks indices)
        @param options.use_logger_instance: (optional) use this instance of a logger
        """

        # set up logger
        if hasattr(options, 'use_logger_instance'):
            self.logger = options.use_logger_instance
        else:
            self.logger = logging.getLogger('db.import.fmpxml')


        # set up parser
        # self.event is updated by the tenorsax driver before each send()
        self.event = None
        # dispatch table: top-level elements of the FMPXMLRESULT document
        self.top_dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'METADATA'):
            self.handle_meta_fields,
            (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'):
            self.handle_data_fields,
            }

        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        logging.debug("DB encoding: %s"%getattr(self.dbCon, 'encoding', 'UNKNOWN'))
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!"%options.dsn

        # copy all options into attributes (missing ones become None/False-ish)
        self.table = getattr(options,"table",None)
        self.update_fields = getattr(options,"update_fields",None)
        self.id_field = getattr(options,"id_field",None)
        self.sync_mode = getattr(options,"sync_mode",None)
        self.lc_names = getattr(options,"lc_names",None)
        self.keep_fields = getattr(options,"keep_fields",None)
        self.ascii_db = getattr(options,"ascii_db",None)
        self.replace_table = getattr(options,"replace_table",None)
        self.backup_table = getattr(options,"backup_table",None)
        self.read_before_update = getattr(options,"read_before_update",None)
        self.debug_data = getattr(options,"debug_data",None)

        self.logger.debug("dsn: "+repr(getattr(options,"dsn",None)))
        self.logger.debug("table: "+repr(self.table))
        self.logger.debug("update_fields: "+repr(self.update_fields))
        self.logger.debug("id_field: "+repr(self.id_field))
        self.logger.debug("sync_mode: "+repr(self.sync_mode))
        self.logger.debug("lc_names: "+repr(self.lc_names))
        self.logger.debug("keep_fields: "+repr(self.keep_fields))
        self.logger.debug("ascii_db: "+repr(self.ascii_db))
        self.logger.debug("replace_table: "+repr(self.replace_table))
        self.logger.debug("backup_table: "+repr(self.backup_table))
        self.logger.debug("read_before_update: "+repr(self.read_before_update))
        self.logger.debug("debug_data: "+repr(self.debug_data))

        # map: id value -> number of times the id was seen in the XML
        self.dbIDs = {}
        self.rowcnt = 0

        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr="select %s from %s"%(self.id_field,self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0;
                self.rowcnt += 1

            self.logger.info("%d entries in DB to sync"%self.rowcnt)

        # names of fields in XML file
        self.xml_field_names = []
        # map XML field names to SQL field names
        self.xml_field_map = {}
        # and vice versa
        self.sql_field_map = {}

        return

    def handle_meta_fields(self, end_condition):
        """Coroutine for the METADATA element: collects FIELD definitions,
        then (at the end tag) prepares tables and the SQL statements for
        insert/update/select used later by handle_row."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
            self.handle_meta_field,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading metadata...")
        self.logger.debug("START METADATA")
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END METADATA")

        # rename table for backup
        if self.backup_table:
            self.orig_table = self.table
            self.tmp_table = self.table + "_tmp"
            backup_name = "%s_%s"%(self.table,time.strftime('%Y_%m_%d_%H_%M_%S'))

            # remove old temp table
            qstr = "DROP TABLE %s"%(self.tmp_table)
            try:
                self.db.execute(qstr)
            except:
                # table may not exist -- best effort, ignore any error
                pass

            self.dbCon.commit()

            if self.id_field:
                # sync mode -- copy backup table, update current table
                self.logger.info("copy table %s to %s"%(self.table,backup_name))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(backup_name,self.table)

            else:
                # replace mode -- create empty tmp table, insert into tmp table
                self.table = self.tmp_table
                self.logger.info("create empty table %s"%(self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)

            self.db.execute(qstr)
            self.dbCon.commit()

        # delete data from table for replace
        if self.replace_table:
            self.logger.info("delete data from table %s"%(self.table))
            qstr = "TRUNCATE TABLE %s"%(self.table)
            self.db.execute(qstr)
            self.dbCon.commit()

        # try to match date style with XML
        self.db.execute("set datestyle to 'german'")

        #self.logger.debug("xml-fieldnames:"+repr(self.xml_field_names))
        # get list of fields and types of db table
        # (reads column names/types from the PostgreSQL system catalogs)
        qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields={}
        for f in SimpleSearch(self.db, qstr%self.table):
            fn = f[0]
            ft = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[fn] = TableColumn(fn,ft)

        # translate id_field (SQL-name) to XML-name
        self.xml_id = self.sql_field_map.get(self.id_field, None)
        # get type of id_field
        if self.id_field:
            self.id_type = self.sql_fields[self.id_field].getType()
        else:
            self.id_type = None

        # check fields to update
        if self.update_fields is None:
            if self.keep_fields:
                # update all existing fields from sql (when they are in the xml file)
                self.update_fields = {}
                for f in self.sql_fields.keys():
                    if self.sql_field_map.has_key(f):
                        xf = self.sql_field_map[f]
                        self.update_fields[f] = self.xml_field_map[xf]

            else:
                # update all fields
                if self.lc_names:
                    # create dict with sql names
                    self.update_fields = {}
                    for f in self.xml_field_map.values():
                        self.update_fields[f.getName()] = f

                else:
                    self.update_fields = self.xml_field_map

        # and translate to list of xml fields
        if self.lc_names:
            self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
        else:
            self.xml_update_list = self.update_fields.keys()

        if not self.keep_fields:
            # adjust db table to fields in XML and update_fields
            for f in self.xml_field_map.values():
                self.logger.debug("sync-fieldname: %s"%f.getName())
                sf = self.sql_fields.get(f.getName(), None)
                uf = self.update_fields.get(f.getName(), None)
                if sf is not None:
                    # name in db -- check type
                    if f.getType() != sf.getType():
                        # type mismatch is only logged, not fixed
                        self.logger.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType()))
                elif uf is not None:
                    # add field to table
                    fn = uf.getName()
                    ft = uf.getType()
                    qstr="alter table %s add \"%s\" %s"%(self.table,fn,ft)
                    self.logger.info("db add field:"+qstr)

                    if self.ascii_db and type(qstr)==types.UnicodeType:
                        qstr=qstr.encode('utf-8')

                    self.db.execute(qstr)
                    self.dbCon.commit()
                    # add field to field list
                    self.sql_fields[fn] = TableColumn(fn, ft)

        # prepare sql statements for update (do not update id_field)
        setStr=string.join(["\"%s\" = %%s"%self.xml_field_map[f] for f in self.xml_update_list if f != self.xml_id], ', ')
        self.updQuery="UPDATE %s SET %s WHERE \"%s\" = %%s"%(self.table,setStr,self.id_field)
        # and select (for update check)
        selStr=string.join([self.xml_field_map[f].getName() for f in self.xml_update_list if f != self.xml_id], ', ')
        self.selQuery="SELECT %s FROM %s WHERE \"%s\" = %%s"%(selStr,self.table,self.id_field)
        # and insert
        fields=string.join(["\"%s\""%self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values=string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
        self.logger.debug("update-query: "+self.updQuery)
        self.logger.debug("sel-query: "+self.selQuery)
        self.logger.debug("add-query: "+self.addQuery)
        return

    def handle_meta_field(self, end_condition):
        """Coroutine for a single FIELD element: records the XML field name
        and its cleaned SQL counterpart in the name maps."""
        name = self.params.get((None, u'NAME'))
        yield None
        #Element closed. Wrap up
        if self.lc_names:
            # clean name
            sqlname = name.replace(" ","_").lower()
        else:
            sqlname = name
        self.xml_field_names.append(name)
        # map to sql name and default text type
        self.xml_field_map[name] = TableColumn(sqlname, 'text')
        self.sql_field_map[sqlname] = name
        self.logger.debug("FIELD name: "+name)
        return

    def handle_data_fields(self, end_condition):
        """Coroutine for the RESULTSET element: delegates each ROW to
        handle_row, then finishes sync/backup bookkeeping at the end tag."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'ROW'):
            self.handle_row,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading data...")
        self.logger.debug("START RESULTSET")
        self.rowcnt = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END RESULTSET")
        self.dbCon.commit()

        if self.sync_mode:
            # delete unmatched entries in db
            self.logger.info("deleting unmatched rows from db")
            delQuery = "DELETE FROM %s WHERE \"%s\" = %%s"%(self.table,self.id_field)
            for id in self.dbIDs.keys():
                # find all not-updated fields
                if self.dbIDs[id] == 0:
                    self.logger.info(" delete: %s"%id)
                    SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)

                elif self.dbIDs[id] > 1:
                    # id appeared multiple times in the XML -- just warn
                    self.logger.info(" sync: ID %s used more than once?"%id)

            self.dbCon.commit()

        # reinstate backup tables
        # (only in replace mode; sync mode copied the backup up front)
        if self.backup_table and not self.id_field:
            backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
            self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
            self.db.execute(qstr)
            self.logger.info("rename working table %s to %s"%(self.table,self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()

        return

    def handle_row(self, end_condition):
        """Coroutine for a ROW element: collects the column values via
        handle_col, then inserts or updates the corresponding DB row."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'COL'):
            self.handle_col,
            }
        self.logger.debug("START ROW")
        self.xml_data = {}
        self.colIdx = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        self.logger.debug("END ROW")
        self.rowcnt += 1
        # process collected row data
        update=False
        id_val=''
        # synchronize by id_field
        if self.id_field:
            if self.id_type == 'integer':
                try:
                    id_val = int(self.xml_data[self.xml_id])
                except:
                    # non-numeric or missing id -- id_val stays '' and the
                    # row is rejected below
                    pass
            else:
                id_val = self.xml_data[self.xml_id]

            if not id_val:
                # abort update
                # (NOTE: "emtpy" typo is in the original log message)
                self.logger.error("ERROR: unable to sync! emtpy id in row %s"%self.rowcnt)
                return

            if id_val in self.dbIDs:
                self.dbIDs[id_val] += 1
                update=True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            # do not update id_field
            if update and fn == self.xml_id:
                continue

            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            # NOTE: local name "type" shadows the builtin
            type = self.sql_fields[f.getName()].getType()
            if type == "date" and len(val) == 0:
                # empty date field
                val = None

            elif type == "integer" and len(val) == 0:
                # empty int field
                val = None

            args.append(val)

        if update:
            # update existing row (by id_field)
            if self.read_before_update:
                # read data
                if self.debug_data:
                    self.logger.debug("update check: %s = %s"%(id_val, args))
                oldrow = SimpleSearch(self.db, self.selQuery, [id_val], ascii=self.ascii_db)
                #i = 0
                #for v in oldrow[0]:
                #    logging.debug("v: %s = %s (%s)"%(v,args[i],v==args[i]))
                #    i += 1
                if tuple(oldrow[0]) != tuple(args):
                    # data has changed -- update
                    if self.debug_data:
                        self.logger.debug("really update: %s = %s"%(id_val, args))
                    args.append(id_val) # last arg is id
                    SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

            else:
                # always update
                if self.debug_data:
                    self.logger.debug("update: %s = %s"%(id_val, args))
                args.append(id_val) # last arg is id
                SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

        else:
            # create new row
            if self.debug_data:
                self.logger.debug("insert: %s"%args)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)

        #self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        # commit (and report progress) every 100 rows
        if (self.rowcnt % 100) == 0:
            self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val))
            self.dbCon.commit()

        return

    def handle_col(self, end_condition):
        """Coroutine for a COL element: delegates DATA children and advances
        the column index afterwards."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'DATA'):
            self.handle_data_tag,
            }
        #print "START COL"
        yield None
        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None
        #Element closed. Wrap up
        #print "END COL"
        self.colIdx += 1
        return

    def handle_data_tag(self, end_condition):
        """Coroutine for a DATA element: accumulates character data and
        stores it under the XML field name of the current column."""
        #print "START DATA"
        content = u''
        yield None
        # gather child elements
        while not self.event == end_condition:
            if self.event[0] == saxtools.CHARACTER_DATA:
                # character data may arrive in several chunks
                content += self.params
            yield None
        #Element closed. Wrap up
        fn = self.xml_field_names[self.colIdx]
        self.xml_data[fn] = content
        return
547:
548:
def importFMPXML(options):
    """import FileMaker XML file (FMPXMLRESULT format) into the table.
    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    @param options.backup_table: (optional) create backup of old table
    """

    # parse the comma separated "name" or "name:type" field list into
    # a dict of TableColumn objects keyed by sql name
    field_spec = getattr(options, 'update_fields', None)
    if field_spec:
        parsed_fields = {}
        for spec in field_spec.split(','):
            if spec.find(':') > 0:
                (fname, ftype) = spec.split(':')
            else:
                fname = spec
                ftype = None
            parsed_fields[fname] = TableColumn(fname, ftype)

        options.update_fields = parsed_fields

    # sync and replace are mutually exclusive
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        return

    # wire up the SAX pipeline: parser -> tenorsax driver -> our handler
    parser = sax.make_parser()
    #The "consumer" is our own handler
    consumer = xml_handler(options)
    #Initialize Tenorsax with handler
    handler = saxtools.tenorsax(consumer)
    #Resulting tenorsax instance is the SAX handler
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
590:
1.1 casties 591:
# command line interface (Python 2 script entry point)
if __name__ == "__main__":
    from optparse import OptionParser

    # define all command line options (mirrors the options consumed by
    # importFMPXML / xml_handler)
    opars = OptionParser()
    opars.add_option("-f", "--file",
                     dest="filename",
                     help="FMPXML file name", metavar="FILE")
    opars.add_option("-c", "--dsn",
                     dest="dsn",
                     help="database connection string")
    opars.add_option("-t", "--table",
                     dest="table",
                     help="database table name")
    opars.add_option("--fields", default=None,
                     dest="update_fields",
                     help="list of fields to update (comma separated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field",
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true",
                     dest="sync_mode",
                     help="do full sync based on id field (remove unmatched fields from db)")
    opars.add_option("--lc-names", default=False, action="store_true",
                     dest="lc_names",
                     help="clean and lower case field names from XML")
    opars.add_option("--keep-fields", default=False, action="store_true",
                     dest="keep_fields",
                     help="don't add fields from XML to SQL table")
    opars.add_option("--ascii-db", default=False, action="store_true",
                     dest="ascii_db",
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true",
                     dest="replace_table",
                     help="replace table i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true",
                     dest="backup_table",
                     help="create backup of old table")
    opars.add_option("--read-before-update", default=False, action="store_true",
                     dest="read_before_update",
                     help="read all data to check if it really changed")
    opars.add_option("-d", "--debug", default=False, action="store_true",
                     dest="debug",
                     help="debug mode (more output)")
    opars.add_option("--debug-data", default=False, action="store_true",
                     dest="debug_data",
                     help="debug mode for data (even more output)")

    (options, args) = opars.parse_args()

    # filename and dsn are mandatory; show usage and exit otherwise
    if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
        print "importFMPXML "+version_string
        opars.print_help()
        sys.exit(1)

    # configure root logger verbosity from the --debug flag
    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importFMPXML(options)
656:
1.9 dwinter 657:
658:
1.1 casties 659:
FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>