Annotation of ZSQLExtend/importFMPXML.py, revision 1.32
1.1 casties 1: #!/usr/local/bin/python
2: #
3:
4: import string
5: import logging
6: import sys
1.7 dwinter 7: import types
1.8 casties 8: import time
1.1 casties 9:
1.5 casties 10: from xml import sax
11: from amara import saxtools
12:
# Try psycopg2 first and fall back to the old psycopg (v1) driver.
# The module-global 'psyco' records which major version is in use.
try:
    import psycopg2 as psycopg
    import psycopg2.extensions
    # switch to unicode
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psyco = 2
except:
    # NOTE(review): bare except also hides failures other than a missing
    # psycopg2 module (e.g. errors from register_type) -- presumably a
    # deliberate best-effort fallback to the v1 driver; confirm before narrowing.
    import psycopg
    psyco = 1
22:
# XML namespace of FileMaker's FMPXMLRESULT export format
fm_ns = 'http://www.filemaker.com/fmpxmlresult'

# program version string (printed with command line usage)
version_string = "V0.6.6 ROC 20.5.2011"
1.20 casties 26:
def unicodify(text, withNone=False):
    """decode str (utf-8 or latin-1 representation) into unicode object.

    @param text: str to decode; unicode and other objects are returned unchanged
    @param withNone: if True, None is passed through instead of becoming u""
    """
    if withNone and text is None:
        return None
    if not text:
        # any falsy value (including None when withNone is off) becomes empty
        return u""
    if isinstance(text, str):
        try:
            return text.decode('utf-8')
        except UnicodeDecodeError:
            # not valid utf-8 -- fall back to latin-1 (decoding cannot fail)
            return text.decode('latin-1')
    else:
        return text
1.20 casties 40:
def utf8ify(text, withNone=False):
    """encode unicode object or string into byte string in utf-8 representation"""
    # None is passed through only on request
    if text is None and withNone:
        return None
    # any falsy value collapses to the empty byte string
    if not text:
        return ""
    # plain (byte) strings and other objects are returned unchanged
    if not isinstance(text, unicode):
        return text
    return text.encode('utf-8')
1.8 casties 51:
def getTextFromNode(nodename):
    """get the cdata content of a node"""
    if nodename is None:
        return ""
    # concatenate the data of all direct text-node children
    return "".join(child.data for child in nodename.childNodes
                   if child.nodeType == child.TEXT_NODE)
62:
def sql_quote(v):
    """quote and escape string v for use as an SQL string literal"""
    # escape single quotes and backslashes; the order of the two
    # replacements does not matter because neither replacement text
    # contains the other's search key
    for dkey, repl in (("'", "''"), ("\\", "\\\\")):
        v = v.replace(dkey, repl)
    return "'%s'"%v
1.1 casties 70:
def SimpleSearch(curs, query, args=None, ascii=False):
    """execute sql query and return data.

    @param curs: open database cursor
    @param query: sql query string (may be unicode)
    @param args: (optional) sequence of query parameters
    @param ascii: if True, encode query and args to UTF-8 byte strings
                  (for non-unicode database connections)
    @return: result rows from fetchall(), or None if the statement
             produced no result set (e.g. INSERT/UPDATE/DELETE)
    """
    if ascii:
        # encode all in UTF-8
        query = utf8ify(query)
        if args is not None:
            args = [utf8ify(a, withNone=True) for a in args]

    curs.execute(query, args)
    try:
        return curs.fetchall()
    except Exception:
        # fetchall raises when there is no result set to fetch
        return None
1.1 casties 90:
91:
class TableColumn:
    """simple type for storing sql column name and type"""

    def __init__(self, name, type=None):
        # column name and (optional) sql type string
        self.name = name
        self.type = type

    def getName(self):
        """return the column name"""
        return self.name

    def getType(self):
        """return the column type; defaults to "text" when none was given"""
        if self.type is None:
            return "text"
        return self.type

    def __str__(self):
        return self.name
111:
112:
class xml_handler:
    """SAX handler (for amara saxtools.tenorsax) that imports a FileMaker
    FMPXMLRESULT file into a PostgreSQL table.

    The handle_* methods are generators driven by tenorsax: each yields once
    per SAX event and dispatches child elements via a dispatcher dict.
    tenorsax is assumed to set self.event and self.params between yields
    (see saxtools -- TODO confirm against amara documentation).
    """

    def __init__(self,options):
        """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.lc_names: (optional) lower case and clean up field names from XML
        @param options.keep_fields: (optional) don't add fields to SQL database
        @param options.ascii_db: (optional) assume ascii encoding in db
        @param options.replace_table: (optional) delete and re-insert data
        @param options.backup_table: (optional) create backup of old table (breaks indices)
        @param options.use_logger_instance: (optional) use this instance of a logger
        """

        # set up logger
        if hasattr(options, 'use_logger_instance'):
            self.logger = options.use_logger_instance
        else:
            self.logger = logging.getLogger('db.import.fmpxml')


        # set up parser
        self.event = None
        # top-level dispatch: METADATA describes the fields, RESULTSET holds the rows
        self.top_dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'METADATA'):
            self.handle_meta_fields,
            (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'):
            self.handle_data_fields,
            }

        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        logging.debug("DB encoding: %s"%getattr(self.dbCon, 'encoding', 'UNKNOWN'))
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!"%options.dsn

        # copy options to attributes (all optional except table)
        self.table = getattr(options,"table",None)
        self.update_fields = getattr(options,"update_fields",None)
        self.id_field = getattr(options,"id_field",None)
        self.sync_mode = getattr(options,"sync_mode",None)
        self.lc_names = getattr(options,"lc_names",None)
        self.keep_fields = getattr(options,"keep_fields",None)
        self.ascii_db = getattr(options,"ascii_db",None)
        self.replace_table = getattr(options,"replace_table",None)
        self.backup_table = getattr(options,"backup_table",None)
        self.read_before_update = getattr(options,"read_before_update",None)
        self.debug_data = getattr(options,"debug_data",None)

        self.logger.debug("dsn: "+repr(getattr(options,"dsn",None)))
        self.logger.debug("table: "+repr(self.table))
        self.logger.debug("update_fields: "+repr(self.update_fields))
        self.logger.debug("id_field: "+repr(self.id_field))
        self.logger.debug("sync_mode: "+repr(self.sync_mode))
        self.logger.debug("lc_names: "+repr(self.lc_names))
        self.logger.debug("keep_fields: "+repr(self.keep_fields))
        self.logger.debug("ascii_db: "+repr(self.ascii_db))
        self.logger.debug("replace_table: "+repr(self.replace_table))
        self.logger.debug("backup_table: "+repr(self.backup_table))
        self.logger.debug("read_before_update: "+repr(self.read_before_update))
        self.logger.debug("debug_data: "+repr(self.debug_data))

        # map of existing db ids -> number of times seen in the XML (for sync)
        self.dbIDs = {}
        self.rowcnt = 0

        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr="select %s from %s"%(self.id_field,self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0;
                self.rowcnt += 1

            self.logger.info("%d entries in DB to sync"%self.rowcnt)

        # names of fields in XML file
        self.xml_field_names = []
        # map XML field names to SQL field names
        self.xml_field_map = {}
        # and vice versa
        self.sql_field_map = {}

        return

    def handle_meta_fields(self, end_condition):
        """generator handling the METADATA element: collects field names, then
        prepares the db table and the insert/update/select SQL statements."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
            self.handle_meta_field,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading metadata...")
        if self.debug_data:
            self.logger.debug("START METADATA")
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        if self.debug_data:
            self.logger.debug("END METADATA")

        # rename table for backup
        if self.backup_table:
            self.orig_table = self.table
            self.tmp_table = self.table + "_tmp"
            backup_name = "%s_%s"%(self.table,time.strftime('%Y_%m_%d_%H_%M_%S'))

            # remove old temp table
            qstr = "DROP TABLE %s"%(self.tmp_table)
            try:
                self.db.execute(qstr)
            except:
                # table may not exist -- ignore
                pass

            self.dbCon.commit()

            if self.id_field:
                # sync mode -- copy backup table, update current table
                self.logger.info("copy table %s to %s"%(self.table,backup_name))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(backup_name,self.table)

            else:
                # replace mode -- create empty tmp table, insert into tmp table
                self.table = self.tmp_table
                self.logger.info("create empty table %s"%(self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)

            self.db.execute(qstr)
            self.dbCon.commit()

        # delete data from table for replace
        if self.replace_table:
            self.logger.info("delete data from table %s"%(self.table))
            qstr = "TRUNCATE TABLE %s"%(self.table)
            self.db.execute(qstr)
            self.dbCon.commit()

        # try to match date style with XML
        self.db.execute("set datestyle to 'german'")

        #self.logger.debug("xml-fieldnames:"+repr(self.xml_field_names))
        # get list of fields and types of db table (from pg catalog)
        qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields={}
        for f in SimpleSearch(self.db, qstr%self.table):
            fn = f[0]
            ft = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[fn] = TableColumn(fn,ft)

        # translate id_field (SQL-name) to XML-name
        self.xml_id = self.sql_field_map.get(self.id_field, None)
        # get type of id_field
        if self.id_field:
            self.id_type = self.sql_fields[self.id_field].getType()
        else:
            self.id_type = None

        # check fields to update
        if self.update_fields is None:
            if self.keep_fields:
                # update all existing fields from sql (when they are in the xml file)
                self.update_fields = {}
                for f in self.sql_fields.keys():
                    if self.sql_field_map.has_key(f):
                        xf = self.sql_field_map[f]
                        self.update_fields[f] = self.xml_field_map[xf]

            else:
                # update all fields
                if self.lc_names:
                    # create dict with sql names
                    self.update_fields = {}
                    for f in self.xml_field_map.values():
                        self.update_fields[f.getName()] = f

                else:
                    self.update_fields = self.xml_field_map

        # and translate to list of xml fields
        if self.lc_names:
            self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
        else:
            self.xml_update_list = self.update_fields.keys()

        if not self.keep_fields:
            # adjust db table to fields in XML and update_fields
            for f in self.xml_field_map.values():
                self.logger.debug("sync-fieldname: %s"%f.getName())
                sf = self.sql_fields.get(f.getName(), None)
                uf = self.update_fields.get(f.getName(), None)
                if sf is not None:
                    # name in db -- check type
                    if f.getType() != sf.getType():
                        self.logger.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType()))
                elif uf is not None:
                    # add field to table
                    fn = uf.getName()
                    ft = uf.getType()
                    qstr="alter table %s add \"%s\" %s"%(self.table,fn,ft)
                    self.logger.info("db add field:"+qstr)

                    if self.ascii_db and type(qstr)==types.UnicodeType:
                        qstr=qstr.encode('utf-8')

                    self.db.execute(qstr)
                    self.dbCon.commit()
                    # add field to field list
                    self.sql_fields[fn] = TableColumn(fn, ft)

        # prepare sql statements for update (do not update id_field)
        setStr=string.join(["\"%s\" = %%s"%self.xml_field_map[f] for f in self.xml_update_list if f != self.xml_id], ', ')
        self.updQuery="UPDATE %s SET %s WHERE \"%s\" = %%s"%(self.table,setStr,self.id_field)
        # and select (for update check)
        selStr=string.join([self.xml_field_map[f].getName() for f in self.xml_update_list if f != self.xml_id], ', ')
        self.selQuery="SELECT %s FROM %s WHERE \"%s\" = %%s"%(selStr,self.table,self.id_field)
        # and insert
        fields=string.join(["\"%s\""%self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values=string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
        self.logger.debug("update-query: "+self.updQuery)
        self.logger.debug("sel-query: "+self.selQuery)
        self.logger.debug("add-query: "+self.addQuery)
        return

    def handle_meta_field(self, end_condition):
        """generator handling a single FIELD element: records the XML field
        name and its (optionally cleaned-up) SQL name."""
        name = self.params.get((None, u'NAME'))
        yield None
        #Element closed. Wrap up
        if self.lc_names:
            # clean name
            sqlname = name.replace(" ","_").lower()
        else:
            sqlname = name
        self.xml_field_names.append(name)
        # map to sql name and default text type
        self.xml_field_map[name] = TableColumn(sqlname, 'text')
        self.sql_field_map[sqlname] = name
        self.logger.debug("FIELD name: "+name)
        return

    def handle_data_fields(self, end_condition):
        """generator handling the RESULTSET element: processes all ROWs, then
        deletes unmatched rows (sync mode) and reinstates backup tables."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'ROW'):
            self.handle_row,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading data...")
        if self.debug_data:
            self.logger.debug("START RESULTSET")
        self.rowcnt = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        if self.debug_data:
            self.logger.debug("END RESULTSET")
        self.dbCon.commit()

        if self.sync_mode:
            # delete unmatched entries in db
            if self.rowcnt > 0:
                self.logger.info("deleting unmatched rows from db")
                delQuery = "DELETE FROM %s WHERE \"%s\" = %%s"%(self.table,self.id_field)
                for id in self.dbIDs.keys():
                    # find all not-updated fields
                    if self.dbIDs[id] == 0:
                        self.logger.info(" delete: %s"%id)
                        SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)

                    elif self.dbIDs[id] > 1:
                        self.logger.info(" sync: ID %s used more than once?"%id)

                self.dbCon.commit()

            else:
                # safety in case we had an empty file
                self.logger.warning("no rows read! not deleting unmatched rows!")

        # reinstate backup tables
        if self.backup_table and not self.id_field:
            backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
            self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
            self.db.execute(qstr)
            self.logger.info("rename working table %s to %s"%(self.table,self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()

        self.logger.info("Done (%s rows)"%self.rowcnt)
        return

    def handle_row(self, end_condition):
        """generator handling a ROW element: collects column data, then
        inserts a new row or updates an existing one (matched by id_field)."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'COL'):
            self.handle_col,
            }
        if self.debug_data:
            self.logger.debug("START ROW")
        self.xml_data = {}
        self.colIdx = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        if self.debug_data:
            self.logger.debug("END ROW")
        self.rowcnt += 1
        # process collected row data
        update=False
        id_val=''
        # synchronize by id_field
        if self.id_field:
            if self.id_type == 'integer':
                try:
                    id_val = int(self.xml_data[self.xml_id])
                except:
                    # non-numeric id stays '' and aborts below
                    pass
            else:
                id_val = self.xml_data[self.xml_id]

            if not id_val:
                # abort update
                self.logger.error("ERROR: unable to sync! emtpy id in row %s"%self.rowcnt)
                return

            if id_val in self.dbIDs:
                # mark id as seen; >1 means duplicate in XML
                self.dbIDs[id_val] += 1
                update=True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            # do not update id_field
            if update and fn == self.xml_id:
                continue

            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            # NOTE: local 'type' shadows the builtin
            type = self.sql_fields[f.getName()].getType()
            if type == "date" and len(val.strip()) == 0:
                # empty date field
                val = None

            elif type == "integer" and len(val) == 0:
                # empty int field
                val = None

            args.append(val)

        if update:
            # update existing row (by id_field)
            if self.read_before_update:
                # read data
                if self.debug_data:
                    self.logger.debug("update check: %s = %s"%(id_val, args))
                oldrow = SimpleSearch(self.db, self.selQuery, [id_val], ascii=self.ascii_db)
                #i = 0
                #for v in oldrow[0]:
                #    logging.debug("v: %s = %s (%s)"%(v,args[i],v==args[i]))
                #    i += 1
                if tuple(oldrow[0]) != tuple(args):
                    # data has changed -- update
                    if self.debug_data:
                        self.logger.debug("really update: %s = %s"%(id_val, args))
                    args.append(id_val) # last arg is id
                    SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

            else:
                # always update
                if self.debug_data:
                    self.logger.debug("update: %s = %s"%(id_val, args))
                args.append(id_val) # last arg is id
                SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

        else:
            # create new row
            if self.debug_data:
                self.logger.debug("insert: %s"%args)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)

        #self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        if (self.rowcnt % 100) == 0:
            # log progress and commit every 100 rows
            self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val))
            self.dbCon.commit()

        return

    def handle_col(self, end_condition):
        """generator handling a COL element: delegates to handle_data_tag
        and advances the column index."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'DATA'):
            self.handle_data_tag,
            }
        #print "START COL"
        yield None
        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None
        #Element closed. Wrap up
        #print "END COL"
        self.colIdx += 1
        return

    def handle_data_tag(self, end_condition):
        """generator handling a DATA element: gathers character data and
        stores it under the current column's XML field name."""
        #print "START DATA"
        content = u''
        yield None
        # gather child elements
        while not self.event == end_condition:
            if self.event[0] == saxtools.CHARACTER_DATA:
                content += self.params
            yield None
        #Element closed. Wrap up
        fn = self.xml_field_names[self.colIdx]
        self.xml_data[fn] = content
        return
559:
560:
def importFMPXML(options):
    """import FileMaker XML file (FMPXMLRESULT format) into the table.
    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    @param options.backup_table: (optional) create backup of old table
    """

    # turn comma separated "name" or "name:type" specs into a
    # dict of field name -> TableColumn
    if getattr(options, 'update_fields', None):
        field_map = {}
        for spec in options.update_fields.split(','):
            if spec.find(':') > 0:
                (fname, ftype) = spec.split(':')
            else:
                fname = spec
                ftype = None
            field_map[fname] = TableColumn(fname, ftype)

        options.update_fields = field_map

    # sync and replace are mutually exclusive
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        return

    # our handler ("consumer") wrapped in tenorsax becomes the SAX handler
    consumer = xml_handler(options)
    handler = saxtools.tenorsax(consumer)
    parser = sax.make_parser()
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
602:
1.1 casties 603:
if __name__ == "__main__":
    # command line front-end: parse options and run the import
    from optparse import OptionParser

    opars = OptionParser()
    opars.add_option("-f", "--file",
                     dest="filename",
                     help="FMPXML file name", metavar="FILE")
    opars.add_option("-c", "--dsn",
                     dest="dsn",
                     help="database connection string")
    opars.add_option("-t", "--table",
                     dest="table",
                     help="database table name")
    opars.add_option("--fields", default=None,
                     dest="update_fields",
                     help="list of fields to update (comma separated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field",
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true",
                     dest="sync_mode",
                     help="do full sync based on id field (remove unmatched fields from db)")
    opars.add_option("--lc-names", default=False, action="store_true",
                     dest="lc_names",
                     help="clean and lower case field names from XML")
    opars.add_option("--keep-fields", default=False, action="store_true",
                     dest="keep_fields",
                     help="don't add fields from XML to SQL table")
    opars.add_option("--ascii-db", default=False, action="store_true",
                     dest="ascii_db",
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true",
                     dest="replace_table",
                     help="replace table i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true",
                     dest="backup_table",
                     help="create backup of old table")
    opars.add_option("--read-before-update", default=False, action="store_true",
                     dest="read_before_update",
                     help="read all data to check if it really changed")
    opars.add_option("-d", "--debug", default=False, action="store_true",
                     dest="debug",
                     help="debug mode (more output)")
    opars.add_option("--debug-data", default=False, action="store_true",
                     dest="debug_data",
                     help="debug mode for data (even more output)")

    (options, args) = opars.parse_args()

    # filename and dsn are mandatory -- print usage and bail out otherwise
    if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
        print "importFMPXML "+version_string
        opars.print_help()
        sys.exit(1)

    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importFMPXML(options)
668:
1.9 dwinter 669:
670:
1.1 casties 671:
FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>