Annotation of ZSQLExtend/importFMPXML.py, revision 1.31
1.1 casties 1: #!/usr/local/bin/python
2: #
3:
4: import string
5: import logging
6: import sys
1.7 dwinter 7: import types
1.8 casties 8: import time
1.1 casties 9:
1.5 casties 10: from xml import sax
11: from amara import saxtools
12:
1.2 casties 13: try:
14: import psycopg2 as psycopg
1.23 casties 15: import psycopg2.extensions
16: # switch to unicode
17: psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
1.2 casties 18: psyco = 2
19: except:
20: import psycopg
21: psyco = 1
22:
1.5 casties 23: fm_ns = 'http://www.filemaker.com/fmpxmlresult'
1.1 casties 24:
1.31 ! casties 25: version_string = "V0.6.5 ROC 10.5.2010"
1.20 casties 26:
def unicodify(text, withNone=False):
    """Decode a byte string (utf-8, falling back to latin-1) into a unicode object.

    If withNone is set, None passes through unchanged; otherwise any falsy
    value (None, empty string, 0) becomes an empty unicode string.
    Non-str objects (e.g. already-unicode text) are returned untouched.
    """
    if withNone and text is None:
        return None
    if not text:
        return u""
    if not isinstance(text, str):
        # already unicode (or some other object) -- nothing to decode
        return text
    try:
        return text.decode('utf-8')
    except:
        # not valid utf-8 -- assume latin-1 input
        return text.decode('latin-1')
1.20 casties 40:
def utf8ify(text, withNone=False):
    """Encode a unicode object into a utf-8 byte string.

    If withNone is set, None passes through unchanged; otherwise any falsy
    value becomes an empty byte string.  Byte strings (and other non-unicode
    objects) are returned untouched.
    """
    if withNone and text is None:
        return None
    if not text:
        return ""
    if not isinstance(text, unicode):
        # already a byte string (or other object) -- pass through as-is
        return text
    return text.encode('utf-8')
1.8 casties 51:
def getTextFromNode(nodename):
    """Return the concatenated cdata (text) content of a DOM node.

    Returns an empty string for None; non-text child nodes are skipped.
    """
    if nodename is None:
        return ""
    return "".join([n.data for n in nodename.childNodes
                    if n.nodeType == n.TEXT_NODE])
62:
def sql_quote(v):
    """Return string v as a single-quoted SQL string literal.

    Escapes backslashes (doubled) and single quotes (doubled), then wraps
    the result in single quotes.  NOTE(review): this hand-rolled quoting is
    only safe for the legacy queries in this module; prefer parameterized
    queries (as SimpleSearch already supports) for anything new.
    """
    # Escape backslashes first so the quote characters introduced by the
    # second replace are not re-escaped.  The old implementation iterated an
    # unordered dict with deprecated string-module functions (string.find /
    # string.split / string.join), which no longer exist in Python 3.
    v = v.replace("\\", "\\\\")
    v = v.replace("'", "''")
    return "'%s'" % v
1.1 casties 70:
def SimpleSearch(curs, query, args=None, ascii=False):
    """Execute an SQL query on cursor curs and return all result rows.

    If ascii is set, the query and every argument are encoded to utf-8
    byte strings before execution (for databases storing plain ASCII).
    Statements without a result set (e.g. DELETE/UPDATE) return None.
    """
    #logger.debug("executing: "+query)
    if ascii:
        # encode query and all arguments as UTF-8 byte strings
        query = utf8ify(query)
        if args is not None:
            args = [utf8ify(a, withNone=True) for a in args]

    curs.execute(query, args)
    #logger.debug("sql done")
    try:
        return curs.fetchall()
    except:
        # no result set available for this statement
        return None
1.1 casties 90:
91:
class TableColumn:
    """Simple value object holding an SQL column name and its type."""

    def __init__(self, name, type=None):
        """Create a column; type may be omitted and defaults to text."""
        #print "new tablecolumn(%s,%s)"%(name, type)
        self.name = name
        self.type = type

    def getName(self):
        """Return the column name."""
        return self.name

    def getType(self):
        """Return the column type, falling back to "text" when unset."""
        if self.type is None:
            return "text"
        return self.type

    def __str__(self):
        # columns render as their bare name (used in query building)
        return self.name
111:
112:
class xml_handler:
    """Tenorsax consumer that imports a FileMaker FMPXMLRESULT file into a
    PostgreSQL table.

    The handler methods are generators driven by amara's saxtools.tenorsax
    event loop: METADATA/FIELD elements build the field maps and prepare the
    SQL statements, RESULTSET/ROW/COL/DATA elements stream the actual rows
    into the database (insert, update or full sync depending on options).
    """

    def __init__(self,options):
        """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
        @param options: dict of options
        @param options.dsn: database connection string
        @param options.table: name of the table the xml shall be imported into
        @param options.filename: xmlfile filename
        @param options.update_fields: (optional) list of fields to update; default is to create all fields
        @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
        @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
        @param options.lc_names: (optional) lower case and clean up field names from XML
        @param options.keep_fields: (optional) don't add fields to SQL database
        @param options.ascii_db: (optional) assume ascii encoding in db
        @param options.replace_table: (optional) delete and re-insert data
        @param options.backup_table: (optional) create backup of old table (breaks indices)
        @param options.use_logger_instance: (optional) use this instance of a logger
        """

        # set up logger
        if hasattr(options, 'use_logger_instance'):
            self.logger = options.use_logger_instance
        else:
            self.logger = logging.getLogger('db.import.fmpxml')


        # set up parser: dispatch METADATA and RESULTSET elements to their
        # generator handlers; self.event/self.params are filled in by tenorsax
        self.event = None
        self.top_dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'METADATA'):
            self.handle_meta_fields,
            (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'):
            self.handle_data_fields,
            }

        # connect database
        self.dbCon = psycopg.connect(options.dsn)
        logging.debug("DB encoding: %s"%getattr(self.dbCon, 'encoding', 'UNKNOWN'))
        self.db = self.dbCon.cursor()
        assert self.db, "AIIEE no db cursor for %s!!"%options.dsn

        # copy all options onto the instance (missing options default to None)
        self.table = getattr(options,"table",None)
        self.update_fields = getattr(options,"update_fields",None)
        self.id_field = getattr(options,"id_field",None)
        self.sync_mode = getattr(options,"sync_mode",None)
        self.lc_names = getattr(options,"lc_names",None)
        self.keep_fields = getattr(options,"keep_fields",None)
        self.ascii_db = getattr(options,"ascii_db",None)
        self.replace_table = getattr(options,"replace_table",None)
        self.backup_table = getattr(options,"backup_table",None)
        self.read_before_update = getattr(options,"read_before_update",None)
        self.debug_data = getattr(options,"debug_data",None)

        self.logger.debug("dsn: "+repr(getattr(options,"dsn",None)))
        self.logger.debug("table: "+repr(self.table))
        self.logger.debug("update_fields: "+repr(self.update_fields))
        self.logger.debug("id_field: "+repr(self.id_field))
        self.logger.debug("sync_mode: "+repr(self.sync_mode))
        self.logger.debug("lc_names: "+repr(self.lc_names))
        self.logger.debug("keep_fields: "+repr(self.keep_fields))
        self.logger.debug("ascii_db: "+repr(self.ascii_db))
        self.logger.debug("replace_table: "+repr(self.replace_table))
        self.logger.debug("backup_table: "+repr(self.backup_table))
        self.logger.debug("read_before_update: "+repr(self.read_before_update))
        self.logger.debug("debug_data: "+repr(self.debug_data))

        # id -> match count (0 = not seen in XML yet), used by sync mode
        self.dbIDs = {}
        self.rowcnt = 0

        if self.id_field is not None:
            # prepare a list of ids for sync mode
            qstr="select %s from %s"%(self.id_field,self.table)
            for id in SimpleSearch(self.db, qstr):
                # value 0: not updated
                self.dbIDs[id[0]] = 0;
                self.rowcnt += 1

            self.logger.info("%d entries in DB to sync"%self.rowcnt)

        # names of fields in XML file
        self.xml_field_names = []
        # map XML field names to SQL field names
        self.xml_field_map = {}
        # and vice versa
        self.sql_field_map = {}

        return

    def handle_meta_fields(self, end_condition):
        """Generator handling the METADATA element.

        Collects FIELD definitions, then (on element close) prepares the
        target table (backup/replace), reads the SQL schema, adds missing
        columns and builds the update/select/insert statements.
        """
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
            self.handle_meta_field,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading metadata...")
        if self.debug_data:
            self.logger.debug("START METADATA")
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        if self.debug_data:
            self.logger.debug("END METADATA")

        # rename table for backup
        if self.backup_table:
            self.orig_table = self.table
            self.tmp_table = self.table + "_tmp"
            backup_name = "%s_%s"%(self.table,time.strftime('%Y_%m_%d_%H_%M_%S'))

            # remove old temp table (best effort -- it may not exist)
            qstr = "DROP TABLE %s"%(self.tmp_table)
            try:
                self.db.execute(qstr)
            except:
                pass

            self.dbCon.commit()

            if self.id_field:
                # sync mode -- copy backup table, update current table
                self.logger.info("copy table %s to %s"%(self.table,backup_name))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(backup_name,self.table)

            else:
                # replace mode -- create empty tmp table, insert into tmp table
                self.table = self.tmp_table
                self.logger.info("create empty table %s"%(self.table))
                qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)

            self.db.execute(qstr)
            self.dbCon.commit()

        # delete data from table for replace
        if self.replace_table:
            self.logger.info("delete data from table %s"%(self.table))
            qstr = "TRUNCATE TABLE %s"%(self.table)
            self.db.execute(qstr)
            self.dbCon.commit()

        # try to match date style with XML
        # NOTE(review): assumes the FileMaker export uses german date format
        # (DD.MM.YYYY) -- confirm against the actual export settings
        self.db.execute("set datestyle to 'german'")

        #self.logger.debug("xml-fieldnames:"+repr(self.xml_field_names))
        # get list of fields and types of db table (from pg catalog tables)
        qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
        self.sql_fields={}
        for f in SimpleSearch(self.db, qstr%self.table):
            fn = f[0]
            ft = f[1]
            #print "SQL fields: %s (%s)"%(n,t)
            self.sql_fields[fn] = TableColumn(fn,ft)

        # translate id_field (SQL-name) to XML-name
        self.xml_id = self.sql_field_map.get(self.id_field, None)
        # get type of id_field
        if self.id_field:
            self.id_type = self.sql_fields[self.id_field].getType()
        else:
            self.id_type = None

        # check fields to update
        if self.update_fields is None:
            if self.keep_fields:
                # update all existing fields from sql (when they are in the xml file)
                self.update_fields = {}
                for f in self.sql_fields.keys():
                    if self.sql_field_map.has_key(f):
                        xf = self.sql_field_map[f]
                        self.update_fields[f] = self.xml_field_map[xf]

            else:
                # update all fields
                if self.lc_names:
                    # create dict with sql names
                    self.update_fields = {}
                    for f in self.xml_field_map.values():
                        self.update_fields[f.getName()] = f

                else:
                    self.update_fields = self.xml_field_map

        # and translate to list of xml fields
        if self.lc_names:
            self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
        else:
            self.xml_update_list = self.update_fields.keys()

        if not self.keep_fields:
            # adjust db table to fields in XML and update_fields
            for f in self.xml_field_map.values():
                self.logger.debug("sync-fieldname: %s"%f.getName())
                sf = self.sql_fields.get(f.getName(), None)
                uf = self.update_fields.get(f.getName(), None)
                if sf is not None:
                    # name in db -- check type
                    if f.getType() != sf.getType():
                        self.logger.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType()))
                elif uf is not None:
                    # add field to table
                    fn = uf.getName()
                    ft = uf.getType()
                    qstr="alter table %s add \"%s\" %s"%(self.table,fn,ft)
                    self.logger.info("db add field:"+qstr)

                    if self.ascii_db and type(qstr)==types.UnicodeType:
                        qstr=qstr.encode('utf-8')

                    self.db.execute(qstr)
                    self.dbCon.commit()
                    # add field to field list
                    self.sql_fields[fn] = TableColumn(fn, ft)

        # prepare sql statements for update (do not update id_field)
        setStr=string.join(["\"%s\" = %%s"%self.xml_field_map[f] for f in self.xml_update_list if f != self.xml_id], ', ')
        self.updQuery="UPDATE %s SET %s WHERE \"%s\" = %%s"%(self.table,setStr,self.id_field)
        # and select (for update check)
        selStr=string.join([self.xml_field_map[f].getName() for f in self.xml_update_list if f != self.xml_id], ', ')
        self.selQuery="SELECT %s FROM %s WHERE \"%s\" = %%s"%(selStr,self.table,self.id_field)
        # and insert
        fields=string.join(["\"%s\""%self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
        values=string.join(['%s' for f in self.xml_update_list], ',')
        self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
        self.logger.debug("update-query: "+self.updQuery)
        self.logger.debug("sel-query: "+self.selQuery)
        self.logger.debug("add-query: "+self.addQuery)
        return

    def handle_meta_field(self, end_condition):
        """Generator handling one FIELD element: records the XML field name
        and its (optionally lower-cased/cleaned) SQL counterpart."""
        name = self.params.get((None, u'NAME'))
        yield None
        #Element closed. Wrap up
        if self.lc_names:
            # clean name
            sqlname = name.replace(" ","_").lower()
        else:
            sqlname = name
        self.xml_field_names.append(name)
        # map to sql name and default text type
        self.xml_field_map[name] = TableColumn(sqlname, 'text')
        self.sql_field_map[sqlname] = name
        self.logger.debug("FIELD name: "+name)
        return

    def handle_data_fields(self, end_condition):
        """Generator handling the RESULTSET element.

        Streams ROW elements into the database; on close it deletes
        unmatched rows (sync mode) and swaps in the backup/tmp tables.
        """
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'ROW'):
            self.handle_row,
            }
        #First round through the generator corresponds to the
        #start element event
        self.logger.info("reading data...")
        if self.debug_data:
            self.logger.debug("START RESULTSET")
        self.rowcnt = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        if self.debug_data:
            self.logger.debug("END RESULTSET")
        self.dbCon.commit()

        if self.sync_mode:
            # delete unmatched entries in db
            self.logger.info("deleting unmatched rows from db")
            delQuery = "DELETE FROM %s WHERE \"%s\" = %%s"%(self.table,self.id_field)
            for id in self.dbIDs.keys():
                # find all not-updated fields
                if self.dbIDs[id] == 0:
                    self.logger.info(" delete: %s"%id)
                    SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)

                elif self.dbIDs[id] > 1:
                    self.logger.info(" sync: ID %s used more than once?"%id)

            self.dbCon.commit()

        # reinstate backup tables (replace mode only: rename orig away,
        # rename the tmp working table into its place)
        if self.backup_table and not self.id_field:
            backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
            self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
            self.db.execute(qstr)
            self.logger.info("rename working table %s to %s"%(self.table,self.orig_table))
            qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
            self.db.execute(qstr)
            self.dbCon.commit()

        self.logger.info("Done (%s rows)"%self.rowcnt)
        return

    def handle_row(self, end_condition):
        """Generator handling one ROW element.

        Collects all COL/DATA values into self.xml_data, then inserts the
        row or -- when the id matches an existing db entry -- updates it
        (optionally only after a read-before-update change check).
        """
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'COL'):
            self.handle_col,
            }
        if self.debug_data:
            self.logger.debug("START ROW")
        self.xml_data = {}
        self.colIdx = 0
        yield None

        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None

        #Element closed. Wrap up
        if self.debug_data:
            self.logger.debug("END ROW")
        self.rowcnt += 1
        # process collected row data
        update=False
        id_val=''
        # synchronize by id_field
        if self.id_field:
            if self.id_type == 'integer':
                # non-numeric id stays '' and aborts below
                try:
                    id_val = int(self.xml_data[self.xml_id])
                except:
                    pass
            else:
                id_val = self.xml_data[self.xml_id]

            if not id_val:
                # abort update
                self.logger.error("ERROR: unable to sync! emtpy id in row %s"%self.rowcnt)
                return

            if id_val in self.dbIDs:
                self.dbIDs[id_val] += 1
                update=True

        # collect all values
        args = []
        for fn in self.xml_update_list:
            # do not update id_field
            if update and fn == self.xml_id:
                continue

            f = self.xml_field_map[fn]
            val = self.xml_data[fn]
            type = self.sql_fields[f.getName()].getType()
            if type == "date" and len(val.strip()) == 0:
                # empty date field -- store NULL instead of ''
                val = None

            elif type == "integer" and len(val) == 0:
                # empty int field -- store NULL instead of ''
                val = None

            args.append(val)

        if update:
            # update existing row (by id_field)
            if self.read_before_update:
                # read data and only write when something changed
                if self.debug_data:
                    self.logger.debug("update check: %s = %s"%(id_val, args))
                oldrow = SimpleSearch(self.db, self.selQuery, [id_val], ascii=self.ascii_db)
                #i = 0
                #for v in oldrow[0]:
                #    logging.debug("v: %s = %s (%s)"%(v,args[i],v==args[i]))
                #    i += 1
                if tuple(oldrow[0]) != tuple(args):
                    # data has changed -- update
                    if self.debug_data:
                        self.logger.debug("really update: %s = %s"%(id_val, args))
                    args.append(id_val) # last arg is id
                    SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

            else:
                # always update
                if self.debug_data:
                    self.logger.debug("update: %s = %s"%(id_val, args))
                args.append(id_val) # last arg is id
                SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)

        else:
            # create new row
            if self.debug_data:
                self.logger.debug("insert: %s"%args)
            SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)

        #self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
        if (self.rowcnt % 100) == 0:
            # commit in batches of 100 rows
            self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val))
            self.dbCon.commit()

        return

    def handle_col(self, end_condition):
        """Generator handling one COL element: delegates to DATA handling
        and advances the column index."""
        dispatcher = {
            (saxtools.START_ELEMENT, fm_ns, u'DATA'):
            self.handle_data_tag,
            }
        #print "START COL"
        yield None
        #delegate is a generator that handles all the events "within"
        #this element
        delegate = None
        while not self.event == end_condition:
            delegate = saxtools.tenorsax.event_loop_body(
                dispatcher, delegate, self.event)
            yield None
        #Element closed. Wrap up
        #print "END COL"
        self.colIdx += 1
        return

    def handle_data_tag(self, end_condition):
        """Generator handling one DATA element: gathers character data and
        stores it in self.xml_data under the current column's field name."""
        #print "START DATA"
        content = u''
        yield None
        # gather child elements
        while not self.event == end_condition:
            if self.event[0] == saxtools.CHARACTER_DATA:
                content += self.params
            yield None
        #Element closed. Wrap up
        fn = self.xml_field_names[self.colIdx]
        self.xml_data[fn] = content
        return
554:
555:
def importFMPXML(options):
    """import FileMaker XML file (FMPXMLRESULT format) into the table.
    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    @param options.backup_table: (optional) create backup of old table
    """

    # turn a comma separated "name" or "name:type" list into a dict of
    # TableColumn objects keyed by field name
    field_spec = getattr(options, 'update_fields', None)
    if field_spec:
        update_fields = {}
        for spec in field_spec.split(','):
            if spec.find(':') > 0:
                (name, ftype) = spec.split(':')
            else:
                name = spec
                ftype = None
            update_fields[name] = TableColumn(name, ftype)

        options.update_fields = update_fields

    # sync and replace are mutually exclusive
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        return

    # wire up: SAX parser -> tenorsax adapter -> our consumer, then run
    parser = sax.make_parser()
    #The "consumer" is our own handler
    consumer = xml_handler(options)
    #Initialize Tenorsax with handler
    handler = saxtools.tenorsax(consumer)
    #Resulting tenorsax instance is the SAX handler
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
597:
1.1 casties 598:
if __name__ == "__main__":
    # command-line entry point: parse options, set up logging, run import
    from optparse import OptionParser

    opars = OptionParser()
    opars.add_option("-f", "--file",
                     dest="filename",
                     help="FMPXML file name", metavar="FILE")
    opars.add_option("-c", "--dsn",
                     dest="dsn",
                     help="database connection string")
    opars.add_option("-t", "--table",
                     dest="table",
                     help="database table name")
    opars.add_option("--fields", default=None,
                     dest="update_fields",
                     help="list of fields to update (comma separated, sql-names)", metavar="LIST")
    opars.add_option("--id-field", default=None,
                     dest="id_field",
                     help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
    opars.add_option("--sync", "--sync-mode", default=False, action="store_true",
                     dest="sync_mode",
                     help="do full sync based on id field (remove unmatched fields from db)")
    opars.add_option("--lc-names", default=False, action="store_true",
                     dest="lc_names",
                     help="clean and lower case field names from XML")
    opars.add_option("--keep-fields", default=False, action="store_true",
                     dest="keep_fields",
                     help="don't add fields from XML to SQL table")
    opars.add_option("--ascii-db", default=False, action="store_true",
                     dest="ascii_db",
                     help="the SQL database stores ASCII instead of unicode")
    opars.add_option("--replace", default=False, action="store_true",
                     dest="replace_table",
                     help="replace table i.e. delete and re-insert data")
    opars.add_option("--backup", default=False, action="store_true",
                     dest="backup_table",
                     help="create backup of old table")
    opars.add_option("--read-before-update", default=False, action="store_true",
                     dest="read_before_update",
                     help="read all data to check if it really changed")
    opars.add_option("-d", "--debug", default=False, action="store_true",
                     dest="debug",
                     help="debug mode (more output)")
    opars.add_option("--debug-data", default=False, action="store_true",
                     dest="debug_data",
                     help="debug mode for data (even more output)")

    (options, args) = opars.parse_args()

    # filename and dsn are mandatory; show usage and bail out otherwise
    if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
        print "importFMPXML "+version_string
        opars.print_help()
        sys.exit(1)

    if options.debug:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    # configure root logger (the handler classes log through it)
    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S')

    importFMPXML(options)
663:
1.9 dwinter 664:
665:
1.1 casties 666:
FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>