File: [Repository] / ZSQLExtend / importFMPXML.py
Revision 1.30
Tue Nov 17 18:21:28 2009 UTC by casties
Branches: MAIN
CVS tags: HEAD
tried to improve logging behaviour of importFMPXML (still issues when used through mod_rewrite/mod_proxy)

    1: #!/usr/local/bin/python
    2: #
    3: 
    4: import string
    5: import logging
    6: import sys
    7: import types
    8: import time
    9: 
   10: from xml import sax
   11: from amara import saxtools
   12: 
   13: try:
   14:     import psycopg2 as psycopg
   15:     import psycopg2.extensions
   16:     # switch to unicode
   17:     psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
   18:     psyco = 2
   19: except ImportError:
   20:     import psycopg
   21:     psyco = 1
   22: 
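       # XML namespace of FileMaker's FMPXMLRESULT export format; the elements
       # handled below (METADATA, FIELD, RESULTSET, ROW, COL, DATA) live in this namespace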
   23: fm_ns = 'http://www.filemaker.com/fmpxmlresult'
   24: 
   25: version_string = "V0.6.4 ROC 17.11.2009"
   26: 
   27: def unicodify(text, withNone=False):
   28:     """decode str (utf-8 or latin-1 representation) into unicode object"""
   29:     if withNone and text is None:
   30:         return None
   31:     if not text:
   32:         return u""
   33:     if isinstance(text, str):
   34:         try:
   35:             return text.decode('utf-8')
   36:         except UnicodeError:
   37:             return text.decode('latin-1')
   38:     else:
   39:         return text
   40: 
   41: def utf8ify(text, withNone=False):
   42:     """encode unicode object or string into byte string in utf-8 representation"""
   43:     if withNone and text is None:
   44:         return None
   45:     if not text:
   46:         return ""
   47:     if isinstance(text, unicode):
   48:         return text.encode('utf-8')
   49:     else:
   50:         return text
   51: 
   52: def getTextFromNode(nodename):
   53:     """get the cdata content of a node"""
   54:     if nodename is None:
   55:         return ""
   56:     nodelist=nodename.childNodes
   57:     rc = ""
   58:     for node in nodelist:
   59:         if node.nodeType == node.TEXT_NODE:
   60:             rc = rc + node.data
   61:     return rc
   62: 
   63: def sql_quote(v):
   64:     # quote dictionary
   65:     quote_dict = {"\'": "''", "\\": "\\\\"}
   66:     for dkey in quote_dict.keys():
   67:         if string.find(v, dkey) >= 0:
   68:             v=string.join(string.split(v,dkey),quote_dict[dkey])
   69:     return "'%s'"%v
   70: 
   71: def SimpleSearch(curs,query, args=None, ascii=False):
   72:     """execute SQL query and return all rows (or None if the statement has no result set)"""
   73:     #logger.debug("executing: "+query)
   74:     if ascii:
   75:         # encode all in UTF-8
   76:         query = utf8ify(query)
   77:         if args is not None:
   78:             encargs = []
   79:             for a in args:
   80:                 encargs.append(utf8ify(a, withNone=True))
   81:             
   82:             args = encargs
   83: 
   84:     curs.execute(query, args)
   85:     #logger.debug("sql done")
   86:     try:
   87:         return curs.fetchall()
   88:     except:
   89:         return None
   90: 
   91: 
   92: class TableColumn:
   93:     """simple type for storing sql column name and type"""
   94:     
   95:     def __init__(self, name, type=None):
   96:         #print "new tablecolumn(%s,%s)"%(name, type)
   97:         self.name = name
   98:         self.type = type
   99:         
  100:     def getName(self):
  101:         return self.name
  102:     
  103:     def getType(self):
  104:         if self.type is not None:
  105:             return self.type
  106:         else:
  107:             return "text"
  108: 
  109:     def __str__(self):
  110:         return self.name
  111:     
  112:     
  113: class xml_handler:
  114:     def __init__(self,options):
  115:         """SAX handler to import FileMaker XML file (FMPXMLRESULT format) into the table.
  116:         @param options: dict of options
  117:         @param options.dsn: database connection string
  118:         @param options.table: name of the table the xml shall be imported into
  119:         @param options.filename: name of the XML file to import
  120:         @param options.update_fields: (optional) list of fields to update; default is to create all fields
  121:         @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
  122:         @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
  123:         @param options.lc_names: (optional) lower case and clean up field names from XML
  124:         @param options.keep_fields: (optional) don't add fields to SQL database
  125:         @param options.ascii_db: (optional) assume ascii encoding in db
  126:         @param options.replace_table: (optional) delete and re-insert data
  127:         @param options.backup_table: (optional) create backup of old table (breaks indices)
  128:         @param options.use_logger_instance: (optional) use this instance of a logger
  129:         """
  130:         
  131:         # set up logger
  132:         if hasattr(options, 'use_logger_instance'):
  133:             self.logger = options.use_logger_instance
  134:         else:
  135:             self.logger = logging.getLogger('db.import.fmpxml')
  136: 
  137:         
  138:         # set up parser
  139:         self.event = None
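               # dispatch table for saxtools.tenorsax: each key is an
               # (event type, XML namespace, element name) tuple that selects the
               # generator method handling that element and its children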
  140:         self.top_dispatcher = { 
  141:             (saxtools.START_ELEMENT, fm_ns, u'METADATA'): 
  142:             self.handle_meta_fields,
  143:             (saxtools.START_ELEMENT, fm_ns, u'RESULTSET'): 
  144:             self.handle_data_fields,
  145:             }
  146:         
  147:         # connect database
  148:         self.dbCon = psycopg.connect(options.dsn)
  149:         self.logger.debug("DB encoding: %s"%getattr(self.dbCon, 'encoding', 'UNKNOWN'))
  150:         self.db = self.dbCon.cursor()
  151:         assert self.db, "AIIEE no db cursor for %s!!"%options.dsn
  152:     
  153:         self.table = getattr(options,"table",None)
  154:         self.update_fields = getattr(options,"update_fields",None)
  155:         self.id_field = getattr(options,"id_field",None)
  156:         self.sync_mode = getattr(options,"sync_mode",None)
  157:         self.lc_names = getattr(options,"lc_names",None)
  158:         self.keep_fields = getattr(options,"keep_fields",None)
  159:         self.ascii_db = getattr(options,"ascii_db",None)
  160:         self.replace_table = getattr(options,"replace_table",None)
  161:         self.backup_table = getattr(options,"backup_table",None)
  162:         self.read_before_update = getattr(options,"read_before_update",None)
  163:         self.debug_data = getattr(options,"debug_data",None)
  164: 
  165:         self.logger.debug("dsn: "+repr(getattr(options,"dsn",None)))
  166:         self.logger.debug("table: "+repr(self.table))
  167:         self.logger.debug("update_fields: "+repr(self.update_fields))
  168:         self.logger.debug("id_field: "+repr(self.id_field))
  169:         self.logger.debug("sync_mode: "+repr(self.sync_mode))
  170:         self.logger.debug("lc_names: "+repr(self.lc_names))
  171:         self.logger.debug("keep_fields: "+repr(self.keep_fields))
  172:         self.logger.debug("ascii_db: "+repr(self.ascii_db))
  173:         self.logger.debug("replace_table: "+repr(self.replace_table))
  174:         self.logger.debug("backup_table: "+repr(self.backup_table))
  175:         self.logger.debug("read_before_update: "+repr(self.read_before_update))
  176:         self.logger.debug("debug_data: "+repr(self.debug_data))
  177:         
  178:         self.dbIDs = {}
  179:         self.rowcnt = 0
  180:         
  181:         if self.id_field is not None:
  182:             # prepare a list of ids for sync mode
  183:             qstr="select %s from %s"%(self.id_field,self.table)
  184:             for id in SimpleSearch(self.db, qstr):
  185:                 # value 0: not updated
  186:                 self.dbIDs[id[0]] = 0
  187:                 self.rowcnt += 1
  188:                 
  189:             self.logger.info("%d entries in DB to sync"%self.rowcnt)
  190:         
  191:         # names of fields in XML file
  192:         self.xml_field_names = []
  193:         # map XML field names to SQL field names
  194:         self.xml_field_map = {}
  195:         # and vice versa
  196:         self.sql_field_map = {}
  197:         
  198:         return
  199: 
  200:     def handle_meta_fields(self, end_condition):
  201:         dispatcher = {
  202:             (saxtools.START_ELEMENT, fm_ns, u'FIELD'):
  203:             self.handle_meta_field,
  204:             }
  205:         #First round through the generator corresponds to the
  206:         #start element event
  207:         self.logger.info("reading metadata...")
  208:         if self.debug_data:
  209:             self.logger.debug("START METADATA")
  210:         yield None
  211:     
  212:         #delegate is a generator that handles all the events "within"
  213:         #this element
  214:         delegate = None
  215:         while not self.event == end_condition:
  216:             delegate = saxtools.tenorsax.event_loop_body(
  217:                 dispatcher, delegate, self.event)
  218:             yield None
  219:         
  220:         #Element closed. Wrap up
  221:         if self.debug_data:
  222:             self.logger.debug("END METADATA")
  223:         
  224:         # rename table for backup
  225:         if self.backup_table:
  226:             self.orig_table = self.table
  227:             self.tmp_table = self.table + "_tmp"
  228:             backup_name = "%s_%s"%(self.table,time.strftime('%Y_%m_%d_%H_%M_%S'))
  229:             
  230:             # remove old temp table
  231:             qstr = "DROP TABLE %s"%(self.tmp_table)
  232:             try:
  233:                 self.db.execute(qstr)
  234:             except:
  235:                 pass
  236:             
  237:             self.dbCon.commit()
  238:            
  239:             if self.id_field:
  240:                 # sync mode -- copy backup table, update current table 
  241:                 self.logger.info("copy table %s to %s"%(self.table,backup_name))
  242:                 qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(backup_name,self.table)
  243: 
  244:             else:
  245:                 # replace mode -- create empty tmp table, insert into tmp table
  246:                 self.table = self.tmp_table
  247:                 self.logger.info("create empty table %s"%(self.table))
  248:                 qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table)
  249:             
  250:             self.db.execute(qstr)
  251:             self.dbCon.commit()
  252:         
  253:         # delete data from table for replace
  254:         if self.replace_table:
  255:             self.logger.info("delete data from table %s"%(self.table))
  256:             qstr = "TRUNCATE TABLE %s"%(self.table)
  257:             self.db.execute(qstr)
  258:             self.dbCon.commit()
  259:            
  260:         # try to match date style with XML
  261:         self.db.execute("set datestyle to 'german'")
  262:         
  263:         #self.logger.debug("xml-fieldnames:"+repr(self.xml_field_names))
  264:         # get list of fields and types of db table
  265:         qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'"
  266:         self.sql_fields={}
  267:         for f in SimpleSearch(self.db, qstr%self.table):
  268:             fn = f[0]
  269:             ft = f[1]
  270:             #print "SQL fields: %s (%s)"%(n,t)
  271:             self.sql_fields[fn] = TableColumn(fn,ft)
  272:         
  273:         # translate id_field (SQL-name) to XML-name
  274:         self.xml_id = self.sql_field_map.get(self.id_field, None)
  275:         # get type of id_field
  276:         if self.id_field:
  277:             self.id_type = self.sql_fields[self.id_field].getType()
  278:         else:
  279:             self.id_type = None
  280:         
  281:         # check fields to update
  282:         if self.update_fields is None:
  283:             if self.keep_fields:
  284:                 # update all existing fields from sql (when they are in the xml file)
  285:                 self.update_fields = {}
  286:                 for f in self.sql_fields.keys():
  287:                     if self.sql_field_map.has_key(f):
  288:                         xf = self.sql_field_map[f]
  289:                         self.update_fields[f] = self.xml_field_map[xf]
  290: 
  291:             else:
  292:                 # update all fields
  293:                 if self.lc_names:
  294:                     # create dict with sql names
  295:                     self.update_fields = {}
  296:                     for f in self.xml_field_map.values():
  297:                         self.update_fields[f.getName()] = f
  298:                         
  299:                 else:
  300:                     self.update_fields = self.xml_field_map
  301:                                 
  302:         # and translate to list of xml fields
  303:         if self.lc_names:
  304:             self.xml_update_list = [self.sql_field_map[x] for x in self.update_fields]
  305:         else:
  306:             self.xml_update_list = self.update_fields.keys()
  307: 
  308:         if not self.keep_fields:
  309:             # adjust db table to fields in XML and update_fields
  310:             for f in self.xml_field_map.values():
  311:                 self.logger.debug("sync-fieldname: %s"%f.getName())
  312:                 sf = self.sql_fields.get(f.getName(), None)
  313:                 uf = self.update_fields.get(f.getName(), None)
  314:                 if sf is not None:
  315:                     # name in db -- check type
  316:                     if f.getType() != sf.getType():
  317:                         self.logger.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType()))
  318:                 elif uf is not None:
  319:                     # add field to table
  320:                     fn = uf.getName()
  321:                     ft = uf.getType()
  322:                     qstr="alter table %s add \"%s\" %s"%(self.table,fn,ft)
  323:                     self.logger.info("db add field:"+qstr)
  324:                     
  325:                     if self.ascii_db and type(qstr)==types.UnicodeType:
  326:                         qstr=qstr.encode('utf-8')
  327:                         
  328:                     self.db.execute(qstr)
  329:                     self.dbCon.commit()
  330:                     # add field to field list
  331:                     self.sql_fields[fn] = TableColumn(fn, ft)
  332:                 
  333:         # prepare sql statements for update (do not update id_field)
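               # note: table and column names are interpolated into the SQL text here,
               # while row values are later passed as query parameters (the %s placeholders)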
  334:         setStr=string.join(["\"%s\" = %%s"%self.xml_field_map[f] for f in self.xml_update_list if f != self.xml_id], ', ')
  335:         self.updQuery="UPDATE %s SET %s WHERE \"%s\" = %%s"%(self.table,setStr,self.id_field)
  336:         # and select (for update check)
  337:         selStr=string.join([self.xml_field_map[f].getName() for f in self.xml_update_list if f != self.xml_id], ', ')
  338:         self.selQuery="SELECT %s FROM %s WHERE \"%s\" = %%s"%(selStr,self.table,self.id_field)
  339:         # and insert
  340:         fields=string.join(["\"%s\""%self.xml_field_map[x].getName() for x in self.xml_update_list], ',')
  341:         values=string.join(['%s' for f in self.xml_update_list], ',')
  342:         self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values)
  343:         self.logger.debug("update-query: "+self.updQuery)
  344:         self.logger.debug("sel-query: "+self.selQuery)
  345:         self.logger.debug("add-query: "+self.addQuery)
  346:         return
  347: 
  348:     def handle_meta_field(self, end_condition):
  349:         name = self.params.get((None, u'NAME'))
  350:         yield None
  351:         #Element closed.  Wrap up
  352:         if self.lc_names:
  353:             # clean name
  354:             sqlname = name.replace(" ","_").lower() 
  355:         else:
  356:             sqlname = name
  357:         self.xml_field_names.append(name)
  358:         # map to sql name and default text type
  359:         self.xml_field_map[name] = TableColumn(sqlname, 'text')
  360:         self.sql_field_map[sqlname] = name
  361:         self.logger.debug("FIELD name: "+name)
  362:         return
  363: 
  364:     def handle_data_fields(self, end_condition):
  365:         dispatcher = {
  366:             (saxtools.START_ELEMENT, fm_ns, u'ROW'):
  367:             self.handle_row,
  368:             }
  369:         #First round through the generator corresponds to the
  370:         #start element event
  371:         self.logger.info("reading data...")
  372:         if self.debug_data:
  373:             self.logger.debug("START RESULTSET")
  374:         self.rowcnt = 0
  375:         yield None
  376:     
  377:         #delegate is a generator that handles all the events "within"
  378:         #this element
  379:         delegate = None
  380:         while not self.event == end_condition:
  381:             delegate = saxtools.tenorsax.event_loop_body(
  382:                 dispatcher, delegate, self.event)
  383:             yield None
  384:         
  385:         #Element closed.  Wrap up
  386:         if self.debug_data:
  387:             self.logger.debug("END RESULTSET")
  388:         self.dbCon.commit()
  389:         
  390:         if self.sync_mode:
  391:             # delete unmatched entries in db
  392:             self.logger.info("deleting unmatched rows from db")
  393:             delQuery = "DELETE FROM %s WHERE \"%s\" = %%s"%(self.table,self.id_field)
  394:             for id in self.dbIDs.keys():
  395:                 # find all not-updated fields
  396:                 if self.dbIDs[id] == 0:
  397:                     self.logger.info(" delete: %s"%id)
  398:                     SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db)
  399:                     
  400:                 elif self.dbIDs[id] > 1:
  401:                     self.logger.info(" sync: ID %s used more than once?"%id)
  402:             
  403:             self.dbCon.commit()
  404:             
  405:         # reinstate backup tables
  406:         if self.backup_table and not self.id_field:
  407:             backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S'))
  408:             self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name))
  409:             qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name)
  410:             self.db.execute(qstr)
  411:             self.logger.info("rename working table %s to %s"%(self.table,self.orig_table))
  412:             qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table)
  413:             self.db.execute(qstr)
  414:             self.dbCon.commit()
  415:         
  416:         self.logger.info("Done (%s rows)"%self.rowcnt)
  417:         return
  418: 
  419:     def handle_row(self, end_condition):
  420:         dispatcher = {
  421:             (saxtools.START_ELEMENT, fm_ns, u'COL'):
  422:             self.handle_col,
  423:             }
  424:         if self.debug_data:
  425:             self.logger.debug("START ROW")
  426:         self.xml_data = {}
  427:         self.colIdx = 0
  428:         yield None
  429:     
  430:         #delegate is a generator that handles all the events "within"
  431:         #this element
  432:         delegate = None
  433:         while not self.event == end_condition:
  434:             delegate = saxtools.tenorsax.event_loop_body(
  435:                 dispatcher, delegate, self.event)
  436:             yield None
  437:         
  438:         #Element closed.  Wrap up
  439:         if self.debug_data:
  440:             self.logger.debug("END ROW")
  441:         self.rowcnt += 1
  442:         # process collected row data
  443:         update=False
  444:         id_val=''
  445:         # synchronize by id_field
  446:         if self.id_field:
  447:             if self.id_type == 'integer':
  448:                 try:
  449:                     id_val = int(self.xml_data[self.xml_id])
  450:                 except:
  451:                     pass
  452:             else:
  453:                 id_val = self.xml_data[self.xml_id]
  454: 
  455:             if not id_val:
  456:                 # abort update
  457:                 self.logger.error("ERROR: unable to sync! empty id in row %s"%self.rowcnt)
  458:                 return
  459:                 
  460:             if id_val in self.dbIDs:
  461:                 self.dbIDs[id_val] += 1
  462:                 update=True
  463: 
  464:         # collect all values
  465:         args = []
  466:         for fn in self.xml_update_list:
  467:             # do not update id_field
  468:             if update and fn == self.xml_id:
  469:                 continue
  470:             
  471:             f = self.xml_field_map[fn]
  472:             val = self.xml_data[fn]
  473:             type = self.sql_fields[f.getName()].getType()
  474:             if type == "date" and len(val) == 0: 
  475:                 # empty date field
  476:                 val = None
  477:                 
  478:             elif type == "integer" and len(val) == 0: 
  479:                 # empty int field
  480:                 val = None
  481:                 
  482:             args.append(val)
  483:                     
  484:         if update:
  485:             # update existing row (by id_field)
  486:             if self.read_before_update:
  487:                 # read data
  488:                 if self.debug_data:
  489:                     self.logger.debug("update check: %s = %s"%(id_val, args))
  490:                 oldrow = SimpleSearch(self.db, self.selQuery, [id_val], ascii=self.ascii_db)
  491:                 #i = 0
  492:                 #for v in oldrow[0]:
  493:                 #    logging.debug("v: %s = %s (%s)"%(v,args[i],v==args[i]))
  494:                 #    i += 1
  495:                 if tuple(oldrow[0]) != tuple(args):
  496:                     # data has changed -- update
  497:                     if self.debug_data:
  498:                         self.logger.debug("really update: %s = %s"%(id_val, args))
  499:                     args.append(id_val) # last arg is id
  500:                     SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)
  501:                     
  502:             else:
  503:                 # always update
  504:                 if self.debug_data:
  505:                     self.logger.debug("update: %s = %s"%(id_val, args))
  506:                 args.append(id_val) # last arg is id
  507:                 SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db)
  508: 
  509:         else:
  510:             # create new row
  511:             if self.debug_data:
  512:                 self.logger.debug("insert: %s"%args)
  513:             SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db)
  514: 
  515:         #self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val))
  516:         if (self.rowcnt % 100) == 0:
  517:             self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val))
  518:             self.dbCon.commit()
  519:             
  520:         return
  521: 
  522:     def handle_col(self, end_condition):
  523:         dispatcher = {
  524:             (saxtools.START_ELEMENT, fm_ns, u'DATA'):
  525:             self.handle_data_tag,
  526:             }
  527:         #print "START COL"
  528:         yield None
  529:         #delegate is a generator that handles all the events "within"
  530:         #this element
  531:         delegate = None
  532:         while not self.event == end_condition:
  533:             delegate = saxtools.tenorsax.event_loop_body(
  534:                 dispatcher, delegate, self.event)
  535:             yield None
  536:         #Element closed.  Wrap up
  537:         #print "END COL"
  538:         self.colIdx += 1
  539:         return
  540: 
  541:     def handle_data_tag(self, end_condition):
  542:         #print "START DATA"
  543:         content = u''
  544:         yield None
  545:         # gather child elements
  546:         while not self.event == end_condition:
  547:             if self.event[0] == saxtools.CHARACTER_DATA:
  548:                 content += self.params
  549:             yield None
  550:         #Element closed.  Wrap up
  551:         fn = self.xml_field_names[self.colIdx]
  552:         self.xml_data[fn] = content
  553:         return
  554: 
  555: 
  556: def importFMPXML(options):
  557:     """import FileMaker XML file (FMPXMLRESULT format) into the table.     
  558:         @param options: dict of options
  559:         @param options.dsn: database connection string
  560:         @param options.table: name of the table the xml shall be imported into
  561:         @param options.filename: name of the XML file to import
  562:         @param options.update_fields: (optional) list of fields to update; default is to create all fields
  563:         @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
  564:         @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
  565:         @param options.lc_names: (optional) lower case and clean up field names from XML
  566:         @param options.keep_fields: (optional) don't add fields to SQL database
  567:         @param options.ascii_db: (optional) assume ascii encoding in db
  568:         @param options.replace_table: (optional) delete and re-insert data
  569:         @param options.backup_table: (optional) create backup of old table
  570:         """
  571:         
  572:     if getattr(options,'update_fields',None):
  573:         uf = {}
  574:         for f in options.update_fields.split(','):
  575:             if f.find(':') > 0:
  576:                 (n,t) = f.split(':')
  577:             else:
  578:                 n = f
  579:                 t = None
  580:             uf[n] = TableColumn(n,t)
  581:             
  582:         options.update_fields = uf
  583:     
  584:     if getattr(options,'id_field',None) and getattr(options,'replace_table',None):
  585:         logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
  586:         return
  587:         
  588:     parser = sax.make_parser()
  589:     #The "consumer" is our own handler
  590:     consumer = xml_handler(options)
  591:     #Initialize Tenorsax with handler
  592:     handler = saxtools.tenorsax(consumer)
  593:     #Resulting tenorsax instance is the SAX handler 
  594:     parser.setContentHandler(handler)
  595:     parser.setFeature(sax.handler.feature_namespaces, 1)
  596:     parser.parse(options.filename)  
  597:     
  598: 
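       # A minimal usage sketch: importFMPXML() can be driven from Python code instead of
       # the command line handling below; any object carrying the documented option
       # attributes works. The DSN, table and file name here are hypothetical.
       #
       #     class _Options(object):
       #         pass
       #     opts = _Options()
       #     opts.dsn = "dbname=mydb user=me"    # hypothetical connection string
       #     opts.table = "mytable"              # hypothetical target table
       #     opts.filename = "fmexport.xml"      # hypothetical FMPXMLRESULT export
       #     opts.id_field = "id"                # update existing rows via this column
       #     opts.sync_mode = True               # also delete db rows missing from the XML
       #     opts.lc_names = True                # clean and lower-case field names
       #     importFMPXML(opts)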
  599: if __name__ == "__main__":
  600:     from optparse import OptionParser
  601: 
  602:     opars = OptionParser()
  603:     opars.add_option("-f", "--file", 
  604:                      dest="filename",
  605:                      help="FMPXML file name", metavar="FILE")
  606:     opars.add_option("-c", "--dsn", 
  607:                      dest="dsn", 
  608:                      help="database connection string")
  609:     opars.add_option("-t", "--table", 
  610:                      dest="table", 
  611:                      help="database table name")
  612:     opars.add_option("--fields", default=None, 
  613:                      dest="update_fields", 
  614:                      help="list of fields to update (comma separated, sql-names)", metavar="LIST")
  615:     opars.add_option("--id-field", default=None, 
  616:                      dest="id_field", 
  617:                      help="name of id field for synchronisation (only appends data otherwise, sql-name)", metavar="NAME")
  618:     opars.add_option("--sync", "--sync-mode", default=False, action="store_true", 
  619:                      dest="sync_mode", 
  620:                      help="do full sync based on id field (remove unmatched rows from db)")
  621:     opars.add_option("--lc-names", default=False, action="store_true", 
  622:                      dest="lc_names", 
  623:                      help="clean and lower case field names from XML")
  624:     opars.add_option("--keep-fields", default=False, action="store_true", 
  625:                      dest="keep_fields", 
  626:                      help="don't add fields from XML to SQL table")
  627:     opars.add_option("--ascii-db", default=False, action="store_true", 
  628:                      dest="ascii_db", 
  629:                      help="the SQL database stores ASCII instead of unicode")
  630:     opars.add_option("--replace", default=False, action="store_true", 
  631:                      dest="replace_table", 
  632:                      help="replace table i.e. delete and re-insert data")
  633:     opars.add_option("--backup", default=False, action="store_true", 
  634:                      dest="backup_table", 
  635:                      help="create backup of old table")
  636:     opars.add_option("--read-before-update", default=False, action="store_true", 
  637:                      dest="read_before_update", 
  638:                      help="read all data to check if it really changed")
  639:     opars.add_option("-d", "--debug", default=False, action="store_true", 
  640:                      dest="debug", 
  641:                      help="debug mode (more output)")
  642:     opars.add_option("--debug-data", default=False, action="store_true", 
  643:                      dest="debug_data", 
  644:                      help="debug mode for data (even more output)")
  645:     
  646:     (options, args) = opars.parse_args()
  647:     
  648:     if len(sys.argv) < 2 or options.filename is None or options.dsn is None:
  649:         print "importFMPXML "+version_string
  650:         opars.print_help()
  651:         sys.exit(1)
  652:     
  653:     if options.debug:
  654:         loglevel = logging.DEBUG
  655:     else:
  656:         loglevel = logging.INFO
  657:     
  658:     logging.basicConfig(level=loglevel, 
  659:                         format='%(asctime)s %(levelname)s %(message)s',
  660:                         datefmt='%H:%M:%S')
  661: 
  662:     importFMPXML(options)
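           # example invocation (file name, DSN and table name are placeholders):
           #   python importFMPXML.py -f fmexport.xml -c "dbname=mydb user=me" -t mytable \
           #       --id-field=id --sync --lc-names --read-before-update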
  663: 
  664: 
  665:     
  666: 
