version 1.10, 2007/04/05 14:20:08
|
version 1.27, 2008/09/05 19:05:57
|
Line 12 from amara import saxtools
|
Line 12 from amara import saxtools
|
|
|
try: |
try: |
import psycopg2 as psycopg |
import psycopg2 as psycopg |
|
import psycopg2.extensions |
|
# switch to unicode |
|
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) |
psyco = 2 |
psyco = 2 |
except: |
except: |
import psycopg |
import psycopg |
Line 19 except:
|
Line 22 except:
|
|
|
fm_ns = 'http://www.filemaker.com/fmpxmlresult' |
fm_ns = 'http://www.filemaker.com/fmpxmlresult' |
|
|
version_string = "V0.4 ROC 29.3.2007" |
version_string = "V0.6.1 ROC 2.7.2008" |
|
|
|
def unicodify(text, withNone=False):
    """Decode a str (UTF-8 or Latin-1 representation) into a unicode object.

    @param text: byte string to decode; unicode objects and other falsy
        values are handled without decoding
    @param withNone: if True, None is returned as None instead of u""
    """
    if withNone and text is None:
        return None
    if not text:
        return u""
    if isinstance(text, str):
        try:
            return text.decode('utf-8')
        except UnicodeDecodeError:
            # not valid UTF-8 -- fall back to Latin-1, which maps every
            # single byte value and therefore cannot fail.
            # (was a bare "except:", which also hid unrelated errors
            # such as KeyboardInterrupt)
            return text.decode('latin-1')
    else:
        # already unicode (or some other non-str object) -- pass through
        return text
|
|
|
def utf8ify(text, withNone=False):
    """Encode a unicode object (or pass through a byte string) as UTF-8.

    @param text: unicode object to encode; byte strings and other
        non-unicode values are returned unchanged
    @param withNone: if True, None is returned as None instead of ""
    """
    if text is None and withNone:
        return None
    if not text:
        return ""
    if not isinstance(text, unicode):
        # already a byte string (or other non-unicode value) -- pass through
        return text
    return text.encode('utf-8')
|
|
def getTextFromNode(nodename): |
def getTextFromNode(nodename): |
"""get the cdata content of a node""" |
"""get the cdata content of a node""" |
Line 42 def sql_quote(v):
|
Line 70 def sql_quote(v):
|
|
|
def SimpleSearch(curs,query, args=None, ascii=False): |
def SimpleSearch(curs,query, args=None, ascii=False): |
"""execute sql query and return data""" |
"""execute sql query and return data""" |
#logging.debug("executing: "+query) |
#logger.debug("executing: "+query) |
if ascii: |
if ascii: |
# encode all in UTF-8 |
# encode all in UTF-8 |
query = query.encode("UTF-8") |
query = utf8ify(query) |
if args is not None: |
if args is not None: |
encargs = [] |
encargs = [] |
for a in args: |
for a in args: |
if a is not None: |
encargs.append(utf8ify(a, withNone=True)) |
a = a.encode("UTF-8") |
|
encargs.append(a) |
|
|
|
args = encargs |
args = encargs |
|
|
curs.execute(query, args) |
curs.execute(query, args) |
#logging.debug("sql done") |
#logger.debug("sql done") |
try: |
try: |
return curs.fetchall() |
return curs.fetchall() |
except: |
except: |
Line 98 class xml_handler:
|
Line 124 class xml_handler:
|
@param options.keep_fields: (optional) don't add fields to SQL database |
@param options.keep_fields: (optional) don't add fields to SQL database |
@param options.ascii_db: (optional) assume ascii encoding in db |
@param options.ascii_db: (optional) assume ascii encoding in db |
@param options.replace_table: (optional) delete and re-insert data |
@param options.replace_table: (optional) delete and re-insert data |
|
@param options.backup_table: (optional) create backup of old table (breaks indices) |
|
@param options.use_logger_instance: (optional) use this instance of a logger |
""" |
""" |
|
|
|
# set up logger |
|
if hasattr(options, 'use_logger_instance'): |
|
self.logger = options.use_logger_instance |
|
else: |
|
self.logger = logging.getLogger('db.import.fmpxml') |
|
|
|
|
# set up parser |
# set up parser |
self.event = None |
self.event = None |
self.top_dispatcher = { |
self.top_dispatcher = { |
Line 111 class xml_handler:
|
Line 146 class xml_handler:
|
|
|
# connect database |
# connect database |
self.dbCon = psycopg.connect(options.dsn) |
self.dbCon = psycopg.connect(options.dsn) |
|
logging.debug("DB encoding: %s"%getattr(self.dbCon, 'encoding', 'UNKNOWN')) |
self.db = self.dbCon.cursor() |
self.db = self.dbCon.cursor() |
assert self.db, "AIIEE no db cursor for %s!!"%options.dsn |
assert self.db, "AIIEE no db cursor for %s!!"%options.dsn |
|
|
Line 123 class xml_handler:
|
Line 159 class xml_handler:
|
self.ascii_db = getattr(options,"ascii_db",None) |
self.ascii_db = getattr(options,"ascii_db",None) |
self.replace_table = getattr(options,"replace_table",None) |
self.replace_table = getattr(options,"replace_table",None) |
self.backup_table = getattr(options,"backup_table",None) |
self.backup_table = getattr(options,"backup_table",None) |
|
self.read_before_update = getattr(options,"read_before_update",None) |
|
|
logging.debug("dsn: "+repr(getattr(options,"dsn",None))) |
self.logger.debug("dsn: "+repr(getattr(options,"dsn",None))) |
logging.debug("table: "+repr(self.table)) |
self.logger.debug("table: "+repr(self.table)) |
logging.debug("update_fields: "+repr(self.update_fields)) |
self.logger.debug("update_fields: "+repr(self.update_fields)) |
logging.debug("id_field: "+repr(self.id_field)) |
self.logger.debug("id_field: "+repr(self.id_field)) |
logging.debug("sync_mode: "+repr(self.sync_mode)) |
self.logger.debug("sync_mode: "+repr(self.sync_mode)) |
logging.debug("lc_names: "+repr(self.lc_names)) |
self.logger.debug("lc_names: "+repr(self.lc_names)) |
logging.debug("keep_fields: "+repr(self.keep_fields)) |
self.logger.debug("keep_fields: "+repr(self.keep_fields)) |
logging.debug("ascii_db: "+repr(self.ascii_db)) |
self.logger.debug("ascii_db: "+repr(self.ascii_db)) |
logging.debug("replace_table: "+repr(self.replace_table)) |
self.logger.debug("replace_table: "+repr(self.replace_table)) |
|
self.logger.debug("backup_table: "+repr(self.backup_table)) |
|
self.logger.debug("read_before_update: "+repr(self.read_before_update)) |
|
|
self.dbIDs = {} |
self.dbIDs = {} |
self.rowcnt = 0 |
self.rowcnt = 0 |
Line 145 class xml_handler:
|
Line 184 class xml_handler:
|
self.dbIDs[id[0]] = 0; |
self.dbIDs[id[0]] = 0; |
self.rowcnt += 1 |
self.rowcnt += 1 |
|
|
logging.info("%d entries in DB to sync"%self.rowcnt) |
self.logger.info("%d entries in DB to sync"%self.rowcnt) |
|
|
# names of fields in XML file |
# names of fields in XML file |
self.xml_field_names = [] |
self.xml_field_names = [] |
Line 163 class xml_handler:
|
Line 202 class xml_handler:
|
} |
} |
#First round through the generator corresponds to the |
#First round through the generator corresponds to the |
#start element event |
#start element event |
logging.debug("START METADATA") |
self.logger.info("reading metadata...") |
|
self.logger.debug("START METADATA") |
yield None |
yield None |
|
|
#delegate is a generator that handles all the events "within" |
#delegate is a generator that handles all the events "within" |
Line 175 class xml_handler:
|
Line 215 class xml_handler:
|
yield None |
yield None |
|
|
#Element closed. Wrap up |
#Element closed. Wrap up |
logging.debug("END METADATA") |
self.logger.debug("END METADATA") |
|
|
# rename table for backup |
# rename table for backup |
if self.backup_table: |
if self.backup_table: |
self.orig_table = self.table |
self.orig_table = self.table |
self.table = self.table + "_tmp" |
self.tmp_table = self.table + "_tmp" |
|
backup_name = "%s_%s"%(self.table,time.strftime('%Y_%m_%d_%H_%M_%S')) |
|
|
# remove old temp table |
# remove old temp table |
qstr = "DROP TABLE %s"%(self.table) |
qstr = "DROP TABLE %s"%(self.tmp_table) |
try: |
try: |
self.db.execute(qstr) |
self.db.execute(qstr) |
except: |
except: |
Line 191 class xml_handler:
|
Line 233 class xml_handler:
|
self.dbCon.commit() |
self.dbCon.commit() |
|
|
if self.id_field: |
if self.id_field: |
# sync mode -- copy table |
# sync mode -- copy backup table, update current table |
logging.info("copy table %s to %s"%(self.orig_table,self.table)) |
self.logger.info("copy table %s to %s"%(self.table,backup_name)) |
qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(self.table,self.orig_table) |
qstr = "CREATE TABLE %s AS (SELECT * FROM %s)"%(backup_name,self.table) |
|
|
else: |
else: |
# rename table and create empty new one |
# replace mode -- create empty tmp table, insert into tmp table |
logging.info("create empty table %s"%(self.table)) |
self.table = self.tmp_table |
|
self.logger.info("create empty table %s"%(self.table)) |
qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table) |
qstr = "CREATE TABLE %s AS (SELECT * FROM %s WHERE 1=0)"%(self.table,self.orig_table) |
|
|
self.db.execute(qstr) |
self.db.execute(qstr) |
Line 205 class xml_handler:
|
Line 248 class xml_handler:
|
|
|
# delete data from table for replace |
# delete data from table for replace |
if self.replace_table: |
if self.replace_table: |
logging.info("delete data from table %s"%(self.table)) |
self.logger.info("delete data from table %s"%(self.table)) |
qstr = "TRUNCATE TABLE %s"%(self.table) |
qstr = "TRUNCATE TABLE %s"%(self.table) |
self.db.execute(qstr) |
self.db.execute(qstr) |
self.dbCon.commit() |
self.dbCon.commit() |
Line 213 class xml_handler:
|
Line 256 class xml_handler:
|
# try to match date style with XML |
# try to match date style with XML |
self.db.execute("set datestyle to 'german'") |
self.db.execute("set datestyle to 'german'") |
|
|
# translate id_field (SQL-name) to XML-name |
#self.logger.debug("xml-fieldnames:"+repr(self.xml_field_names)) |
self.xml_id = self.sql_field_map.get(self.id_field, None) |
|
|
|
#logging.debug("xml-fieldnames:"+repr(self.xml_field_names)) |
|
# get list of fields and types of db table |
# get list of fields and types of db table |
qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'" |
qstr="select attname, format_type(pg_attribute.atttypid, pg_attribute.atttypmod) from pg_attribute, pg_class where attrelid = pg_class.oid and pg_attribute.attnum > 0 and relname = '%s'" |
self.sql_fields={} |
self.sql_fields={} |
for f in SimpleSearch(self.db, qstr%self.table): |
for f in SimpleSearch(self.db, qstr%self.table): |
n = f[0] |
fn = f[0] |
t = f[1] |
ft = f[1] |
#print "SQL fields: %s (%s)"%(n,t) |
#print "SQL fields: %s (%s)"%(n,t) |
self.sql_fields[n] = TableColumn(n,t) |
self.sql_fields[fn] = TableColumn(fn,ft) |
|
|
|
# translate id_field (SQL-name) to XML-name |
|
self.xml_id = self.sql_field_map.get(self.id_field, None) |
|
# get type of id_field |
|
if self.id_field: |
|
self.id_type = self.sql_fields[self.id_field].getType() |
|
else: |
|
self.id_type = None |
|
|
# check fields to update |
# check fields to update |
if self.update_fields is None: |
if self.update_fields is None: |
if self.keep_fields: |
if self.keep_fields: |
# update existing fields |
# update all existing fields from sql (when they are in the xml file) |
self.update_fields = self.sql_fields |
self.update_fields = {} |
|
for f in self.sql_fields.keys(): |
|
if self.sql_field_map.has_key(f): |
|
xf = self.sql_field_map[f] |
|
self.update_fields[f] = self.xml_field_map[xf] |
|
|
else: |
else: |
# update all fields |
# update all fields |
Line 253 class xml_handler:
|
Line 304 class xml_handler:
|
if not self.keep_fields: |
if not self.keep_fields: |
# adjust db table to fields in XML and update_fields |
# adjust db table to fields in XML and update_fields |
for f in self.xml_field_map.values(): |
for f in self.xml_field_map.values(): |
logging.debug("sync-fieldname: %s"%f.getName()) |
self.logger.debug("sync-fieldname: %s"%f.getName()) |
sf = self.sql_fields.get(f.getName(), None) |
sf = self.sql_fields.get(f.getName(), None) |
uf = self.update_fields.get(f.getName(), None) |
uf = self.update_fields.get(f.getName(), None) |
if sf is not None: |
if sf is not None: |
# name in db -- check type |
# name in db -- check type |
if f.getType() != sf.getType(): |
if f.getType() != sf.getType(): |
logging.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType())) |
self.logger.debug("field %s has different type (%s vs %s)"%(f,f.getType(),sf.getType())) |
elif uf is not None: |
elif uf is not None: |
# add field to table |
# add field to table |
qstr="alter table %s add %s %s"%(self.table,uf.getName(),uf.getType()) |
fn = uf.getName() |
logging.info("db add field:"+qstr) |
ft = uf.getType() |
|
qstr="alter table %s add \"%s\" %s"%(self.table,fn,ft) |
|
self.logger.info("db add field:"+qstr) |
|
|
if self.ascii_db and type(qstr)==types.UnicodeType: |
if self.ascii_db and type(qstr)==types.UnicodeType: |
qstr=qstr.encode('utf-8') |
qstr=qstr.encode('utf-8') |
|
|
self.db.execute(qstr) |
self.db.execute(qstr) |
self.dbCon.commit() |
self.dbCon.commit() |
|
# add field to field list |
|
self.sql_fields[fn] = TableColumn(fn, ft) |
|
|
# prepare sql statements for update |
# prepare sql statements for update (do not update id_field) |
setStr=string.join(["%s = %%s"%self.xml_field_map[f] for f in self.xml_update_list], ', ') |
setStr=string.join(["\"%s\" = %%s"%self.xml_field_map[f] for f in self.xml_update_list if f != self.xml_id], ', ') |
self.updQuery="UPDATE %s SET %s WHERE %s = %%s"%(self.table,setStr,self.id_field) |
self.updQuery="UPDATE %s SET %s WHERE \"%s\" = %%s"%(self.table,setStr,self.id_field) |
|
# and select (for update check) |
|
selStr=string.join([self.xml_field_map[f].getName() for f in self.xml_update_list if f != self.xml_id], ', ') |
|
self.selQuery="SELECT %s FROM %s WHERE \"%s\" = %%s"%(selStr,self.table,self.id_field) |
# and insert |
# and insert |
fields=string.join([self.xml_field_map[x].getName() for x in self.xml_update_list], ',') |
fields=string.join(["\"%s\""%self.xml_field_map[x].getName() for x in self.xml_update_list], ',') |
values=string.join(['%s' for f in self.xml_update_list], ',') |
values=string.join(['%s' for f in self.xml_update_list], ',') |
self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values) |
self.addQuery="INSERT INTO %s (%s) VALUES (%s)"%(self.table,fields,values) |
logging.debug("update-query: "+self.updQuery) |
self.logger.debug("update-query: "+self.updQuery) |
logging.debug("add-query: "+self.addQuery) |
self.logger.debug("sel-query: "+self.selQuery) |
|
self.logger.debug("add-query: "+self.addQuery) |
return |
return |
|
|
def handle_meta_field(self, end_condition): |
def handle_meta_field(self, end_condition): |
Line 295 class xml_handler:
|
Line 354 class xml_handler:
|
# map to sql name and default text type |
# map to sql name and default text type |
self.xml_field_map[name] = TableColumn(sqlname, 'text') |
self.xml_field_map[name] = TableColumn(sqlname, 'text') |
self.sql_field_map[sqlname] = name |
self.sql_field_map[sqlname] = name |
logging.debug("FIELD name: "+name) |
self.logger.debug("FIELD name: "+name) |
return |
return |
|
|
def handle_data_fields(self, end_condition): |
def handle_data_fields(self, end_condition): |
Line 305 class xml_handler:
|
Line 364 class xml_handler:
|
} |
} |
#First round through the generator corresponds to the |
#First round through the generator corresponds to the |
#start element event |
#start element event |
logging.debug("START RESULTSET") |
self.logger.info("reading data...") |
|
self.logger.debug("START RESULTSET") |
self.rowcnt = 0 |
self.rowcnt = 0 |
yield None |
yield None |
|
|
Line 318 class xml_handler:
|
Line 378 class xml_handler:
|
yield None |
yield None |
|
|
#Element closed. Wrap up |
#Element closed. Wrap up |
logging.debug("END RESULTSET") |
self.logger.debug("END RESULTSET") |
self.dbCon.commit() |
self.dbCon.commit() |
|
|
if self.sync_mode: |
if self.sync_mode: |
# delete unmatched entries in db |
# delete unmatched entries in db |
logging.info("deleting unmatched rows from db") |
self.logger.info("deleting unmatched rows from db") |
delQuery = "DELETE FROM %s WHERE %s = %%s"%(self.table,self.id_field) |
delQuery = "DELETE FROM %s WHERE \"%s\" = %%s"%(self.table,self.id_field) |
for id in self.dbIDs.keys(): |
for id in self.dbIDs.keys(): |
# find all not-updated fields |
# find all not-updated fields |
if self.dbIDs[id] == 0: |
if self.dbIDs[id] == 0: |
logging.info(" delete:"+id) |
self.logger.info(" delete: %s"%id) |
SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db) |
SimpleSearch(self.db, delQuery, [id], ascii=self.ascii_db) |
sys.exit(1) |
|
|
|
elif self.dbIDs[id] > 1: |
elif self.dbIDs[id] > 1: |
logging.info(" sync: ID %s used more than once?"%id) |
self.logger.info(" sync: ID %s used more than once?"%id) |
|
|
self.dbCon.commit() |
self.dbCon.commit() |
|
|
# reinstate backup tables |
# reinstate backup tables |
if self.backup_table: |
if self.backup_table and not self.id_field: |
backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S')) |
backup_name = "%s_%s"%(self.orig_table,time.strftime('%Y_%m_%d_%H_%M_%S')) |
logging.info("rename backup table %s to %s"%(self.orig_table,backup_name)) |
self.logger.info("rename backup table %s to %s"%(self.orig_table,backup_name)) |
qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name) |
qstr = "ALTER TABLE %s RENAME TO %s"%(self.orig_table,backup_name) |
self.db.execute(qstr) |
self.db.execute(qstr) |
logging.info("rename working table %s to %s"%(self.table,self.orig_table)) |
self.logger.info("rename working table %s to %s"%(self.table,self.orig_table)) |
qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table) |
qstr = "ALTER TABLE %s RENAME TO %s"%(self.table,self.orig_table) |
self.db.execute(qstr) |
self.db.execute(qstr) |
self.dbCon.commit() |
self.dbCon.commit() |
Line 355 class xml_handler:
|
Line 414 class xml_handler:
|
(saxtools.START_ELEMENT, fm_ns, u'COL'): |
(saxtools.START_ELEMENT, fm_ns, u'COL'): |
self.handle_col, |
self.handle_col, |
} |
} |
logging.debug("START ROW") |
self.logger.debug("START ROW") |
self.xml_data = {} |
self.xml_data = {} |
self.colIdx = 0 |
self.colIdx = 0 |
yield None |
yield None |
Line 369 class xml_handler:
|
Line 428 class xml_handler:
|
yield None |
yield None |
|
|
#Element closed. Wrap up |
#Element closed. Wrap up |
logging.debug("END ROW") |
self.logger.debug("END ROW") |
self.rowcnt += 1 |
self.rowcnt += 1 |
# process collected row data |
# process collected row data |
update=False |
update=False |
id_val='' |
id_val='' |
# synchronize by id_field |
# synchronize by id_field |
if self.id_field: |
if self.id_field: |
|
if self.id_type == 'integer': |
|
id_val = int(self.xml_data[self.xml_id]) |
|
else: |
id_val = self.xml_data[self.xml_id] |
id_val = self.xml_data[self.xml_id] |
|
|
if id_val in self.dbIDs: |
if id_val in self.dbIDs: |
self.dbIDs[id_val] += 1 |
self.dbIDs[id_val] += 1 |
update=True |
update=True |
Line 384 class xml_handler:
|
Line 447 class xml_handler:
|
# collect all values |
# collect all values |
args = [] |
args = [] |
for fn in self.xml_update_list: |
for fn in self.xml_update_list: |
|
# do not update id_field |
|
if update and fn == self.xml_id: |
|
continue |
|
|
f = self.xml_field_map[fn] |
f = self.xml_field_map[fn] |
val = self.xml_data[fn] |
val = self.xml_data[fn] |
type = self.sql_fields[f.getName()].getType() |
type = self.sql_fields[f.getName()].getType() |
Line 399 class xml_handler:
|
Line 466 class xml_handler:
|
|
|
if update: |
if update: |
# update existing row (by id_field) |
# update existing row (by id_field) |
# last argument is ID match |
if self.read_before_update: |
args.append(id_val) |
# read data |
logging.debug("update: %s = %s"%(id_val, args)) |
self.logger.debug("update check: %s = %s"%(id_val, args)) |
|
oldrow = SimpleSearch(self.db, self.selQuery, [id_val], ascii=self.ascii_db) |
|
#i = 0 |
|
#for v in oldrow[0]: |
|
# logging.debug("v: %s = %s (%s)"%(v,args[i],v==args[i])) |
|
# i += 1 |
|
if tuple(oldrow[0]) != tuple(args): |
|
# data has changed -- update |
|
self.logger.debug("really update: %s = %s"%(id_val, args)) |
|
args.append(id_val) # last arg is id |
|
SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db) |
|
|
|
else: |
|
# always update |
|
self.logger.debug("update: %s = %s"%(id_val, args)) |
|
args.append(id_val) # last arg is id |
SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db) |
SimpleSearch(self.db, self.updQuery, args, ascii=self.ascii_db) |
|
|
else: |
else: |
# create new row |
# create new row |
logging.debug("insert: %s"%args) |
self.logger.debug("insert: %s"%args) |
SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db) |
SimpleSearch(self.db, self.addQuery, args, ascii=self.ascii_db) |
|
|
#logging.info(" row:"+"%d (%s)"%(self.rowcnt,id_val)) |
#self.logger.info(" row:"+"%d (%s)"%(self.rowcnt,id_val)) |
if (self.rowcnt % 10) == 0: |
if (self.rowcnt % 100) == 0: |
logging.info(" row:"+"%d (%s)"%(self.rowcnt,id_val)) |
self.logger.info(" row:"+"%d (id:%s)"%(self.rowcnt,id_val)) |
self.dbCon.commit() |
self.dbCon.commit() |
|
|
return |
return |
Line 450 class xml_handler:
|
Line 532 class xml_handler:
|
return |
return |
|
|
|
|
|
def importFMPXML(options):
    """import FileMaker XML file (FMPXMLRESULT format) into the table.

    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    @param options.backup_table: (optional) create backup of old table
    """
    # turn the comma-separated "name[:type]" specs into TableColumn objects
    if getattr(options, 'update_fields', None):
        field_map = {}
        for spec in options.update_fields.split(','):
            if spec.find(':') > 0:
                (name, ftype) = spec.split(':')
            else:
                name = spec
                ftype = None
            field_map[name] = TableColumn(name, ftype)
        options.update_fields = field_map

    # sync (id_field) and replace are mutually exclusive modes
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        return

    parser = sax.make_parser()
    # the "consumer" is our own handler; wrapping it in tenorsax yields
    # the actual SAX content handler
    consumer = xml_handler(options)
    parser.setContentHandler(saxtools.tenorsax(consumer))
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
|
|
|
if __name__ == "__main__": |
if __name__ == "__main__": |
Line 489 if __name__ == "__main__":
|
Line 611 if __name__ == "__main__":
|
help="replace table i.e. delete and re-insert data") |
help="replace table i.e. delete and re-insert data") |
opars.add_option("--backup", default=False, action="store_true", |
opars.add_option("--backup", default=False, action="store_true", |
dest="backup_table", |
dest="backup_table", |
help="create backup of old table (breaks indices)") |
help="create backup of old table") |
|
opars.add_option("--read-before-update", default=False, action="store_true", |
|
dest="read_before_update", |
|
help="read all data to check if it really changed") |
opars.add_option("-d", "--debug", default=False, action="store_true", |
opars.add_option("-d", "--debug", default=False, action="store_true", |
dest="debug", |
dest="debug", |
help="debug mode (more output)") |
help="debug mode (more output)") |
Line 510 if __name__ == "__main__":
|
Line 635 if __name__ == "__main__":
|
format='%(asctime)s %(levelname)s %(message)s', |
format='%(asctime)s %(levelname)s %(message)s', |
datefmt='%H:%M:%S') |
datefmt='%H:%M:%S') |
|
|
|
importFMPXML(options) |
|
|
def importFMPXML(options):
    """import FileMaker XML file (FMPXMLRESULT format) into the table.

    NOTE(review): this definition duplicates the importFMPXML defined
    earlier in this file (the file appears to interleave two versions) --
    confirm which copy is intended to survive.

    @param options: dict of options
    @param options.dsn: database connection string
    @param options.table: name of the table the xml shall be imported into
    @param options.filename: xmlfile filename
    @param options.update_fields: (optional) list of fields to update; default is to create all fields
    @param options.id_field: (optional) field which uniquely identifies an entry for updating purposes.
    @param options.sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
    @param options.lc_names: (optional) lower case and clean up field names from XML
    @param options.keep_fields: (optional) don't add fields to SQL database
    @param options.ascii_db: (optional) assume ascii encoding in db
    @param options.replace_table: (optional) delete and re-insert data
    """
    if getattr(options, 'update_fields', None):
        uf = {}
        for f in options.update_fields.split(','):
            # the ":type" suffix is optional -- an unguarded split(':')
            # raised ValueError for a bare field name
            if f.find(':') > 0:
                (n, t) = f.split(':')
            else:
                n = f
                t = None
            uf[n] = TableColumn(n, t)

        options.update_fields = uf

    # sync (id_field) and replace are mutually exclusive modes
    if getattr(options, 'id_field', None) and getattr(options, 'replace_table', None):
        logging.error("ABORT: sorry, you can't do both sync (id_field) and replace")
        sys.exit(1)

    parser = sax.make_parser()
    #The "consumer" is our own handler
    consumer = xml_handler(options)
    #Initialize Tenorsax with handler
    handler = saxtools.tenorsax(consumer)
    #Resulting tenorsax instance is the SAX handler
    parser.setContentHandler(handler)
    parser.setFeature(sax.handler.feature_namespaces, 1)
    parser.parse(options.filename)
|
|
|
|
|
|
|