--- ZSQLExtend/ZSQLExtend.py 2007/04/20 08:33:17 1.115
+++ ZSQLExtend/ZSQLExtend.py 2008/02/15 13:05:41 1.124
@@ -56,6 +56,36 @@ def analyseIntSearch(word):
else:
return "BETWEEN "+splitted[0]+" AND "+splitted[1]
+def unicodify(str):
+ """decode str (utf-8 or latin-1 representation) into unicode object"""
+ if not str:
+ return u""
+ if type(str) is StringType:
+ try:
+ return str.decode('utf-8')
+ except:
+ return str.decode('latin-1')
+ else:
+ return str
+
+def utf8ify(str):
+ """encode unicode object or string into byte string in utf-8 representation"""
+ if not str:
+ return ""
+ if type(str) is StringType:
+ return str
+ else:
+ return str.encode('utf-8')
+
+
+def setPsycopg2UseUnicode():
+ """force Psycopg2DA to return unicode objects"""
+ try:
+ import psycopg2
+ import psycopg2.extensions
+ psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
+ except:
+ logging.error("Unable to force psycopg2 to use unicode")
def sql_quote(v):
@@ -314,9 +344,8 @@ class ZSQLExtendFolder(Folder,Persistent
logger("update xml",logging.INFO,queryStr)
self.ZSQLSimpleSearch(queryStr)
ret+="ud: %s \n"%field
- else:
-
+ else:
fields=",".join(dataSet.keys())
values=",".join([""" %s """%self.ZSQLQuote(dataSet[x]) for x in dataSet.keys()])
@@ -325,101 +354,13 @@ class ZSQLExtendFolder(Folder,Persistent
self.ZSQLSimpleSearch(queryStr)
logger("update xml",logging.INFO,queryStr)
-
-
-
return ret
- def importXMLFile(self,table,containerTagName,fieldNames,data=None,identify=None,filename=None,RESPONSE=None):
- #TODO: finish importXMLFile
- '''
- Import XML file into the table
- @param table: name of the table the xml shall be imported into
- @param containerTagName: XML-Tag which describes a dataset
- @param file: xmlfile handle
- @param identify: (optional) field res. tag which identifies a entry uniquely for updating purposes.
- @param RESPONSE: (optional)
- '''
- ret=""
- from xml.dom.pulldom import parseString
-
- doc=parseString(file.read())
- while 1:
- node=doc.getEvent()
-
- if node is None:
- break;
- else:
- if node[1].nodeName==containerTagName:
- doc.expandNode(node[1])
- cols=node[1].getElementsByTagName('COL')
- dataSet=[]
- for col in cols:
- data=col.getElementsByTagName('DATA')
- dataSet.append(getTextFromNode(data[0]))
- update=False
- if identify:
-
- nr=fieldNames.index(identify)
- field=dataSet[nr]
-
- searchStr="""select %s from %s where %s = '%s'"""%(identify,table,identify,field)
- logger("import xml",logging.INFO,searchStr)
- search=self.ZSQLSimpleSearch(searchStr)
- if search:
- update=True
-
- if update:
- tmp=[]
- for fieldName in fieldNames:
- tmp.append("""%s = %s"""%(fieldName,self.ZSQLQuote(dataSet[fieldNames.index(fieldName)])))
- setStr=",".join(tmp)
- nr=fieldNames.index(identify)
- field=dataSet[nr]
-
- queryStr="""UPDATE %s SET %s WHERE %s = '%s' """%(table,setStr,identify,field)
- logger("update xml",logging.INFO,queryStr)
- self.ZSQLSimpleSearch(queryStr)
- ret+="ud: %s \n"%field
- else:
-
-
- fields=",".join(fieldNames)
- values=",".join([""" %s """%self.ZSQLQuote(x) for x in dataSet])
-
-
- queryStr="""INSERT INTO %s (%s) VALUES (%s)"""%(table,fields,values)
- self.ZSQLSimpleSearch(queryStr)
- logger("update xml",logging.INFO,queryStr)
- ret+="ad: %s \n"%field
-
- elif node[1].nodeName=="METADATA":
- fieldNames=[]
- doc.expandNode(node[1])
-
- names=node[1].getElementsByTagName('FIELD')
-
- for name in names:
- fieldNames.append(name.getAttribute('NAME'))
-
- logger("update xml: fieldnames",logging.INFO,repr(fieldNames))
- qstr="""select attname from pg_attribute, pg_class where attrelid = pg_class.oid and relname = '%s' """
- columns=[x.attname for x in self.ZSQLSimpleSearch(qstr%table)]
-
- for fieldName in fieldNames:
- logger("update xml: fieldname",logging.INFO,repr(fieldName))
- if fieldName not in columns:
- qstr="""alter table %s add %s %s"""
- self.ZSQLSimpleSearch(qstr%(table,fieldName,'text'))
- logger("update xml: fieldname add",logging.INFO,qstr%(table,fieldName,'text'))
- #fn=node[1].getAttribute("xml:id")
- #nf=file("xtf/"+fn+".xtf",'w')
- #nf.write(""""""+node[1].toxml()+"")
- #print "wrote: %s"%fn
-
-
- def importXMLFileFMP(self,table,dsn=None,uploadfile=None,update_fields=None,id_field=None,sync_mode=False,replace=False,redirect_url=None,ascii_db=False,RESPONSE=None):
+ def importXMLFileFMP(self,tables,dsn=None,uploadfile=None,update_fields=None,id_field=None,sync_mode=False,
+ lc_names=True,keep_fields=False,ascii_db=False,replace=False,backup=False,
+ debug=False,log_to_response=False,
+ redirect_url=None,RESPONSE=None):
'''
Import FileMaker XML file (FMPXMLRESULT format) into the table.
@param dsn: database connection string
@@ -428,13 +369,18 @@ class ZSQLExtendFolder(Folder,Persistent
@param update_fields: (optional) list of fields to update; default is to create all fields
@param id_field: (optional) field which uniquely identifies an entry for updating purposes.
@param sync_mode: (optional) really synchronise, i.e. delete entries not in XML file
+ @param lc_names: (optional) lower case and clean up field names from XML
+ @param keep_fields: (optional) don't add fields to SQL database
+ @param ascii_db: (optional) assume ascii encoding in db
+ @param replace: (optional) delete and re-insert data
+ @param backup: (optional) create backup of old table (breaks indices)
@param RESPONSE: (optional)
@param redirect_url: (optional) url for redirecting after the upload is done
'''
tfilehd,filename=tempfile.mkstemp()
tfile=os.fdopen(tfilehd,'w')
- logging.error("import %s"%uploadfile)
+ logging.info("import %s"%uploadfile)
for c in uploadfile.read():
tfile.write(c)
tfile.close()
@@ -443,23 +389,54 @@ class ZSQLExtendFolder(Folder,Persistent
if not dsn:
dsn=self.getConnectionObj().connection_string
+
+ tablelist=tables.split(',')
+ logging.debug("tablelist: %s" %tablelist)
+ #table=tables
+
+ for table in tablelist :
+ logging.debug("table: %s" %table)
+ options=Options()
+ options.dsn=dsn
+ options.table=table
+ options.filename=filename
+ options.update_fields=update_fields
+ options.id_field=id_field
+ options.sync_mode=sync_mode
+ options.lc_names=lc_names
+ options.replace_table=replace
+ options.keep_fields=keep_fields
+ options.ascii_db=ascii_db
+ options.replace_table=replace
+ options.backup_table=backup
+ options.debug=debug
+
+ if RESPONSE and log_to_response:
+ # set up logging to response as plain text
+ RESPONSE.setHeader("Content-Type","text/plain; charset=utf-8")
+ RESPONSE.write("Import FMPXML file...\n\n")
+ loghandler = logging.StreamHandler(RESPONSE)
+ if debug:
+ loghandler.setLevel(logging.DEBUG)
+ else:
+ loghandler.setLevel(logging.INFO)
+ logger = logging.getLogger('db.import.fmpxml')
+ logger.addHandler(loghandler)
+ options.use_logger_instance = logger
+
+ importFMPXML(options)
+
- options=Options()
- options.dsn=dsn
- options.table=table
- options.filename=filename
- options.update_fields=update_fields
- options.id_field=id_field
- options.sync_mode=sync_mode
- options.replace_table=replace
- options.lc_names=True
- options.ascii_db=ascii_db
- importFMPXML(options)
+ if RESPONSE and log_to_response:
+ loghandler.flush()
+ RESPONSE.write("\n\n DONE!")
+
+ elif RESPONSE and redirect_url:
+ RESPONSE.redirect(redirect_url)
+
os.remove(filename)
-
- if RESPONSE and redirect_url:
- RESPONSE.redirect(redirect_url)
+
def generateIndex(self,field,index_name,table,RESPONSE=None):
"""erzeuge ein Index Objekt einem Feld (experimental)
@@ -504,6 +481,27 @@ class ZSQLExtendFolder(Folder,Persistent
"""
return urllib.quote(txt)
+
+ def createIdSet(self, resultset, idField=None):
+ """returns a frozenset of IDs from a SQL resultset (using idField), or a frozenset of the resultset rows themselves (if idField is None)"""
+ logging.debug("createidset for idfield %s"%idField)
+ if idField is None:
+ return frozenset(resultset)
+ else:
+ idlist = [r[idField] for r in resultset]
+ return frozenset(idlist)
+
+ def opIdSet(self, a, b, op):
+ """operate on sets a and b"""
+ logging.debug("opidset with op %s"%op)
+ if (op == 'intersect'):
+ return a.intersection(b)
+ elif (op == 'union'):
+ return a.union(b)
+ elif (op == 'diff'):
+ return a.difference(b)
+
+
def searchRel(self,relStatement,statement,wherePart,classes):
"""suche relative haufigkeiten (experimental)"""
ret={}
@@ -558,13 +556,13 @@ class ZSQLExtendFolder(Folder,Persistent
return pt()
- def changeZSQLExtend(self,label,description,weight=0,REQUEST=None,connection_id=None):
+ def changeZSQLExtend(self,label,description,weight=0,connection_id=None,REQUEST=None,):
"""change the Konfiguration"""
self.connection_id=connection_id
self.weight=weight
self.label=label
self.description=description
-
+
if REQUEST is not None:
return self.manage_main(self, REQUEST)
@@ -573,7 +571,11 @@ class ZSQLExtendFolder(Folder,Persistent
@param str: string der Formatiert werden soll.
@param url: (optional) default ist "None", sonderfall erzeugt einen Link aus String mit unterliegender url
"""
- #url=None
+ #logging.debug("formatascii str=%s url=%s"%(repr(str),repr(url)))
+
+ if not str:
+ return ""
+
str=str.rstrip().lstrip()
if url and str:
@@ -587,7 +589,9 @@ class ZSQLExtendFolder(Folder,Persistent
retStr+="""%s
"""%(strUrl,word)
str=retStr
if str:
- return re.sub(r"[\n]","
",str)
+ retStr = re.sub(r"[\n]","
",str)
+ #logging.debug("formatascii out=%s"%(repr(retStr)))
+ return retStr
else:
return ""
@@ -960,11 +964,14 @@ class ZSQLExtendFolder(Folder,Persistent
if (hasattr(self,"_v_searchSQL") and (self._v_searchSQL == None)) or (not hasattr(self,"_v_searchSQL")):
self._v_searchSQL=Shared.DC.ZRDB.DA.DA("_v_searchSQL","_v_searchSQL",self.getConnectionObj().getId(),"var","")
+ #self._v_searchSQL=self.getConnectionObj()()
self._v_searchSQL.max_rows_=max_rows
+ #self._v_searchSQL.set_client_encoding('UNICODE')
try:
logging.error("I am here")
t=self._v_searchSQL.__call__(var=query)
+ #t=self._v_searchSQL.query(query)
logging.error("I am here %s"%t)
return t
except :
@@ -978,8 +985,10 @@ class ZSQLExtendFolder(Folder,Persistent
try:
self._v_searchSQL.max_rows_=max_rows
-
+ #self._v_searchSQL.set_client_encoding('UNICODE')
+
return self._v_searchSQL.__call__(var=query)
+ #return self._v_searchSQL.query(query)
except :
logger("ZSQLSimpleSearch ERROR2",logging.ERROR, '%s %s'%sys.exc_info()[:2])
if sys.exc_info()[0]=="Database Error":
@@ -1266,30 +1275,25 @@ class ZSQLExtendFolder(Folder,Persistent
"""suche mit alten parametern bis auf die in argv getauschten"""
if args:
argv=args
-
+
#get the old queries
qs=self.REQUEST.SESSION[storename]['qs']
querys=qs.split(",")
#which arguments are in the old query string
-
queryList={}
for query in querys:
arg=query.split("=")[0]
if arg[0]=="_": arg="-"+arg[1:] # sicherstellen, dass an Anfang stets "_"
try:
- queryList[arg]=query.split("=")[1]
+ queryList[arg]=urllib.unquote_plus(query.split("=")[1])
except:
queryList[arg]=''
argList=[]
arg=""
-
-
-
#gehe durch die zu aendernden Argumente
for argTmp in argv.keys():
-
arg=argTmp[0:]# sicherstellen, dass der string auh kopiert wird
if arg[0]=="_": arg="-"+arg[1:] # sicherstellen, dass an Anfang stets "_"
@@ -1301,6 +1305,7 @@ class ZSQLExtendFolder(Folder,Persistent
str="ZSQLSearch?"+urllib.urlencode(queryList)
return str
+
def parseQueryString(self,qs,iCT,storemax="no",select=None,nostore=None,storename="foundCount",tableExt=None,NoQuery=None,NoLimit=None,restrictField=None,restrictConnect=None,filter=None):
"""analysieren den QueryString"""
@@ -2129,4 +2134,4 @@ def manage_addZSQLBibliography(self, id,
-
\ No newline at end of file
+