version 1.115, 2007/04/20 08:33:17
|
version 1.120, 2007/11/05 18:45:35
|
Line 56 def analyseIntSearch(word):
|
Line 56 def analyseIntSearch(word):
|
else: |
else: |
return "BETWEEN "+splitted[0]+" AND "+splitted[1] |
return "BETWEEN "+splitted[0]+" AND "+splitted[1] |
|
|
|
def unicodify(str):
    """Decode str (utf-8 or latin-1 byte string) into a unicode object.

    Empty/None input yields u"".  Values that are not plain byte strings
    (i.e. already unicode) are returned unchanged.
    """
    if not str:
        return u""
    if type(str) is StringType:
        try:
            return str.decode('utf-8')
        except UnicodeDecodeError:
            # not valid utf-8 -- latin-1 maps every byte, so this cannot fail
            # (was a bare except:, which also hid unrelated errors)
            return str.decode('latin-1')
    else:
        return str
|
|
|
def utf8ify(str):
    """encode unicode object or string into byte string in utf-8 representation"""
    # empty or None input maps to the empty byte string
    if not str:
        return ""
    # plain byte strings are assumed to already be utf-8 and pass through
    if type(str) is StringType:
        return str
    return str.encode('utf-8')
|
|
|
|
|
def setPsycopg2UseUnicode():
    """force Psycopg2DA to return unicode objects"""
    # Registering the UNICODE typecaster makes psycopg2 decode text columns
    # to unicode objects instead of raw byte strings.
    try:
        import psycopg2
        import psycopg2.extensions
        psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    except Exception:
        # was a bare except: -- narrowed so SystemExit/KeyboardInterrupt
        # still propagate; log the actual cause for diagnosis
        logging.error("Unable to force psycopg2 to use unicode", exc_info=True)
|
|
|
|
def sql_quote(v): |
def sql_quote(v): |
Line 314 class ZSQLExtendFolder(Folder,Persistent
|
Line 344 class ZSQLExtendFolder(Folder,Persistent
|
logger("update xml",logging.INFO,queryStr) |
logger("update xml",logging.INFO,queryStr) |
self.ZSQLSimpleSearch(queryStr) |
self.ZSQLSimpleSearch(queryStr) |
ret+="ud: %s \n"%field |
ret+="ud: %s \n"%field |
else: |
|
|
|
|
|
|
else: |
fields=",".join(dataSet.keys()) |
fields=",".join(dataSet.keys()) |
values=",".join([""" %s """%self.ZSQLQuote(dataSet[x]) for x in dataSet.keys()]) |
values=",".join([""" %s """%self.ZSQLQuote(dataSet[x]) for x in dataSet.keys()]) |
|
|
Line 325 class ZSQLExtendFolder(Folder,Persistent
|
Line 354 class ZSQLExtendFolder(Folder,Persistent
|
self.ZSQLSimpleSearch(queryStr) |
self.ZSQLSimpleSearch(queryStr) |
logger("update xml",logging.INFO,queryStr) |
logger("update xml",logging.INFO,queryStr) |
|
|
|
|
|
|
|
|
return ret |
return ret |
|
|
|
|
def importXMLFile(self,table,containerTagName,fieldNames,data=None,identify=None,filename=None,RESPONSE=None): |
def importXMLFileFMP(self,table,dsn=None,uploadfile=None,update_fields=None,id_field=None,sync_mode=False, |
#TODO: finish importXMLFile |
lc_names=True,keep_fields=False,ascii_db=False,replace=False,backup=False, |
''' |
debug=False,log_to_response=False, |
Import XML file into the table |
redirect_url=None,RESPONSE=None): |
@param table: name of the table the xml shall be imported into |
|
@param containerTagName: XML-Tag which describes a dataset |
|
@param file: xmlfile handle |
|
@param identify: (optional) field res. tag which identifies a entry uniquely for updating purposes. |
|
@param RESPONSE: (optional) |
|
''' |
|
ret="" |
|
from xml.dom.pulldom import parseString |
|
|
|
doc=parseString(file.read()) |
|
while 1: |
|
node=doc.getEvent() |
|
|
|
if node is None: |
|
break; |
|
else: |
|
if node[1].nodeName==containerTagName: |
|
doc.expandNode(node[1]) |
|
cols=node[1].getElementsByTagName('COL') |
|
dataSet=[] |
|
for col in cols: |
|
data=col.getElementsByTagName('DATA') |
|
dataSet.append(getTextFromNode(data[0])) |
|
update=False |
|
if identify: |
|
|
|
nr=fieldNames.index(identify) |
|
field=dataSet[nr] |
|
|
|
searchStr="""select %s from %s where %s = '%s'"""%(identify,table,identify,field) |
|
logger("import xml",logging.INFO,searchStr) |
|
search=self.ZSQLSimpleSearch(searchStr) |
|
if search: |
|
update=True |
|
|
|
if update: |
|
tmp=[] |
|
for fieldName in fieldNames: |
|
tmp.append("""%s = %s"""%(fieldName,self.ZSQLQuote(dataSet[fieldNames.index(fieldName)]))) |
|
setStr=",".join(tmp) |
|
nr=fieldNames.index(identify) |
|
field=dataSet[nr] |
|
|
|
queryStr="""UPDATE %s SET %s WHERE %s = '%s' """%(table,setStr,identify,field) |
|
logger("update xml",logging.INFO,queryStr) |
|
self.ZSQLSimpleSearch(queryStr) |
|
ret+="ud: %s \n"%field |
|
else: |
|
|
|
|
|
fields=",".join(fieldNames) |
|
values=",".join([""" %s """%self.ZSQLQuote(x) for x in dataSet]) |
|
|
|
|
|
queryStr="""INSERT INTO %s (%s) VALUES (%s)"""%(table,fields,values) |
|
self.ZSQLSimpleSearch(queryStr) |
|
logger("update xml",logging.INFO,queryStr) |
|
ret+="ad: %s \n"%field |
|
|
|
elif node[1].nodeName=="METADATA": |
|
fieldNames=[] |
|
doc.expandNode(node[1]) |
|
|
|
names=node[1].getElementsByTagName('FIELD') |
|
|
|
for name in names: |
|
fieldNames.append(name.getAttribute('NAME')) |
|
|
|
logger("update xml: fieldnames",logging.INFO,repr(fieldNames)) |
|
qstr="""select attname from pg_attribute, pg_class where attrelid = pg_class.oid and relname = '%s' """ |
|
columns=[x.attname for x in self.ZSQLSimpleSearch(qstr%table)] |
|
|
|
for fieldName in fieldNames: |
|
logger("update xml: fieldname",logging.INFO,repr(fieldName)) |
|
if fieldName not in columns: |
|
qstr="""alter table %s add %s %s""" |
|
self.ZSQLSimpleSearch(qstr%(table,fieldName,'text')) |
|
logger("update xml: fieldname add",logging.INFO,qstr%(table,fieldName,'text')) |
|
#fn=node[1].getAttribute("xml:id") |
|
#nf=file("xtf/"+fn+".xtf",'w') |
|
#nf.write("""<texts xmlns="http://emegir.info/xtf" xmlns:lem="http://emegir.info/lemma" >"""+node[1].toxml()+"</texts>") |
|
#print "wrote: %s"%fn |
|
|
|
|
|
def importXMLFileFMP(self,table,dsn=None,uploadfile=None,update_fields=None,id_field=None,sync_mode=False,replace=False,redirect_url=None,ascii_db=False,RESPONSE=None): |
|
''' |
''' |
Import FileMaker XML file (FMPXMLRESULT format) into the table. |
Import FileMaker XML file (FMPXMLRESULT format) into the table. |
@param dsn: database connection string |
@param dsn: database connection string |
Line 428 class ZSQLExtendFolder(Folder,Persistent
|
Line 369 class ZSQLExtendFolder(Folder,Persistent
|
@param update_fields: (optional) list of fields to update; default is to create all fields |
@param update_fields: (optional) list of fields to update; default is to create all fields |
@param id_field: (optional) field which uniquely identifies an entry for updating purposes. |
@param id_field: (optional) field which uniquely identifies an entry for updating purposes. |
@param sync_mode: (optional) really synchronise, i.e. delete entries not in XML file |
@param sync_mode: (optional) really synchronise, i.e. delete entries not in XML file |
|
@param lc_names: (optional) lower case and clean up field names from XML |
|
@param keep_fields: (optional) don't add fields to SQL database |
|
@param ascii_db: (optional) assume ascii encoding in db |
|
@param replace: (optional) delete and re-insert data |
|
@param backup: (optional) create backup of old table (breaks indices) |
@param RESPONSE: (optional) |
@param RESPONSE: (optional) |
@param redirect_url: (optional) url for redirecting after the upload is done |
@param redirect_url: (optional) url for redirecting after the upload is done |
''' |
''' |
|
|
tfilehd,filename=tempfile.mkstemp() |
tfilehd,filename=tempfile.mkstemp() |
tfile=os.fdopen(tfilehd,'w') |
tfile=os.fdopen(tfilehd,'w') |
logging.error("import %s"%uploadfile) |
logging.info("import %s"%uploadfile) |
for c in uploadfile.read(): |
for c in uploadfile.read(): |
tfile.write(c) |
tfile.write(c) |
tfile.close() |
tfile.close() |
Line 451 class ZSQLExtendFolder(Folder,Persistent
|
Line 397 class ZSQLExtendFolder(Folder,Persistent
|
options.update_fields=update_fields |
options.update_fields=update_fields |
options.id_field=id_field |
options.id_field=id_field |
options.sync_mode=sync_mode |
options.sync_mode=sync_mode |
|
options.lc_names=lc_names |
options.replace_table=replace |
options.replace_table=replace |
options.lc_names=True |
options.keep_fields=keep_fields |
options.ascii_db=ascii_db |
options.ascii_db=ascii_db |
|
options.replace_table=replace |
|
options.backup_table=backup |
|
options.debug=debug |
|
|
|
if RESPONSE and log_to_response: |
|
# set up logging to response as plain text |
|
RESPONSE.setHeader("Content-Type","text/plain; charset=utf-8") |
|
RESPONSE.write("Import FMPXML file...\n\n") |
|
loghandler = logging.StreamHandler(RESPONSE) |
|
if debug: |
|
loghandler.setLevel(logging.DEBUG) |
|
else: |
|
loghandler.setLevel(logging.INFO) |
|
logger = logging.getLogger('db.import.fmpxml') |
|
logger.addHandler(loghandler) |
|
options.use_logger_instance = logger |
|
|
importFMPXML(options) |
importFMPXML(options) |
|
|
os.remove(filename) |
os.remove(filename) |
|
|
|
if RESPONSE and log_to_response: |
|
loghandler.flush() |
|
RESPONSE.write("\n\n DONE!") |
|
return |
|
|
if RESPONSE and redirect_url: |
if RESPONSE and redirect_url: |
RESPONSE.redirect(redirect_url) |
RESPONSE.redirect(redirect_url) |
|
|
|
|
def generateIndex(self,field,index_name,table,RESPONSE=None): |
def generateIndex(self,field,index_name,table,RESPONSE=None): |
"""erzeuge ein Index Objekt einem Feld (experimental) |
"""erzeuge ein Index Objekt einem Feld (experimental) |
@param field: Feldname zu dem ein Index erzeugt werden soll |
@param field: Feldname zu dem ein Index erzeugt werden soll |
Line 504 class ZSQLExtendFolder(Folder,Persistent
|
Line 474 class ZSQLExtendFolder(Folder,Persistent
|
""" |
""" |
return urllib.quote(txt) |
return urllib.quote(txt) |
|
|
|
|
|
def createIdSet(self, resultset, idField=None): |
|
"""returns a (frozen)set of IDs from a SQL-resultset (using idField) or a list (if idField=None)""" |
|
if idField is None: |
|
return frozenset(resultset) |
|
else: |
|
idlist = [r[idField] for r in resultset] |
|
return frozenset(idlist) |
|
|
|
def opIdSet(self, a, b, op): |
|
"""operate on sets a and b""" |
|
if (op == 'intersect'): |
|
return a.intersection(b) |
|
elif (op == 'union'): |
|
return a.union(b) |
|
elif (op == 'diff'): |
|
return a.difference(b) |
|
|
|
|
def searchRel(self,relStatement,statement,wherePart,classes): |
def searchRel(self,relStatement,statement,wherePart,classes): |
"""suche relative haufigkeiten (experimental)""" |
"""suche relative haufigkeiten (experimental)""" |
ret={} |
ret={} |
Line 558 class ZSQLExtendFolder(Folder,Persistent
|
Line 547 class ZSQLExtendFolder(Folder,Persistent
|
return pt() |
return pt() |
|
|
|
|
def changeZSQLExtend(self,label,description,weight=0,REQUEST=None,connection_id=None): |
def changeZSQLExtend(self,label,description,weight=0,connection_id=None,REQUEST=None,): |
"""change the Konfiguration""" |
"""change the Konfiguration""" |
self.connection_id=connection_id |
self.connection_id=connection_id |
self.weight=weight |
self.weight=weight |
Line 573 class ZSQLExtendFolder(Folder,Persistent
|
Line 562 class ZSQLExtendFolder(Folder,Persistent
|
@param str: string der Formatiert werden soll. |
@param str: string der Formatiert werden soll. |
@param url: (optional) default ist "None", sonderfall erzeugt einen Link aus String mit unterliegender url |
@param url: (optional) default ist "None", sonderfall erzeugt einen Link aus String mit unterliegender url |
""" |
""" |
#url=None |
#logging.debug("formatascii str=%s url=%s"%(repr(str),repr(url))) |
|
|
|
if not str: |
|
return "" |
|
|
str=str.rstrip().lstrip() |
str=str.rstrip().lstrip() |
|
|
if url and str: |
if url and str: |
Line 587 class ZSQLExtendFolder(Folder,Persistent
|
Line 580 class ZSQLExtendFolder(Folder,Persistent
|
retStr+="""<a href="%s">%s</a><br/>"""%(strUrl,word) |
retStr+="""<a href="%s">%s</a><br/>"""%(strUrl,word) |
str=retStr |
str=retStr |
if str: |
if str: |
return re.sub(r"[\n]","<br/>",str) |
retStr = re.sub(r"[\n]","<br/>",str) |
|
#logging.debug("formatascii out=%s"%(repr(retStr))) |
|
return retStr |
else: |
else: |
return "" |
return "" |
|
|
Line 960 class ZSQLExtendFolder(Folder,Persistent
|
Line 955 class ZSQLExtendFolder(Folder,Persistent
|
if (hasattr(self,"_v_searchSQL") and (self._v_searchSQL == None)) or (not hasattr(self,"_v_searchSQL")): |
if (hasattr(self,"_v_searchSQL") and (self._v_searchSQL == None)) or (not hasattr(self,"_v_searchSQL")): |
|
|
self._v_searchSQL=Shared.DC.ZRDB.DA.DA("_v_searchSQL","_v_searchSQL",self.getConnectionObj().getId(),"var","<dtml-var var>") |
self._v_searchSQL=Shared.DC.ZRDB.DA.DA("_v_searchSQL","_v_searchSQL",self.getConnectionObj().getId(),"var","<dtml-var var>") |
|
#self._v_searchSQL=self.getConnectionObj()() |
|
|
self._v_searchSQL.max_rows_=max_rows |
self._v_searchSQL.max_rows_=max_rows |
|
#self._v_searchSQL.set_client_encoding('UNICODE') |
try: |
try: |
logging.error("I am here") |
logging.error("I am here") |
t=self._v_searchSQL.__call__(var=query) |
t=self._v_searchSQL.__call__(var=query) |
|
#t=self._v_searchSQL.query(query) |
logging.error("I am here %s"%t) |
logging.error("I am here %s"%t) |
return t |
return t |
except : |
except : |
Line 978 class ZSQLExtendFolder(Folder,Persistent
|
Line 976 class ZSQLExtendFolder(Folder,Persistent
|
try: |
try: |
|
|
self._v_searchSQL.max_rows_=max_rows |
self._v_searchSQL.max_rows_=max_rows |
|
#self._v_searchSQL.set_client_encoding('UNICODE') |
|
|
return self._v_searchSQL.__call__(var=query) |
return self._v_searchSQL.__call__(var=query) |
|
#return self._v_searchSQL.query(query) |
except : |
except : |
logger("ZSQLSimpleSearch ERROR2",logging.ERROR, '%s %s'%sys.exc_info()[:2]) |
logger("ZSQLSimpleSearch ERROR2",logging.ERROR, '%s %s'%sys.exc_info()[:2]) |
if sys.exc_info()[0]=="Database Error": |
if sys.exc_info()[0]=="Database Error": |