Annotation of OSAS/OSA_system/OSAS_show.py, revision 1.27
1.15 dwinter 1: """ Classes for displaying, browsing and organizing the archive
2: 20040303 Needs configuration for rescaling thumbs
1.1 dwinter 3:
1.15 dwinter 4: """
1.4 dwinter 5:
1.2 dwinter 6: import addFolder
1.20 dwinter 7: import OSAS_helpers
1.1 dwinter 8: from OFS.Folder import Folder
9: from OFS.SimpleItem import SimpleItem
10: from Products.PageTemplates.PageTemplateFile import PageTemplateFile
11: from Products.PageTemplates.PageTemplate import PageTemplate
1.8 dwinter 12: from AccessControl import ClassSecurityInfo
13: from Globals import InitializeClass
1.1 dwinter 14:
class OSAS_ShowOnline(SimpleItem):
    """Zope item publishing read-only browsing views of the archive; most
    methods are thin wrappers around the module-level helper functions."""
    security=ClassSecurityInfo()

    """OnlineBrowser"""
    def __init__(self,id):
        """initialize a new instance"""
        self.id = id

    # Zope meta_type shown in the management interface
    meta_type="OSAS_ShowOnline"

    security.declareProtected('View','index_html')
    def index_html(self):
        """main view: render the file-listing page template"""
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_ViewFiles.zpt').__of__(self)
        return pt()

    def getfilesystem2(self,start,reload=0):
        """load filesystem
        NOTE(review): filesystem2.__init__ ignores its arguments -- confirm."""
        k=filesystem2(start,1)
        return k

    def tree(self,start):
        """get the filetree rooted at *start*"""
        k=browse(start)
        return k

    def path_to_link_view(self,path):
        """generates navigation bar for viewfiles"""
        return path_to_link_view(self.REQUEST['URL'],path)

    def isdigilib2(self,path):
        """check if *path* is usable by digilib"""
        return isdigilib2(path)

    def changeName(self,name):
        """Map an absolute docuserver path onto its public digilib name."""
        return changeName(name)

    def hasMetafile(self,path):
        """True if *path* contains an index.meta file."""
        return hasMetafile(path)

    def getMetafile(self,path):
        """Return path/index.meta rendered as HTML."""
        return getMetafile(path)

    def toggle_view(self,path,file):
        """Open or close the subfolder *file* below *path*, then redirect
        back to the listing."""
        self.tree(path).toggle(path,file)
        return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1']+"?path="+path)
1.8 dwinter 63:
1.20 dwinter 64:
1.8 dwinter 65: InitializeClass(OSAS_ShowOnline)
66:
def manage_AddOSAS_ShowOnlineForm(self):
    """Render the ZMI form for adding an OSAS_ShowOnline instance."""
    template = PageTemplateFile('Products/OSA_system/zpt/AddOSAS_ShowOnline.zpt')
    return template.__of__(self)()
71:
def manage_AddOSAS_ShowOnline(self,id,RESPONSE=None):
    """Instantiate an OSAS_ShowOnline under *id*; redirect to manage_main."""
    self._setObject(id, OSAS_ShowOnline(id))
    if RESPONSE is not None:
        RESPONSE.redirect('manage_main')
78:
79:
class OSAS_StoreOnline(SimpleItem):
    """Web front-end for the storage system: browse folders, edit
    index.meta files, rescale thumbnails and configure viewer templates.
    Most methods delegate to the module-level helper functions."""
    security=ClassSecurityInfo()

    def __init__(self,id):
        """initialize a new instance"""
        self.id = id

    # Zope meta_type shown in the management interface
    meta_type="OSAS_StoreOnline"

    security.declareProtected('View','index_html')
    def index_html(self):
        """main view: render the store-files page template"""
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_StoreFiles.zpt').__of__(self)
        return pt()

    def readContexts(self,path):
        """Return (link, name) context pairs from path/index.meta, or []
        when the folder has no index.meta."""
        if os.path.exists(path+"/index.meta"):

            return readContexts(path)

        else:

            return []

    def rescaleThumbs(self,path):
        """rescale thumbs of images in path"""
        # NOTE(review): reads self.REQUEST['path'] instead of the *path*
        # parameter -- confirm both always agree.
        dlpath = re.sub('/mpiwg/online/','',self.REQUEST['path'])
        # Fire-and-forget: scaleomat runs remotely in the background; its
        # output is appended to /tmp/sc.out on the remote host.
        os.popen("ssh archive@nausikaa2.rz-berlin.mpg.de /usr/local/mpiwg/scripts/scaleomat -src=/mpiwg/online -dest=/mpiwg/temp/online/scaled/thumb -dir=%s -scaleto=90 -replace >> /tmp/sc.out &"%dlpath )
        #os.popen("ssh archive@nausikaa2.rz-berlin.mpg.de /usr/local/mpiwg/scripts/scaleomat.pl %s /mpiwg/temp/online/scaled/thumb 90 --replace >> /tmp/sc.out &"% re.sub('/mpiwg/online/','',self.REQUEST['path']))

        self.REQUEST.SESSION['path']=self.REQUEST['path']
        #return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1'])
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_scaled.zpt').__of__(self)
        return pt()


    def getfilesystem2(self,start,reload=0):
        """load filesystem
        NOTE(review): filesystem2.__init__ ignores its arguments -- confirm."""
        k=filesystem2(start,1)
        return k

    def tree(self,start):
        """get the filetree rooted at *start*"""
        k=browse(start)
        return k

    def path_to_link_store(self,path):
        """generates navigation bar for viewfiles"""
        return path_to_link_store(self.REQUEST['URL'],path)

    def isdigilib2(self,path):
        """check if *path* is usable by digilib"""
        return isdigilib2(path)

    def changeName(self,name):
        """Map an absolute docuserver path onto its public digilib name."""
        return changeName(name)

    def hasMetafile(self,path):
        """True if *path* contains an index.meta file."""
        return hasMetafile(path)

    def getMetafile(self,path):
        """Return path/index.meta rendered as HTML."""
        return getMetafile(path)

    def toggle_view(self,path,file):
        """Open or close the subfolder *file* below *path*, then redirect."""
        self.tree(path).toggle(path,file)
        return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1']+"?path="+path)

    def isFolder(self,path):
        """Test whether the entry is a folder.
        NOTE(review): delegates to a module-level isFolder() that is not
        defined in this file chunk -- confirm it exists elsewhere."""
        return isFolder(self,path)

    def isScannedDocument(self,path):
        """Test whether the entry is a scanned document (module-level
        helper not visible in this chunk -- confirm)."""
        return isScannedDocument(self,path)

    def isFullText(self,path,folder_name):
        """Test whether *folder_name* below *path* holds full text."""
        return isFullText(path,folder_name)

    def isPdf(self,path,folder_name):
        """Test whether *folder_name* below *path* is a PDF folder."""
        return isPdf(path,folder_name)


    def isPresentation(self,path,folder_name):
        """Test whether *folder_name* below *path* is a presentation."""
        return isPresentation(path,folder_name)

    def date(self):
        """Today's date formatted as DD.MM.YYYY.
        NOTE(review): `localtime` is not imported by any import visible in
        this file -- confirm it is provided elsewhere."""
        return strftime("%d.%m.%Y",localtime())

    def addFolderForm(self,path):
        """add a new path: render the add-folder form"""
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_addFolder.zpt').__of__(self)
        return pt()

    def showHelp(self,refType,genericTag):
        """Return the help text for metadata field *genericTag* from the
        standardMD reference whose title equals *refType*."""
        for reference in self.ZopeFind(self.standardMD):
            if reference[1].title==refType:
                text=getattr(reference[1],'md_'+genericTag)[2]
                return text
        return "NO EXPLANATION"

    def showHelpTag(self,url,reftype,item):
        """Generate the javascript: URL that opens the help popup window."""
        url2=url+'/showHelp?refType=%s&genericTag=%s'%(reftype,item)
        ret="""javascript:wd=window.open(\'%s\',\'Help\',\'width=300,height=250\');void(\'\');wd.focus();"""%url2
        return ret

    def addFolder(self,path,folder_name,description,archive_creation_date,creator):
        """add the folder to the filesystem and write the metadata files"""
        return addFolder.addFolder(self,path,folder_name,description,archive_creation_date,creator)

    def EditIndex(self,path):
        """Edit the index metafile: load path/index.meta into the session
        and render the edit template."""
        try:
            dom=xml.dom.minidom.parse(path+"/index.meta")
            indexmeta=dom.toxml(encoding='UTF-8')
        except:
            # missing or unparseable index.meta -> start with an empty editor
            indexmeta=""

        self.REQUEST.SESSION['indexmeta']=indexmeta
        self.REQUEST.SESSION['path']=path
        newtemplate=PageTemplateFile('Products/OSA_system/zpt/editindex').__of__(self)
        return newtemplate()

    def EditIndex2(self):
        """Save the changes back to index.meta (posted by the edit form)."""
        if not self.REQUEST.has_key('fileupload'):
            #newtext=urllib.unquote(self.REQUEST['indexmeta'])
            newtext=self.REQUEST['indexmeta']

        else:
            self.file_name=self.REQUEST['fileupload'].filename
            #newtext=self.REQUEST.form['fileupload'].read()
            # HACK DW: the uploaded file is ignored, the form text wins
            newtext=self.REQUEST['indexmeta']

        indexmeta=file(self.REQUEST.SESSION['path']+"/index.meta","w")
        indexmeta.writelines(newtext)
        return self.REQUEST.response.redirect(self.REQUEST['URL1']+"?path="+self.REQUEST.SESSION['path'])

    def add_metafile(self):
        """Render the add-metadata template."""
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_addmetadata.zpt').__of__(self)
        return pt()

    def getTextToolsField(self,path,name,default=''):
        """Read texttool subfield *name* from index.meta in *path*;
        return *default* when the field is missing or unreadable."""
        try:
            dom=xml.dom.minidom.parse(path+"/index.meta")
            node=dom.getElementsByTagName('texttool')[0] # get the texttool node
            subnode=node.getElementsByTagName(name)[0]

            # for 'text' only the folder part of the stored path is needed
            if name=="text":
                splitted=getText(subnode.childNodes).split("/")
                return splitted[len(splitted)-2]
            else:
                return getText(subnode.childNodes)
        except:
            return default

    def getViewerTemplateSets(self,obj_ids=None):
        """Get the ViewerTemplateSet (title, id, object) triples for the
        configuration forms."""
        ret=[]

        try:
            viewerTemplateSets=self.ZopeFind(self.viewerTemplateSets,obj_metatypes=['OSAS_viewerTemplateSet'],obj_ids=obj_ids)# assumes a viewerTemplateSets folder somewhere in the hierarchy

            for viewerTemplateSet in viewerTemplateSets:
                ret.append((viewerTemplateSet[1].title,viewerTemplateSet[0],viewerTemplateSet[1]))

            return ret

        except:
            return [('no ViewerTemplateSetfolders','')]

    def changeTemplatesForm(self,path):
        """changeform: render the template-change form for *path*"""
        path=self.REQUEST.SESSION['path']=path
        pt=PageTemplateFile('Products/OSA_system/zpt/changeTemplatesForm.zpt').__of__(self)
        return pt()



    def changeTemplates(self,path,RESPONSE=None):
        """Change all index.meta entries below *path* (walks the tree)."""
        os.path.walk(path,OSAS_helpers.changeIndexMeta,self.REQUEST.form)
        if RESPONSE is not None:
            RESPONSE.redirect(self.REQUEST['URL1']+"?path="+self.REQUEST.SESSION['path'])
1.20 dwinter 278:
279:
1.8 dwinter 280: InitializeClass(OSAS_StoreOnline)
1.2 dwinter 281:
def manage_AddOSAS_StoreOnlineForm(self):
    """Render the ZMI form for adding an OSAS_StoreOnline instance."""
    template = PageTemplateFile('Products/OSA_system/zpt/AddOSAS_StoreOnline.zpt')
    return template.__of__(self)()
286:
def manage_AddOSAS_StoreOnline(self,id,RESPONSE=None):
    """Instantiate an OSAS_StoreOnline under *id*; redirect to manage_main."""
    self._setObject(id, OSAS_StoreOnline(id))
    if RESPONSE is not None:
        RESPONSE.redirect('manage_main')
293:
1.1 dwinter 294:
295:
296:
def readContexts(path):
    """Collect (link, name) pairs from the <context> elements of
    path/index.meta; elements lacking either child are skipped."""
    dom = xml.dom.minidom.parse(path + "/index.meta")
    pairs = []
    for ctx in dom.getElementsByTagName('context'):
        try:
            href = getText(ctx.getElementsByTagName('link')[0].childNodes)
            label = getText(ctx.getElementsByTagName('name')[0].childNodes)
            pairs.append((href, label))
        except:
            pass
    return pairs
312:
313:
1.1 dwinter 314: ### Ab hier Baustelle
315:
316:
317: from types import *
318: import urllib
319: import os
320: import sys
321: import re
322: from AccessControl import ClassSecurityInfo
323: from AccessControl.Role import RoleManager
324: from Acquisition import Implicit
325: from Globals import Persistent
1.16 dwinter 326: try:
327: from time import strptime
328: except:
329: print "ignoring time.strptime import"
330:
1.1 dwinter 331: from time import strftime
332: import time
333: import os.path
334: import dircache
335: import xml.dom.minidom
336: from Products.PageTemplates.PageTemplateFile import PageTemplateFile
337: from Products.PageTemplates.PageTemplate import PageTemplate
338: import tempfile
# All temp files created via the tempfile module (archiver logs, ls dumps)
# are placed in this directory.
tempfile.tempdir="/var/tmp/archiver"

# Directory entries hidden from the browse tree (Mac/network FS artifacts).
exclusion=[".HSResource","lost+found","Network Trash Folder","TheFindByContentFolder","TheVolumeSettingsFolder"]
class fsentry(Implicit, Persistent, RoleManager):
    """File entry: wraps one path from a `find` listing with its mtime."""
    # class-level defaults; only path/all/user/mtime are set per instance
    path = ""
    user = ""
    month = ""
    date =""
    time = ""

    security=ClassSecurityInfo()
    def __init__(self,extpath):
        """initialize from one raw `find` output line (newline-terminated;
        the regex raises AttributeError otherwise -- TODO confirm callers)"""
        extpath=os.path.abspath(re.search(r"(.*)\n",extpath).group(1))
        self.all=extpath
        self.path=extpath
        self.user=""
        self.mtime=os.path.getmtime(extpath)


    security.declarePublic('getPath')
    def getPath(self):
        """Return the absolute path of this entry."""
        return self.path

    security.declarePublic('getUser')
    def getUser(self):
        """Return the owning user (always "" -- never filled in here)."""
        return self.user

    security.declarePublic('getDate')
    def getDate(self):
        """Return the modification time as YYYYMMDDHHMM (UTC)."""
        return strftime("%Y%m%d%H%M",time.gmtime(self.mtime))

    security.declarePublic('getDate')
    def getID(self):
        """Return a sortable ID: date string followed by the path."""
        return self.getDate()+self.getPath()

    security.declarePublic('getTime')
    def getTime(self):
        """Return the time field.
        NOTE(review): self.time is never assigned, so this always returns
        the class default "" -- confirm."""
        return self.time
    security.declarePublic('getAll')
    def getAll(self):
        """Return the full path (same value as getPath)."""
        return self.all
388:
class filesystem(Implicit, Persistent, RoleManager):
    """In-memory index of a filesystem subtree: `node` maps each directory
    to its entries; `hasindex` flags directories carrying index.meta/meta."""
    # NOTE(review): class-level mutable dicts are shared across instances
    # unless __init__ is called with reload=1 -- confirm this is intended.
    node={}
    hasindex={}
    security=ClassSecurityInfo()

    def getfs(self,start):
        """Return every path below *start*, one per line, via external find."""
        f = os.popen("find "+ start+" -name '*' ","r")
        lines = f.readlines()

        return lines

    def loadfs(self,start):
        """Fill self.node and self.hasindex from the find listing."""
        for line in self.getfs(start):

            g=re.search(r"(.*/)(.*)\n",line)
            if not g==None:
                path=g.group(1)
                file=g.group(2)
                if self.node.has_key(path):
                    elements=self.node[path]
                    elements.append(file)
                    self.node[path]=elements
                else:
                    self.node[path]=[file]
                # bitwise | works here because both operands are booleans
                if (file=="index.meta") | (file=="meta"):
                    self.hasindex[path]="1"

    def __init__(self,start,reload=0):
        # reload=1 drops the (class-shared) caches, then rescans *start*.
        # NOTE(review): structure reconstructed from CVS annotate output;
        # with reload=0 nothing is loaded at all -- confirm.
        if reload==1:
            self.node={}
            self.hasindex={}
            self.loadfs(start)


    security.declarePublic('getNode')
    def getNode(self):
        """Return the directory -> entries mapping."""
        return self.node

    security.declarePublic('getKeys')
    def getKeys(self):
        """Return all indexed directory paths."""
        return self.node.keys()

    security.declarePublic('clearnode')
    def clearnode(self):
        """Drop the directory mapping; always returns 0."""
        self.node={}
        return 0

    security.declarePublic('hasIndex')
    def hasIndex(self,path):
        """True if *path* (with trailing slash) carries index.meta/meta."""
        return self.hasindex.has_key(path)


    def onlyIndex_old(self):
        """return only files with archive material (superseded heuristic
        based on entry counts)"""
        j={}
        for k in self.node:
            if self.hasindex.has_key(k):
                if len(self.node[k])>1:
                    if (len(self.node[k])==2) & ('meta' not in self.node[k]):
                        j[k]=self.node[k]
                    elif (len(self.node[k])==2) & ('meta' in self.node[k]):
                        """ nothing """
                    else:
                        j[k]=self.node[k]
        return j

    def archive_the_path(self,path):
        """Decide whether *path* should be archived: 1 when index.meta has
        no archive-storage-date (or lacks the tag), 0 for folders, paths
        already archived, or paths without a readable index.meta."""

        try:
            #f = os.popen("cat "+path+"/index.meta","r")
            f =file(path+"/index.meta","r")

            lines = f.read()

            try:
                dom = xml.dom.minidom.parseString(lines)
                if dom.getElementsByTagName("content-type"):
                    if getText(dom.getElementsByTagName("content-type")[0].childNodes)=="folder":
                        # do not archive plain folders
                        """folder nicht archivieren"""
                        return 0

                archive_storage_date=getText(dom.getElementsByTagName("archive-storage-date")[0].childNodes)

                if archive_storage_date=="":

                    # empty date -> archive it
                    """leer also archivieren"""
                    return 1
                else:
                    # already archived -> skip
                    """nicht archivieren"""
                    return 0
            except:
                # tag missing -> archive it
                """kein tag also archivieren"""
                return 1
        except:
            # no index.meta -> do not archive
            """kein index.meta also nicht archivieren"""
            return 0

    security.declarePublic('onlyIndex')
    def onlyIndex(self):
        """return only files with archive material (archive-storage-date not set)"""
        j={}

        for k in self.node:
            if self.archive_the_path(k):
                j[k]=self.node[k]
        return j
    security.declarePublic('getImageDirs')
    def getImageDirs(self,dom,path):
        """Return <dir> names from index.meta, made relative to
        /mpiwg/online when the combined path matches."""
        dirs=dom.getElementsByTagName("dir")
        dirback=[]
        for dir in dirs:
            temp=getText(dir.getElementsByTagName("name")[0].childNodes)
            temp2=re.search(r"(.*)/mpiwg/online/(.*)",path+"/"+temp)
            if not temp2==None:
                try:
                    dirback.append(temp2.group(2))
                except:
                    """nothing"""
            else:
                dirback.append(temp)
        return dirback




    security.declarePublic('digilib')
    def digilib(self, path):
        """check if folder is a container for digilib files; returns the
        image dirs, or the string "NO" when no index is present"""
        if self.hasindex.has_key(path+"/"):
            return(self.parseIndexMeta(path))
        else:
            return "NO"




    security.declarePublic('isdigilib')
    def isdigilib(self, path):
        """return number of possible image directories usefull for digilib"""
        if self.hasindex.has_key(path+"/"):
            return(len(self.parseIndexMeta(path)))
        else:
            return 0

    security.declarePublic('parseIndexMeta')
    def parseIndexMeta(self,k):
        """Parse k/index.meta; return its image dirs when the content-type
        marks a scanned document, [] otherwise or on any error."""
        f = os.popen("cat "+k+"/index.meta","r")
        lines = f.read()

        try:
            dom = xml.dom.minidom.parseString(lines)
            content_type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
            if (content_type=="scanned-document") or (content_type=="scanned document"):
                dirs=self.getImageDirs(dom,k)

                return dirs
        except:
            return []
553:
554:
class filesystem2(Implicit, Persistent, RoleManager):
    """Variant of `filesystem` that checks index.meta directly on disk
    instead of relying on a pre-scanned cache."""
    # NOTE(review): class-level mutable dicts, shared across instances.
    node={}
    hasindex={}
    security=ClassSecurityInfo()

    def getfs(self,start):
        """Return every path below *start*, one per line, via external find."""

        f = os.popen("find "+ start+" -name '*' ","r")
        lines = f.readlines()

        return lines

    def loadfs(self,start):
        """Fill self.node and self.hasindex from the find listing.
        NOTE(review): if the regex ever fails inside the try, `path`/`file`
        keep their previous values -- confirm acceptable."""
        for line in self.getfs(start):

            g=re.search(r"(.*/)(.*)\n",line)
            if not g==None:
                try:
                    path=g.group(1)
                    file=g.group(2)
                except:
                    """nothing"""
                if self.node.has_key(path):
                    elements=self.node[path]
                    elements.append(file)
                    self.node[path]=elements
                else:
                    self.node[path]=[file]
                if (file=="index.meta") | (file=="meta"):
                    self.hasindex[path]="1"

    def __init__(self,start,reload=0):
        """nothing -- intentionally does not scan; both arguments ignored"""


    security.declarePublic('getImageDirs')
    def getImageDirs(self,dom,path):
        """Return <dir> names from index.meta, made relative to
        /mpiwg/online when the combined path matches."""
        dirs=dom.getElementsByTagName("dir")
        dirback=[]
        for dir in dirs:
            temp=getText(dir.getElementsByTagName("name")[0].childNodes)
            temp2=re.search(r"(.*)/mpiwg/online/(.*)",path+"/"+temp)
            if not temp2==None:
                try:
                    dirback.append(temp2.group(2))
                except:
                    """nothing"""
            else:
                dirback.append(temp)
        return dirback


    security.declarePublic('digilib')
    def digilib(self, path):
        """check if folder is a container for digilib files; returns the
        image dirs, or "NO" when index.meta is absent"""
        if os.path.exists(path+"/index.meta"):
            return(self.parseIndexMeta(path))
        else:
            return "NO"

    security.declarePublic('isdigilib')
    def isdigilib(self, path):
        # number of image directories usable by digilib (0 if no index.meta)
        if os.path.exists(path+"/index.meta"):
            return(len(self.parseIndexMeta(path)))
        else:
            return 0
    security.declarePublic('parseIndexMeta')
    def parseIndexMeta(self,k):
        """Parse k/index.meta; return its image dirs when content-type is
        'scanned-document', [] otherwise or on any error."""
        f = os.popen("cat "+k+"/index.meta","r")
        lines = f.read()

        try:
            dom = xml.dom.minidom.parseString(lines)
            content_type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
            if content_type=="scanned-document":
                dirs=self.getImageDirs(dom,k)

                return dirs
        except:
            return []
639:
640:
class browse(Implicit, Persistent, RoleManager):
    """Lazy directory tree with per-path open/closed toggle state."""

    security=ClassSecurityInfo()
    # NOTE(review): class-level dicts -- toggle state is shared by all
    # browse instances; __init__ resets only tree, not toggledict.
    tree={}
    toggledict={}

    def filterExcluded(self,dir):
        """Drop OS/network artifacts (see module-level `exclusion`)."""
        ret=[]
        for item in dir:
            if not item in exclusion:
                ret.append(item)
        return ret

    def __init__(self,startpath):
        self.tree={}
        self.tree[startpath]=self.filterExcluded(dircache.listdir(startpath))

    security.declarePublic('getTree')
    def getTree(self,path):
        """Return (and cache) the filtered directory listing of *path*."""
        if self.tree.has_key(path):
            return self.tree[path]
        else:
            self.tree[path]=self.filterExcluded(dircache.listdir(path))
            return self.tree[path]

    security.declarePublic('isDirectory')
    def isDirectory(self,path,file):
        """True if path/file is a directory."""
        return os.path.isdir(os.path.abspath(path+"/"+file))

    security.declarePublic('toggle')
    def toggle(self,tmppath,file):
        """Flip the open/closed state of tmppath/file.
        NOTE(review): a first toggle stores 4 -- presumably any non-zero
        value means 'open'; confirm against the templates."""
        path=tmppath+"/"+file

        if self.toggledict.has_key(path):
            if self.toggledict[path]==0:
                self.toggledict[path]=1

            else:
                self.toggledict[path]=0

        else:
            self.toggledict[path]=4


    security.declarePublic('isToggle')
    def isToggle(self,tmppath,file):
        """Return the toggle state of tmppath/file (0 if never toggled)."""
        path=tmppath+"/"+file

        if self.toggledict.has_key(path):

            return self.toggledict[path]
        else:

            return 0
695:
696:
def getfs(start):
    """Return every path below *start* as newline-terminated lines,
    produced by the external `find` command."""
    listing = os.popen("find " + start + " -name '*'", "r")
    return listing.readlines()
704:
def showall(start):
    """Print every path below *start* to stdout; always returns 0."""
    for entry in getfs(start):
        print(entry)
    return 0
710:
def entries(start):
    """Return fsentry objects for every existing path below *start*;
    unreadable or vanished paths are silently skipped."""
    result = []
    for raw in getfs(start):
        try:
            cleaned = os.path.abspath(re.search(r"(.*)\n", raw).group(1))
            if os.path.exists(cleaned):
                result.append(fsentry(raw))
        except:
            pass
    return result
724:
def getfilesystem(start,reload=0):
    """Build and return a freshly loaded filesystem index for *start*
    (always forces a reload, mirroring the original behavior)."""
    return filesystem(start, 1)
730:
731:
732:
def sort_by_date(fs):
    """Return the file entries sorted newest-first by their sortable ID.

    Entries with duplicate IDs collapse onto the last one seen (the
    lookup table is keyed by ID), exactly as before.
    """
    ids = []
    by_id = {}
    for entry in fs:
        key = entry.getID()
        ids.append(key)
        by_id[key] = entry
    ids.sort()
    ids.reverse()
    return [by_id[key] for key in ids]
746:
def path_to_link(path):
    """Build the breadcrumb navigation bar for the showfiles view:
    one <a href=showfiles?path=...> link per path component, root-first."""
    crumbs = [[path, os.path.basename(path)]]
    parent = os.path.dirname(path)
    depth = 0
    while len(parent) != 1:
        depth += 1
        if depth > 20:  # safety stop against pathological paths
            break
        crumbs.append([parent, os.path.basename(parent)])
        parent = os.path.dirname(parent)
    html = ""
    while depth >= 0:
        html += "<a href=showfiles?path=" + crumbs[depth][0] + ">" + crumbs[depth][1] + "</a>/"
        depth -= 1
    return html
768:
def path_to_link_view(URL,path):
    """Build the breadcrumb navigation bar for the viewfiles view:
    one <a href=URL?path=...> link per path component, root-first."""
    segments = [[path, os.path.basename(path)]]
    ancestor = os.path.dirname(path)
    count = 0
    while len(ancestor) != 1:
        count += 1
        if count > 20:  # safety stop against pathological paths
            break
        segments.append([ancestor, os.path.basename(ancestor)])
        ancestor = os.path.dirname(ancestor)
    parts = []
    for idx in range(count, -1, -1):
        parts.append("<a href=" + URL + "?path=" + segments[idx][0] + ">" + segments[idx][1] + "</a>/")
    return "".join(parts)
790:
def path_to_link_store(URL,path):
    """Build the breadcrumb navigation bar for the store view (same output
    as path_to_link_view): one link per path component, root-first."""
    trail = [[path, os.path.basename(path)]]
    up = os.path.dirname(path)
    levels = 0
    while len(up) != 1:
        levels += 1
        if levels > 20:  # safety stop against pathological paths
            break
        trail.append([up, os.path.basename(up)])
        up = os.path.dirname(up)
    pieces = []
    while levels >= 0:
        pieces.append("<a href=" + URL + "?path=" + trail[levels][0] + ">" + trail[levels][1] + "</a>/")
        levels -= 1
    return "".join(pieces)
812:
813:
class Error(Implicit, Persistent, RoleManager):
    """Holds one error record (a list) for later display from the session."""

    error=[]
    security=ClassSecurityInfo()

    def __init__(self,initerror):
        # keep a shallow copy so later mutation of the input does not leak in
        self.error = list(initerror)

    security.declarePublic('getError')
    def getError(self):
        """Return the stored error record."""
        return self.error
824:
class metacheck(Implicit, Persistent, RoleManager):
    """Runs the external metacheck tool on one or more paths and parses
    its output into Error records."""
    lines=[]
    security=ClassSecurityInfo()
    def parsearchive(self,str):
        """Parse tool output lines; return [html, status] where status is
        'error'/'ok'/'running' based on the LAST line's prefix."""
        retstr=''

        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                # NOTE(review): raises AttributeError if a line has no ':'
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"

            return [retstr,error]
        else:
            return ['','running']
    def __init__(self,path):
        """Run metacheck synchronously on *path* (one string or a list)."""
        self.lines=[]

        if type(path)==StringType:
            f = os.popen("/usr/local/mpiwg/archive/metacheck "+path,"r")
            self.lines.append(Error([path,self.parsearchive(f.readlines())]))
        else:
            for singlepath in path:
                f = os.popen("/usr/local/mpiwg/archive/metacheck "+singlepath,"r")
                self.lines.append(Error([singlepath,self.parsearchive(f.readlines())]))
    security.declarePublic('messages')

    def messages(self):
        """Return the collected Error records."""
        return self.lines
861:
862:
863:
864:
class archive(Implicit, Persistent, RoleManager):
    """Starts the external archiver in the background (one temp output
    file per path) and re-reads the progress on demand."""
    lines=[]
    security=ClassSecurityInfo()
    def parsearchive(self,str):
        """Parse archiver output lines; return [html, status] where status
        is 'error'/'ok'/'running' based on the LAST line's prefix."""
        retstr=''

        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                # NOTE(review): raises AttributeError if a line has no ':'
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"

            return [retstr,error]
        else:
            return ['','running']

    def __init__(self,path,session):
        """archive the documents in path; registers itself in the session
        under 'archiver' so the status page can poll messages()"""
        self.lines=[]
        self.filenames={}
        session['archiver']=self


        if type(path)==StringType:
            self.filenames[path]=tempfile.mktemp()
            f = os.popen("/usr/local/mpiwg/archive/archiver "+path+" > "+self.filenames[path]+" &","r")
        else:
            for singlepath in path:
                self.filenames[singlepath]=tempfile.mktemp()
                f = os.popen("/usr/local/mpiwg/archive/archiver "+singlepath+" > "+self.filenames[singlepath]+" &","r")

    security.declarePublic('messages')
    def messages(self):
        """Re-read every background output file and return fresh Error
        records (current progress of each archiver run)."""
        self.lines=[]
        for path in self.filenames.keys():

            self.lines.append(Error([path,self.parsearchive(open(self.filenames[path],"r").readlines())]))
        return self.lines
909:
910:
def evalext(str):
    """Evaluate *str* as a Python expression and return the result.

    SECURITY: eval() executes arbitrary code -- this must never be called
    with untrusted input. For data-only literals prefer ast.literal_eval.
    Behavior is intentionally unchanged; this wrapper only documents the
    hazard for reviewers.
    """
    # NOTE(review): deliberate eval; audit every caller for untrusted input.
    return eval(str)
913:
def storeerror(ret,path,context,i):
    """Stash an error payload and its path in the Zope session (keyed by
    slot number *i*) and return the relative error-page URL."""
    session = context.REQUEST.SESSION
    session['error%i' % i] = ret
    session['path%i' % i] = path
    return 'error?number=%i' % i
920:
def geterror(str,context):
    """Fetch a value previously stored in the Zope session under key *str*."""
    return context.REQUEST.SESSION[str]
924:
def readfile(path):
    """Return the entire contents of the file at *path* as one string.

    Fixes the original implementation, which never closed the file handle
    (resource leak) and built the result with quadratic += concatenation;
    a context manager plus a single read() returns the identical string.
    """
    with open(path, 'r') as f:
        return f.read()
932:
def writefile(self,path,txt,REQUEST):
    """Overwrite *path* with *txt*, then render the acquired 'archive2'
    object as the response."""
    handle = open(path, 'w')
    handle.write(txt)
    handle.close()
    return self.aq_acquire('archive2')()
939:
940:
def metachecker(self,path):
    """Remember the requested path in the session, then run a synchronous
    metadata check on *path* and return the metacheck result object."""
    request = self.REQUEST
    request.SESSION['path'] = request['path']
    return metacheck(path)
945:
def archiver(self,path):
    """Start a background archiving run for *path* (the archive object
    registers itself in the session) and redirect to the status page."""
    archive(path, self.REQUEST.SESSION)
    return self.REQUEST.RESPONSE.redirect('archive4')
950:
def getText(nodelist):
    """Concatenate the character data of all text nodes in *nodelist*."""
    parts = []
    for node in nodelist:
        if node.nodeType == node.TEXT_NODE:
            parts.append(node.data)
    return "".join(parts)
958:
def getBib(nodelist):
    """Render the children of a <bib> element as an HTML table with one
    name/value row per child element."""
    rc= "<table border='0'>"

    for node in nodelist:

        if node.nodeType == node.ELEMENT_NODE:
            """nothing"""
            # NOTE(review): indentation reconstructed from CVS annotate
            # output -- the row is emitted only for element nodes; confirm.
            rc = rc+"<tr><td valign='right'>"+str(node.nodeName)+":</td><td> "+getText(node.childNodes)+"</td></tr>"

    #print rc
    return rc+"</table>"
971:
def getMetafile(path):
    """Read path/index.meta and translate it into an HTML summary
    (heading, creator/date line, description, optional bib table).
    Returns the literal string "NO_METADATA" when no index.meta exists;
    otherwise the UTF-8 encoded HTML."""
    html=[]
    if not os.path.exists(path+"/index.meta"):

        return "NO_METADATA"
    else:
        f = os.popen("cat "+path+"/index.meta","r")
        lines = f.read()
        dom = xml.dom.minidom.parseString(lines)
        # each field degrades to a visible placeholder when missing
        try:
            name=getText(dom.getElementsByTagName("name")[0].childNodes)
        except:
            name="NOT_DEFINED!!!"
        try:
            creator=getText(dom.getElementsByTagName("creator")[0].childNodes)
        except:
            creator="NOT_DEFINED!!!"

        try:
            creation_date=getText(dom.getElementsByTagName("archive-creation-date")[0].childNodes)
        except:
            creation_date="NOT_DEFINED!!!"

        try:
            description=getText(dom.getElementsByTagName("description")[0].childNodes)
        except:
            description="NOT_DEFINED!!!"

        # NOTE: local `type` shadows the builtin within this function
        try:
            type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
        except:
            type=""
        if type=="scanned document":
            html="<h3>Document: "+name+"</h3>"
        elif type=="folder":
            html="<h3>Folder: "+name+"</h3>"
        else:
            html="<h3>Document: "+name+"</h3>"

        html=html+"<p><i>created by: "+creator+" at: "+creation_date+"</i></p>"
        html=html+"<h4>Description</h4><p>"+description+"</p>"
        # optional bibliographic block: meta/bib rendered via getBib
        try:
            bib = dom.getElementsByTagName("meta")[0].getElementsByTagName("bib")[0]
            if bib.attributes.has_key('type'):
                html=html+"<h4>Info ("+bib.attributes['type'].value+")</h4>"
            else:
                html=html+"<h4>Info</h4>"
            html=html+getBib(bib.childNodes)

        except:
            """none"""

        # html=html.encode('utf-8','replace')+getBib(bib.childNodes).encode('utf-8','replace')

        return html.encode('utf-8')
1.1 dwinter 1028:
def hasMetafile(path):
    """Report whether the directory *path* contains an index.meta file."""
    metafile = path + "/index.meta"
    return os.path.exists(metafile)
1.17 dwinter 1033:
1034:
1.1 dwinter 1035:
def isdigilib2(path):
    """Check whether *path* is a candidate for digilib even without its own
    metadata: 1 if it contains image files, or if the parent's index.meta
    lists it as a <dir> with content-type 'images'; 0 otherwise.
    NOTE(review): nesting reconstructed from CVS annotate output."""
    try:
        dir=os.listdir(path)

        imagesuffixes=['.gif','.jpg','.jpeg','.png','.tiff','.tif','.JPG','.TIFF','.TIF']
        ret=""
        for a in dir:

            suffix=os.path.splitext(a)

            if suffix[1] in imagesuffixes:
                return 1

        # no image files found directly: fall back to the parent's index.meta
        try:
            dom=xml.dom.minidom.parse(os.path.split(path)[0]+"/index.meta")
            for node in dom.getElementsByTagName("dir"):

                if getText(node.getElementsByTagName("content-type")[0].childNodes)=="images":

                    if getText(node.getElementsByTagName("name")[0].childNodes)==os.path.split(path)[1]:
                        return 1
            return 0
        except:

            return 0

    except:
        return 0
1069:
def isFullText(path,folder_name):
    """Check whether *folder_name* is listed in path/index.meta as a <dir>
    with content-type 'fulltext'. Returns 1/0; 0 on any error.
    NOTE(review): nesting reconstructed from CVS annotate output."""
    try:
        dom=xml.dom.minidom.parse(path+"/index.meta")
        for node in dom.getElementsByTagName("dir"):

            if getText(node.getElementsByTagName("content-type")[0].childNodes)=="fulltext":

                if getText(node.getElementsByTagName("name")[0].childNodes)==folder_name:
                    return 1
        return 0
    except:

        return 0
1084:
def isPdf(path,folder_name):
    """Check whether *folder_name* is listed in path/index.meta as a <dir>
    with content-type 'pdf' (case-insensitive). Returns 1/0; 0 on error.
    NOTE(review): nesting reconstructed from CVS annotate output."""
    try:
        dom=xml.dom.minidom.parse(path+"/index.meta")
        for node in dom.getElementsByTagName("dir"):

            if getText(node.getElementsByTagName("content-type")[0].childNodes).lower()=="pdf":

                if getText(node.getElementsByTagName("name")[0].childNodes)==folder_name:
                    return 1
        return 0
    except:

        return 0
1.1 dwinter 1099:
def isPresentation(path,folder_name):
    """Check whether *folder_name* is listed in path/index.meta as a <dir>
    with content-type 'presentation'. Returns 1/0; 0 on any error.
    NOTE(review): nesting reconstructed from CVS annotate output; <dir>
    elements lacking the expected children are skipped."""
    try:
        dom=xml.dom.minidom.parse(path+"/index.meta")
        #print dom.toxml()
        for dirnode in dom.getElementsByTagName("dir"):
            try:

                if getText(dirnode.getElementsByTagName('content-type')[0].childNodes)=='presentation':
                    if getText(dirnode.getElementsByTagName("name")[0].childNodes)==folder_name:
                        return 1
            except:
                """nothing"""
        return 0
    except:

        return 0
1117:
1118:
1119:
1120:
1121:
def changeName(path):
    """Map an absolute docuserver path to its web-visible relative path.

    Paths under .../mpiwg/online/ are returned relative to that prefix;
    paths under .../mpiwg/production/docuserver/ get a "digifiles/"
    prefix.  Returns "NO" when neither matches (the archive area has no
    online representation).
    """
    try:
        online = re.search(r"(.*)/mpiwg/online/(.*)", path)
        if online is not None:
            return online.group(2)
        production = re.search(r"(.*)/mpiwg/production/docuserver/(.*)", path)
        return "digifiles/" + production.group(2)
    except:  # no match at all -> archive area, no online view
        return "NO"
1131:
1132:
def test(self):
    # Developer scratch method: creates a DTML method 'neu' on the fly and
    # edits its body.  Left byte-identical.
    # NOTE(review): `self.getattr('neu')` looks wrong -- Python objects have
    # no `getattr` method; presumably `getattr(self, 'neu')` was intended.
    # Verify before relying on this helper.
    self.i=1
    #newtemplate=PageTemplateFile('/usr/local/mpiwg/Zope/Extensions/test').__of__(self)
    self.manage_addProduct['OFSP'].manage_addDTMLMethod('neu','neu')
    self.getattr('neu').manage_edit('HELLO','neu')
    return "ok"
1139:
1140:
class ls(Implicit, Persistent, RoleManager):
    """File entry class"""
    # Class-level defaults for the listing fields.
    path = ""
    user = ""
    month = ""
    date =""
    time = ""

    security=ClassSecurityInfo()

    def __init__(self,start):
        # Spawn a background recursive listing of "/" into a temp file and
        # register this instance in `start` (apparently a Zope SESSION
        # mapping) under the key 'outfile'.
        # NOTE(review): tempfile.mktemp() is race-prone (another process can
        # claim the name before it is used) -- confirm before reuse.
        self.outfile=tempfile.mktemp()
        start['outfile']=self
        os.popen("ls -R / >"+self.outfile+" &","r")


    security.declarePublic('read')
    def read(self):
        # NOTE(review): `self.f` is never assigned anywhere in this class;
        # presumably a caller sets it externally before using read()/all().
        return self.f.read()
    security.declarePublic('retself')
    def retself(self):
        # Return the instance itself (for session/acquisition access).
        return self
    security.declarePublic('all')
    def all(self):
        # Concatenate every line of `self.f` (see NOTE on read()).
        ret=""
        for g in self.f:
            ret=ret+g
        return ret

    security.declarePublic('printOutfile')
    def printOutfile(self):
        # Busy-wait until the background `ls` has created the output file,
        # then return its lines.  NOTE(review): spins at full CPU and never
        # returns if the spawned command failed to create the file.
        while not os.path.exists(self.outfile):
            """nothing"""
        return open(self.outfile).readlines()
1175:
class overview(Implicit,Persistent, RoleManager):
    """Overview of archiver log files found in one folder.

    Maps the "resource" line of each log file to the file's full path and
    renders per-resource status messages.
    """
    # Class-level defaults kept for backward compatibility; real state is
    # per-instance (see __init__).
    dir=[]
    resources={}
    security=ClassSecurityInfo()

    def __init__(self,path):
        """Scan *path* and map each file's resource line to its full path."""
        # BUGFIX: `resources` was only a class attribute, so every instance
        # mutated (and accumulated into) one shared dict. Make it
        # per-instance.
        self.resources={}
        for filename in os.listdir(path):
            self.resources[self.getResource(path,filename)]=path+"/"+filename


    def getResource(self,path,filename):
        """Return the first "INFO: resource ..." line of the file, or "error"."""
        f=open(path+"/"+filename,'r')
        for line in f.readlines():
            # Log lines use fixed columns: "INFO: resource ...".
            if line[0:4]=="INFO":
                if line[6:14]=="resource":
                    return line
        return "error"

    def parsearchive(self,str):
        """Parse archiver output lines for the final status.

        Returns [html, status]: *html* is all lines joined with <br>,
        *status* is "error" (last line ABORT), "ok" (last line DONE) or
        "running" (anything else / empty input).
        """
        retstr=''
        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                # Each line is "STATUS: message"; the status of the last
                # line wins.
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"
            return [retstr,error]
        else:
            return ['','running']

    security.declarePublic('messages')
    def messages(self):
        """Return one Error entry per registered resource log file."""
        self.lines=[]
        for name in self.resources.keys():
            path=self.resources[name]
            self.lines.append(Error([name,self.parsearchive(open(path,"r").readlines())]))
        return self.lines

    security.declarePublic('printResource')
    def printResource(self):
        """Return the raw resource-line -> file-path mapping."""
        return self.resources
1229:
def getoverview(path):
    """Factory wrapper: build an overview object for *path*."""
    result = overview(path)
    return result
1233:
1234:
def ls_test(self):
    """Kick off a background directory listing and redirect to 'next'."""
    # The ls() constructor stores itself in the session as a side effect.
    ls(self.REQUEST.SESSION)
    return self.REQUEST.RESPONSE.redirect('next')
1238:
def storeFile(self,something):
    """Stash *something* in the Zope session under the key 'something'."""
    session = self.REQUEST.SESSION
    session['something'] = something
    return 1
1242:
def getFile(self):
    """Return the object previously stored by storeFile()."""
    session = self.REQUEST.SESSION
    return session['something']
1245:
def isFolder(self,path):
    """Return TRUE if *path* is a folder that may receive further folders or documents."""
    # For now, every document that is not a scanned document counts as a
    # folder.
    return not isScannedDocument(self,path)
1249:
def isScannedDocument(self,path):
    """Return TRUE if *path* is the root folder of a scanned document."""
    try:
        meta = file(path+"/index.meta","r")
        xml_text = meta.read()
        try:
            dom = xml.dom.minidom.parseString(xml_text)
            content_type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
            # Several historical spellings of the content type are accepted.
            if content_type in ("scanned-document","scanned document","fulltext document"):
                return 1
            return 0
        except:  # unparsable metadata
            return 0
    except:  # no readable index.meta
        return 0
1267:
1268: from time import localtime,strftime
1269:
def date(self):
    """Return today's date formatted as DD.MM.YYYY."""
    now = localtime()
    return strftime("%d.%m.%Y", now)
1272:
1273:
1274:
FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>