Annotation of OSAS/OSA_system/OSAS_show.py, revision 1.22
1.15 dwinter 1: """ Classes for displaying, browsing and organizing the archive
2: 20040303 Needs configuration for rescaling thumbs
1.1 dwinter 3:
1.15 dwinter 4: """
1.4 dwinter 5:
1.2 dwinter 6: import addFolder
1.20 dwinter 7: import OSAS_helpers
1.1 dwinter 8: from OFS.Folder import Folder
9: from OFS.SimpleItem import SimpleItem
10: from Products.PageTemplates.PageTemplateFile import PageTemplateFile
11: from Products.PageTemplates.PageTemplate import PageTemplate
1.8 dwinter 12: from AccessControl import ClassSecurityInfo
13: from Globals import InitializeClass
1.1 dwinter 14:
class OSAS_ShowOnline(SimpleItem):
    """Read-only web view onto the online archive.

    All real work is delegated to the module-level helpers defined later in
    this file (filesystem2, browse, path_to_link_view, isdigilib2, ...).
    """
    security=ClassSecurityInfo()

    """OnlineBrowser"""
    def __init__(self,id):
        """initialize a new instance"""
        self.id = id

    # Zope registration type of this object
    meta_type="OSAS_ShowOnline"

    security.declareProtected('View','index_html')
    def index_html(self):
        """main view"""
        # render the file-browser template in this object's acquisition context
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_ViewFiles.zpt').__of__(self)
        return pt()

    def getfilesystem2(self,start,reload=0):
        """load filesystem"""
        # NOTE(review): filesystem2.__init__ ignores its arguments (does not
        # scan) -- confirm callers rely only on its path-based methods
        k=filesystem2(start,1)
        return k

    def tree(self,start):
        """get the filetree"""
        k=browse(start)
        return k

    def path_to_link_view(self,path):
        """generates navigation bar for viewfiles"""
        return path_to_link_view(self.REQUEST['URL'],path)

    def isdigilib2(self,path):
        """check if digilib"""
        return isdigilib2(path)

    def changeName(self,name):
        # delegate to the module-level helper of the same name
        return changeName(name)

    def hasMetafile(self,path):
        # True iff path contains an index.meta file
        return hasMetafile(path)

    def getMetafile(self,path):
        # HTML rendering of path's index.meta
        return getMetafile(path)

    def toggle_view(self,path,file):
        """Open or close the subfolder display."""
        self.tree(path).toggle(path,file)
        return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1']+"?path="+path)


InitializeClass(OSAS_ShowOnline)
66:
def manage_AddOSAS_ShowOnlineForm(self):
    """interface for adding the OSAS_root"""
    form = PageTemplateFile('Products/OSA_system/zpt/AddOSAS_ShowOnline.zpt').__of__(self)
    return form()
71:
def manage_AddOSAS_ShowOnline(self,id,RESPONSE=None):
    """add the OSAS_root"""
    self._setObject(id, OSAS_ShowOnline(id))
    if RESPONSE is not None:
        RESPONSE.redirect('manage_main')
78:
79:
class OSAS_StoreOnline(SimpleItem):
    """Web frontend for the storage system (browse folders, edit index.meta,
    rescale thumbnails, change templates, archive documents)."""
    security=ClassSecurityInfo()

    def __init__(self,id):
        """initialize a new instance"""
        self.id = id

    # Zope registration type of this object
    meta_type="OSAS_StoreOnline"

    security.declareProtected('View','index_html')
    def index_html(self):
        """main view"""
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_StoreFiles.zpt').__of__(self)
        return pt()

    def readContexts(self,path):
        """Show the contexts from index.meta of *path*; [] if none exists."""
        if os.path.exists(path+"/index.meta"):

            return readContexts(path)

        else:

            return []

    def rescaleThumbs(self,path):
        """rescale thumbs of images in path"""
        # fire-and-forget: run scaleomat on the image server via ssh; output
        # appended to /tmp/sc.out (module header: "Needs configuration")
        os.popen("ssh archive@nausikaa2.rz-berlin.mpg.de /usr/local/mpiwg/scripts/scaleomat.pl %s /mpiwg/temp/online/scaled/thumb 90 --replace >> /tmp/sc.out &"% re.sub('/mpiwg/online/','',self.REQUEST['path']))

        self.REQUEST.SESSION['path']=self.REQUEST['path']
        #return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1'])
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_scaled.zpt').__of__(self)
        return pt()


    def getfilesystem2(self,start,reload=0):
        """load filesystem"""
        k=filesystem2(start,1)
        return k

    def tree(self,start):
        """get the filetree"""
        k=browse(start)
        return k

    def path_to_link_store(self,path):
        """generates navigation bar for viewfiles"""
        return path_to_link_store(self.REQUEST['URL'],path)

    def isdigilib2(self,path):
        """check if digilib"""
        return isdigilib2(path)

    def changeName(self,name):
        # delegate to the module-level helper of the same name
        return changeName(name)

    def hasMetafile(self,path):
        # True iff path contains an index.meta file
        return hasMetafile(path)

    def getMetafile(self,path):
        # HTML rendering of path's index.meta
        return getMetafile(path)

    def toggle_view(self,path,file):
        """Open or close the subfolder display."""
        self.tree(path).toggle(path,file)
        return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1']+"?path="+path)

    def isFolder(self,path):
        """Test whether the entry is a folder."""
        return isFolder(self,path)

    def isScannedDocument(self,path):
        """Test whether the entry is a scanned document."""
        return isScannedDocument(self,path)

    def isFullText(self,path,folder_name):
        """Test whether *folder_name* below *path* holds full text."""
        return isFullText(path,folder_name)

    def isPresentation(self,path,folder_name):
        """Test whether *folder_name* below *path* is a presentation dir."""
        return isPresentation(path,folder_name)

    def date(self):
        # today's date, day-first (DD.MM.YYYY)
        return strftime("%d.%m.%Y",localtime())

    def addFolderForm(self,path):
        """add a new path"""
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_addFolder.zpt').__of__(self)
        return pt()

    def showHelp(self,refType,genericTag):
        """helptext"""
        # search the reference objects below self.standardMD for refType
        for reference in self.ZopeFind(self.standardMD):
            if reference[1].title==refType:
                text=getattr(reference[1],'md_'+genericTag)[2]
                return text
        return "NO EXPLANATION"

    def showHelpTag(self,url,reftype,item):
        """gen javascript for showhelp"""
        url2=url+'/showHelp?refType=%s&genericTag=%s'%(reftype,item)
        ret="""javascript:wd=window.open(\'%s\',\'Help\',\'width=300,height=250\');void(\'\');wd.focus();"""%url2
        return ret

    def addFolder(self,path,folder_name,description,archive_creation_date,creator):
        """add the folder to the filesystem and write the metadata files"""
        return addFolder.addFolder(self,path,folder_name,description,archive_creation_date,creator)

    def EditIndex(self,path):
        """Edit the index metafile of *path*."""
        try:
            dom=xml.dom.minidom.parse(path+"/index.meta")
            indexmeta=dom.toxml()
        except:
            indexmeta=""

        self.REQUEST.SESSION['indexmeta']=indexmeta
        self.REQUEST.SESSION['path']=path
        newtemplate=PageTemplateFile('Products/OSA_system/zpt/editindex').__of__(self)
        return newtemplate()

    def EditIndex2(self):
        """Save the changes to index.meta."""
        if not self.REQUEST.has_key('fileupload'):
            #newtext=urllib.unquote(self.REQUEST['indexmeta'])
            newtext=self.REQUEST['indexmeta']

        else:
            self.file_name=self.REQUEST['fileupload'].filename
            #newtext=self.REQUEST.form['fileupload'].read()
            # HACK DW
            newtext=self.REQUEST['indexmeta']

        indexmeta=file(self.REQUEST.SESSION['path']+"/index.meta","w")
        indexmeta.writelines(newtext)
        return self.REQUEST.response.redirect(self.REQUEST['URL1']+"?path="+self.REQUEST.SESSION['path'])

    def add_metafile(self):
        """nothing"""
        pt=PageTemplateFile('Products/OSA_system/zpt/OSAS_addmetadata.zpt').__of__(self)
        return pt()

    def getTextToolsField(self,path,name,default=''):
        """Read the texttool field *name* from index.meta in *path*."""

        try:
            dom=xml.dom.minidom.parse(path+"/index.meta")
            node=dom.getElementsByTagName('texttool')[0] #getNode
            subnode=node.getElementsByTagName(name)[0]

            # for 'text' only the containing folder name is needed
            if name=="text":
                splitted=getText(subnode.childNodes).split("/")
                return splitted[len(splitted)-2]
            else:
                return getText(subnode.childNodes)
        except:
            return default

    def getProjects(self,obj_ids=None):
        """Get the Project title for configuration"""
        ret=[]

        try:
            projects=self.ZopeFind(self.projects,obj_metatypes=['OSAS_project'],obj_ids=obj_ids)#assumes projects folder somewhere in the hierarchie.

            for project in projects:
                ret.append((project[1].title,project[0],project[1]))

            return ret

        except:
            return [('no Projectfolders','')]

    def changeTemplatesForm(self,path):
        """changeform"""
        path=self.REQUEST.SESSION['path']=path
        pt=PageTemplateFile('Products/OSA_system/zpt/changeTemplatesForm.zpt').__of__(self)
        return pt()



    def changeTemplates(self,path,RESPONSE=None):
        """Change all index.meta entries below *path*."""
        # walk the tree, applying the submitted form to every index.meta
        os.path.walk(path,OSAS_helpers.changeIndexMeta,self.REQUEST.form)
        if RESPONSE is not None:
            RESPONSE.redirect(self.REQUEST['URL1']+"?path="+self.REQUEST.SESSION['path'])


InitializeClass(OSAS_StoreOnline)
1.2 dwinter 274:
def manage_AddOSAS_StoreOnlineForm(self):
    """interface for adding the OSAS_root"""
    form = PageTemplateFile('Products/OSA_system/zpt/AddOSAS_StoreOnline.zpt').__of__(self)
    return form()
279:
def manage_AddOSAS_StoreOnline(self,id,RESPONSE=None):
    """add the OSAS_root"""
    self._setObject(id, OSAS_StoreOnline(id))
    if RESPONSE is not None:
        RESPONSE.redirect('manage_main')
286:
1.1 dwinter 287:
288:
289:
def readContexts(path):
    """Read all <context> entries from *path*/index.meta.

    Returns a list of (link, name) tuples; contexts lacking a link or a
    name element are skipped silently.
    """
    dom = xml.dom.minidom.parse(path + "/index.meta")
    contexts = []
    for node in dom.getElementsByTagName('context'):
        try:
            link = getText(node.getElementsByTagName('link')[0].childNodes)
            name = getText(node.getElementsByTagName('name')[0].childNodes)
            contexts.append((link, name))
        except:
            pass  # incomplete context element
    return contexts
305:
306:
1.1 dwinter 307: ### Ab hier Baustelle
308:
309:
310: from types import *
311: import urllib
312: import os
313: import sys
314: import re
315: from AccessControl import ClassSecurityInfo
316: from AccessControl.Role import RoleManager
317: from Acquisition import Implicit
318: from Globals import Persistent
1.16 dwinter 319: try:
320: from time import strptime
321: except:
322: print "ignoring time.strptime import"
323:
1.1 dwinter 324: from time import strftime
325: import time
326: import os.path
327: import dircache
328: import xml.dom.minidom
329: from Products.PageTemplates.PageTemplateFile import PageTemplateFile
330: from Products.PageTemplates.PageTemplate import PageTemplate
331: import tempfile
332: tempfile.tempdir="/var/tmp/archiver"
333:
# directory entries that are OS/system artifacts and never shown
exclusion=[".HSResource","lost+found","Network Trash Folder","TheFindByContentFolder","TheVolumeSettingsFolder"]
class fsentry(Implicit, Persistent, RoleManager):
    """File entry class"""
    # class-level defaults; the real values are set per instance in __init__
    path = ""
    user = ""
    month = ""
    date =""
    time = ""

    security=ClassSecurityInfo()
    def __init__(self,extpath):
        """initialize class"""
        # input is one newline-terminated line of `find` output
        extpath=os.path.abspath(re.search(r"(.*)\n",extpath).group(1))
        self.all=extpath
        self.path=extpath
        self.user=""
        self.mtime=os.path.getmtime(extpath)


    security.declarePublic('getPath')
    def getPath(self):
        """Return the path."""
        return self.path

    security.declarePublic('getUser')
    def getUser(self):
        """Return the user (always empty here)."""
        return self.user

    security.declarePublic('getDate')
    def getDate(self):
        """Return the mtime formatted as YYYYMMDDhhmm (GMT)."""
        return strftime("%Y%m%d%H%M",time.gmtime(self.mtime))

    # NOTE(review): duplicate declaration name ('getDate' again, not 'getID')
    security.declarePublic('getDate')
    def getID(self):
        """Return a unique, sortable id (date string + path)."""
        return self.getDate()+self.getPath()

    security.declarePublic('getTime')
    def getTime(self):
        """Return the (unset) time attribute."""
        return self.time
    security.declarePublic('getAll')
    def getAll(self):
        """Return the full original path."""
        return self.all
381:
class filesystem(Implicit, Persistent, RoleManager):
    """store filesystem"""
    # NOTE(review): class-level mutable dicts -- instances share them unless
    # constructed with reload=1; confirm this caching is intended
    node={}
    hasindex={}
    security=ClassSecurityInfo()

    def getfs(self,start):
        """load filessystem"""
        # every path below *start*, one per line, via find(1)
        f = os.popen("find "+ start+" -name '*' ","r")
        lines = f.readlines()

        return lines

    def loadfs(self,start):
        """analyse filesystem"""
        for line in self.getfs(start):

            # split "dir/file\n" into directory part and file name
            g=re.search(r"(.*/)(.*)\n",line)
            if not g==None:
                path=g.group(1)
                file=g.group(2)
                if self.node.has_key(path):
                    elements=self.node[path]
                    elements.append(file)
                    self.node[path]=elements
                else:
                    self.node[path]=[file]
                # remember directories carrying metadata
                if (file=="index.meta") | (file=="meta"):
                    self.hasindex[path]="1"

    def __init__(self,start,reload=0):
        # reload=1 drops the (class-shared) caches before scanning
        if reload==1:
            self.node={}
            self.hasindex={}
        self.loadfs(start)


    security.declarePublic('getNode')
    def getNode(self):
        """Return the directory -> entries mapping."""
        return self.node

    security.declarePublic('getKeys')
    def getKeys(self):
        """Return all known directory paths."""
        return self.node.keys()

    security.declarePublic('clearnode')
    def clearnode(self):
        """Drop the directory cache."""
        self.node={}
        return 0

    security.declarePublic('hasIndex')
    def hasIndex(self,path):
        """True if *path* (with trailing slash) contained index.meta or meta."""
        return self.hasindex.has_key(path)


    def onlyIndex_old(self):
        """return only files with archive material"""
        j={}
        for k in self.node:
            if self.hasindex.has_key(k):
                if len(self.node[k])>1:
                    if (len(self.node[k])==2) & ('meta' not in self.node[k]):
                        j[k]=self.node[k]
                    elif (len(self.node[k])==2) & ('meta' in self.node[k]):
                        """ nothing """
                    else:
                        j[k]=self.node[k]
        return j

    def archive_the_path(self,path):
        """Decide whether *path* should be archived (1) or not (0)."""

        try:
            #f = os.popen("cat "+path+"/index.meta","r")
            f =file(path+"/index.meta","r")

            lines = f.read()

            try:
                dom = xml.dom.minidom.parseString(lines)
                if dom.getElementsByTagName("content-type"):
                    if getText(dom.getElementsByTagName("content-type")[0].childNodes)=="folder":
                        # folders are never archived
                        """folder nicht archivieren"""
                        return 0

                archive_storage_date=getText(dom.getElementsByTagName("archive-storage-date")[0].childNodes)

                if archive_storage_date=="":

                    # empty date -> not archived yet -> archive
                    """leer also archivieren"""
                    return 1
                else:
                    # already archived
                    """nicht archivieren"""
                    return 0
            except:
                # tag missing -> archive
                """kein tag also archivieren"""
                return 1
        except:
            # no readable index.meta -> do not archive
            """kein index.meta also nicht archivieren"""
            return 0

    security.declarePublic('onlyIndex')
    def onlyIndex(self):
        """return only files with archive material (archive-storage-date not set)"""
        j={}

        for k in self.node:
            if self.archive_the_path(k):
                j[k]=self.node[k]
        return j
    security.declarePublic('getImageDirs')
    def getImageDirs(self,dom,path):
        """Collect <dir><name> entries, relative to /mpiwg/online/ if possible."""
        dirs=dom.getElementsByTagName("dir")
        dirback=[]
        for dir in dirs:
            temp=getText(dir.getElementsByTagName("name")[0].childNodes)
            temp2=re.search(r"(.*)/mpiwg/online/(.*)",path+"/"+temp)
            if not temp2==None:
                try:
                    dirback.append(temp2.group(2))
                except:
                    """nothing"""
            else:
                dirback.append(temp)
        return dirback




    security.declarePublic('digilib')
    def digilib(self, path):
        """check if folder is a container for digilib files"""
        if self.hasindex.has_key(path+"/"):
            return(self.parseIndexMeta(path))
        else:
            return "NO"




    security.declarePublic('isdigilib')
    def isdigilib(self, path):
        """return number of possible image directories usefull for digilib"""
        if self.hasindex.has_key(path+"/"):
            return(len(self.parseIndexMeta(path)))
        else:
            return 0

    security.declarePublic('parseIndexMeta')
    def parseIndexMeta(self,k):
        """parse indexmeta and return digilib path"""
        f = os.popen("cat "+k+"/index.meta","r")
        lines = f.read()

        try:
            dom = xml.dom.minidom.parseString(lines)
            content_type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
            if (content_type=="scanned-document") or (content_type=="scanned document"):
                dirs=self.getImageDirs(dom,k)

                return dirs
        except:
            return []
547:
class filesystem2(Implicit, Persistent, RoleManager):
    """store filesystem"""
    # NOTE(review): unlike `filesystem`, __init__ here never calls loadfs,
    # so node/hasindex stay empty; digilib/isdigilib check the disk directly
    node={}
    hasindex={}
    security=ClassSecurityInfo()

    def getfs(self,start):
        """load filessystem"""

        f = os.popen("find "+ start+" -name '*' ","r")
        lines = f.readlines()

        return lines

    def loadfs(self,start):
        """analyse filesystem"""
        for line in self.getfs(start):

            # split "dir/file\n" into directory part and file name
            g=re.search(r"(.*/)(.*)\n",line)
            if not g==None:
                try:
                    path=g.group(1)
                    file=g.group(2)
                except:
                    """nothing"""
                if self.node.has_key(path):
                    elements=self.node[path]
                    elements.append(file)
                    self.node[path]=elements
                else:
                    self.node[path]=[file]
                if (file=="index.meta") | (file=="meta"):
                    self.hasindex[path]="1"

    def __init__(self,start,reload=0):
        """nothing"""


    security.declarePublic('getImageDirs')
    def getImageDirs(self,dom,path):
        """Collect <dir><name> entries, relative to /mpiwg/online/ if possible."""
        dirs=dom.getElementsByTagName("dir")
        dirback=[]
        for dir in dirs:
            temp=getText(dir.getElementsByTagName("name")[0].childNodes)
            temp2=re.search(r"(.*)/mpiwg/online/(.*)",path+"/"+temp)
            if not temp2==None:
                try:
                    dirback.append(temp2.group(2))
                except:
                    """nothing"""
            else:
                dirback.append(temp)
        return dirback


    security.declarePublic('digilib')
    def digilib(self, path):
        """check if folder is a container for digilib files"""
        if os.path.exists(path+"/index.meta"):
            return(self.parseIndexMeta(path))
        else:
            return "NO"

    security.declarePublic('isdigilib')
    def isdigilib(self, path):
        """Return the number of image directories usable for digilib."""
        if os.path.exists(path+"/index.meta"):
            return(len(self.parseIndexMeta(path)))
        else:
            return 0
    security.declarePublic('parseIndexMeta')
    def parseIndexMeta(self,k):
        """parse indexmeta and return digilib path"""
        f = os.popen("cat "+k+"/index.meta","r")
        lines = f.read()

        try:
            dom = xml.dom.minidom.parseString(lines)
            content_type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
            if content_type=="scanned-document":
                dirs=self.getImageDirs(dom,k)

                return dirs
        except:
            return []
633:
class browse(Implicit, Persistent, RoleManager):
    """Lazy directory-tree browser with per-path open/closed state."""

    security=ClassSecurityInfo()
    # NOTE(review): class-level mutable dicts -- toggle state is shared
    # between instances; confirm this is intended
    tree={}
    toggledict={}

    def filterExcluded(self,dir):
        # drop OS/system artifacts (module-level `exclusion` list)
        ret=[]
        for item in dir:
            if not item in exclusion:
                ret.append(item)
        return ret

    def __init__(self,startpath):
        self.tree={}
        self.tree[startpath]=self.filterExcluded(dircache.listdir(startpath))

    security.declarePublic('getTree')
    def getTree(self,path):
        # cached directory listing, filled on demand
        if self.tree.has_key(path):
            return self.tree[path]
        else:
            self.tree[path]=self.filterExcluded(dircache.listdir(path))
            return self.tree[path]

    security.declarePublic('isDirectory')
    def isDirectory(self,path,file):
        return os.path.isdir(os.path.abspath(path+"/"+file))

    security.declarePublic('toggle')
    def toggle(self,tmppath,file):
        """Flip the open/closed state of tmppath/file."""
        path=tmppath+"/"+file

        if self.toggledict.has_key(path):
            if self.toggledict[path]==0:
                self.toggledict[path]=1

            else:
                self.toggledict[path]=0

        else:
            # first toggle of this path; 4 is simply a truthy "open" marker
            self.toggledict[path]=4


    security.declarePublic('isToggle')
    def isToggle(self,tmppath,file):
        """Return the toggle state for tmppath/file (0 if never toggled)."""
        path=tmppath+"/"+file

        if self.toggledict.has_key(path):

            return self.toggledict[path]
        else:

            return 0
688:
689:
def getfs(start):
    """Return every path below *start* as newline-terminated strings."""
    # shell out to find(1); lines keep their trailing newlines
    finder = os.popen("find "+ start+" -name '*'","r")
    return finder.readlines()
697:
698: def showall(start):
699: lines = getfs(start)
700: for line in lines:
701: print line
702: return 0
703:
def entries(start):
    """Return a list of fsentry objects for every existing path below *start*."""
    result = []
    for line in getfs(start):
        try:
            # the line carries a trailing newline; check the bare path exists
            if os.path.exists(os.path.abspath(re.search(r"(.*)\n", line).group(1))):
                result.append(fsentry(line))
        except:
            pass  # unparsable line or vanished file
    return result
717:
def getfilesystem(start,reload=0):
    """Build a freshly loaded filesystem object rooted at *start*."""
    # note: always passes reload=1; the *reload* parameter is kept for callers
    return filesystem(start, 1)
723:
724:
725:
def sort_by_date(fs):
    """Return the file entries of *fs* sorted by their ID, newest first."""
    by_id = {}
    ids = []
    for entry in fs:
        key = entry.getID()
        ids.append(key)
        by_id[key] = entry
    ids.sort()
    ids.reverse()
    return [by_id[key] for key in ids]
739:
def path_to_link(path):
    """Build the HTML breadcrumb bar for the showfiles view."""
    crumbs = [[path, os.path.basename(path)]]
    parent = os.path.dirname(path)
    depth = 0

    # walk up to the root (capped at 20 levels, like the other breadcrumb helpers)
    while len(parent) != 1:
        depth += 1
        if depth > 20:
            break
        crumbs.append([parent, os.path.basename(parent)])
        parent = os.path.dirname(parent)

    pieces = []
    while depth >= 0:
        pieces.append("<a href=showfiles?path=" + crumbs[depth][0] + ">" + crumbs[depth][1] + "</a>/")
        depth -= 1
    return "".join(pieces)
761:
def path_to_link_view(URL,path):
    """Build the HTML breadcrumb bar for the viewfiles view, linking to *URL*."""
    crumbs = [[path, os.path.basename(path)]]
    parent = os.path.dirname(path)
    depth = 0

    while len(parent) != 1:
        depth += 1
        if depth > 20:
            break
        crumbs.append([parent, os.path.basename(parent)])
        parent = os.path.dirname(parent)

    pieces = []
    while depth >= 0:
        pieces.append("<a href=" + URL + "?path=" + crumbs[depth][0] + ">" + crumbs[depth][1] + "</a>/")
        depth -= 1
    return "".join(pieces)
783:
def path_to_link_store(URL,path):
    """Build the HTML breadcrumb bar for the store view, linking to *URL*."""
    crumbs = [[path, os.path.basename(path)]]
    parent = os.path.dirname(path)
    depth = 0

    while len(parent) != 1:
        depth += 1
        if depth > 20:
            break
        crumbs.append([parent, os.path.basename(parent)])
        parent = os.path.dirname(parent)

    pieces = []
    while depth >= 0:
        pieces.append("<a href=" + URL + "?path=" + crumbs[depth][0] + ">" + crumbs[depth][1] + "</a>/")
        depth -= 1
    return "".join(pieces)
805:
806:
class Error(Implicit, Persistent, RoleManager):
    """Container pairing a path with its parsed checker output."""

    # holds [path, [output_html, status]] as produced by parsearchive callers
    error=[]
    security=ClassSecurityInfo()
    def __init__(self,initerror):
        # store a shallow copy of the given list
        self.error=initerror[0:]

    security.declarePublic('getError')
    def getError(self):
        """Return the stored [path, report] list."""
        return self.error
817:
class metacheck(Implicit, Persistent, RoleManager):
    """Run the external metacheck script on one or more paths, keeping output."""
    lines=[]
    security=ClassSecurityInfo()
    def parsearchive(self,str):
        """parse for error"""
        retstr=''

        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                # each line looks like "STATUS: message"; the last line wins
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"

            return [retstr,error]
        else:
            return ['','running']
    def __init__(self,path):
        """Run metacheck on *path* (a string or a list of strings)."""
        self.lines=[]

        if type(path)==StringType:
            f = os.popen("/usr/local/mpiwg/archive/metacheck "+path,"r")
            self.lines.append(Error([path,self.parsearchive(f.readlines())]))
        else:
            for singlepath in path:
                f = os.popen("/usr/local/mpiwg/archive/metacheck "+singlepath,"r")
                self.lines.append(Error([singlepath,self.parsearchive(f.readlines())]))
    security.declarePublic('messages')

    def messages(self):
        """Return the collected Error objects."""
        return self.lines
854:
855:
856:
857:
class archive(Implicit, Persistent, RoleManager):
    """Start the external archiver in the background and poll its output."""
    lines=[]
    security=ClassSecurityInfo()
    def parsearchive(self,str):
        """parse for error"""
        retstr=''

        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                # each line looks like "STATUS: message"; the last line wins
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"

            return [retstr,error]
        else:
            return ['','running']

    def __init__(self,path,session):
        """archive the documents in path"""
        self.lines=[]
        self.filenames={}
        # make this job reachable from the progress page
        session['archiver']=self


        if type(path)==StringType:
            self.filenames[path]=tempfile.mktemp()
            f = os.popen("/usr/local/mpiwg/archive/archiver "+path+" > "+self.filenames[path]+" &","r")
        else:
            for singlepath in path:
                self.filenames[singlepath]=tempfile.mktemp()
                f = os.popen("/usr/local/mpiwg/archive/archiver "+singlepath+" > "+self.filenames[singlepath]+" &","r")

    security.declarePublic('messages')
    def messages(self):
        """Re-read every output file and return current Error objects."""
        self.lines=[]
        for path in self.filenames.keys():

            self.lines.append(Error([path,self.parsearchive(open(self.filenames[path],"r").readlines())]))
        return self.lines
902:
903:
def evalext(str):
    """Evaluate *str* as a Python expression and return the result.

    SECURITY: eval() on externally supplied strings executes arbitrary
    code; only call this with trusted input (left unchanged for
    compatibility with existing templates).
    """
    return eval(str)
906:
def storeerror(ret,path,context,i):
    """Stash error report *ret* and *path* in the session under slot *i*.

    Returns the relative URL of the error page for that slot.
    """
    session = context.REQUEST.SESSION
    session['error%i' % i] = ret
    session['path%i' % i] = path
    return 'error?number=%i' % i
913:
def geterror(str,context):
    """Fetch the session value previously stored under key *str*."""
    return context.REQUEST.SESSION[str]
917:
def readfile(path):
    """Return the entire contents of the file at *path* as one string.

    Fixes two defects of the original: the file handle was never closed
    (leak) and the content was rebuilt line-by-line with repeated string
    concatenation (quadratic); a single read() is equivalent and closes
    the handle deterministically.
    """
    f = open(path, 'r')
    try:
        return f.read()
    finally:
        f.close()
925:
def writefile(self,path,txt,REQUEST):
    """Overwrite *path* with *txt*, then render the acquired 'archive2' view."""
    out = open(path, 'w')
    out.write(txt)
    out.close()
    render = self.aq_acquire('archive2')
    return render()
932:
933:
def metachecker(self,path):
    """Record the requested path in the session and run a metadata check."""
    session = self.REQUEST.SESSION
    session['path'] = self.REQUEST['path']
    return metacheck(path)
938:
def archiver(self,path):
    """Kick off background archiving of *path* and show the progress page."""
    # the archive constructor registers itself in the session ('archiver')
    archive(path, self.REQUEST.SESSION)
    return self.REQUEST.RESPONSE.redirect('archive4')
943:
def getText(nodelist):
    """Concatenate the character data of all text nodes in *nodelist*."""
    pieces = [node.data for node in nodelist if node.nodeType == node.TEXT_NODE]
    return "".join(pieces)
951:
def getBib(nodelist):
    """Render the element children of a <bib> node as a two-column HTML table."""
    rows = ["<table border='0'>"]
    for node in nodelist:
        if node.nodeType == node.ELEMENT_NODE:
            rows.append("<tr><td valign='right'>" + str(node.nodeName) + ":</td><td> " + getText(node.childNodes) + "</td></tr>")
    return "".join(rows) + "</table>"
962:
def getMetafile(path):
    """Read *path*/index.meta and render it as an HTML summary.

    Returns the string "NO_METADATA" when no index.meta exists. Fix over
    the original: the file is read directly and closed instead of
    spawning `cat` through os.popen (which leaked the pipe handle).
    """
    html=[]
    if not os.path.exists(path+"/index.meta"):

        return "NO_METADATA"
    else:
        # read the metadata file directly (was: os.popen("cat ..."))
        f = open(path+"/index.meta","r")
        lines = f.read()
        f.close()
        dom = xml.dom.minidom.parseString(lines)
        try:
            name=getText(dom.getElementsByTagName("name")[0].childNodes)
        except:
            name="NOT_DEFINED!!!"
        try:
            creator=getText(dom.getElementsByTagName("creator")[0].childNodes)
        except:
            creator="NOT_DEFINED!!!"

        try:
            creation_date=getText(dom.getElementsByTagName("archive-creation-date")[0].childNodes)
        except:
            creation_date="NOT_DEFINED!!!"

        try:
            description=getText(dom.getElementsByTagName("description")[0].childNodes)
        except:
            description="NOT_DEFINED!!!"

        try:
            type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
        except:
            type=""
        if type=="scanned document":
            html="<h3>Document: "+name+"</h3>"
        elif type=="folder":
            html="<h3>Folder: "+name+"</h3>"
        else:
            html="<h3>Document: "+name+"</h3>"

        html=html+"<p><i>created by: "+creator+" at: "+creation_date+"</i></p>"
        html=html+"<h4>Description</h4><p>"+description+"</p>"
        try:
            # optional bibliographic block: <meta><bib type="...">...</bib>
            bib = dom.getElementsByTagName("meta")[0].getElementsByTagName("bib")[0]
            if bib.attributes.has_key('type'):
                html=html+"<h4>Info ("+bib.attributes['type'].value+")</h4>"
            else:
                html=html+"<h4>Info</h4>"
            html=html+getBib(bib.childNodes)

        except:
            """none"""

        # html=html.encode('utf-8','replace')+getBib(bib.childNodes).encode('utf-8','replace')

        return html
1019:
def hasMetafile(path):
    """Return whether *path* contains an index.meta file."""
    return os.path.exists(path + "/index.meta")
1.17 dwinter 1024:
1025:
1.1 dwinter 1026:
def isdigilib2(path):
    """check if folder is candidate for digilib without metadata"""
    try:
        dir=os.listdir(path)

        imagesuffixes=['.gif','.jpg','.jpeg','.png','.tiff','.tif','.JPG','.TIFF','.TIF']
        ret=""
        # any file with an image suffix makes the folder a digilib candidate
        for a in dir:

            suffix=os.path.splitext(a)

            if suffix[1] in imagesuffixes:
                return 1

        # no image files: fall back to the parent's index.meta -- a <dir>
        # entry typed "images" whose name matches this folder also counts
        try:
            dom=xml.dom.minidom.parse(os.path.split(path)[0]+"/index.meta")
            for node in dom.getElementsByTagName("dir"):

                if getText(node.getElementsByTagName("content-type")[0].childNodes)=="images":

                    if getText(node.getElementsByTagName("name")[0].childNodes)==os.path.split(path)[1]:
                        return 1
            return 0
        except:

            return 0




    except:
        return 0
1060:
def isFullText(path,folder_name):
    """Return 1 if *folder_name* is listed as a fulltext dir in *path*/index.meta."""
    try:
        dom = xml.dom.minidom.parse(path + "/index.meta")
        for node in dom.getElementsByTagName("dir"):
            if getText(node.getElementsByTagName("content-type")[0].childNodes) == "fulltext":
                if getText(node.getElementsByTagName("name")[0].childNodes) == folder_name:
                    return 1
        return 0
    except:
        # unreadable/missing index.meta or malformed dir entry
        return 0
1075:
1076:
def isPresentation(path,folder_name):
    """Return 1 if *folder_name* is listed as a presentation dir in *path*/index.meta."""
    try:
        dom = xml.dom.minidom.parse(path + "/index.meta")
        #print dom.toxml()
        for dirnode in dom.getElementsByTagName("dir"):
            try:
                if getText(dirnode.getElementsByTagName('content-type')[0].childNodes) == 'presentation':
                    if getText(dirnode.getElementsByTagName("name")[0].childNodes) == folder_name:
                        return 1
            except:
                pass  # dir entry without content-type/name
        return 0
    except:
        # unreadable or missing index.meta
        return 0
1094:
1095:
1096:
1097:
1098:
def changeName(path):
    """Map an absolute archive path to its digilib-relative name.

    Paths under /mpiwg/online/ lose that prefix; paths under
    /mpiwg/production/docuserver/ are remapped below "digifiles/";
    anything else yields "NO" (no online representation).
    """
    try:
        online = re.search(r"(.*)/mpiwg/online/(.*)", path)
        if online is not None:
            return online.group(2)
        prod = re.search(r"(.*)/mpiwg/production/docuserver/(.*)", path)
        return "digifiles/" + prod.group(2)
    except:  # no known prefix (e.g. archive area)
        return "NO"
1108:
1109:
def test(self):
    """Experimental scratch function: create and edit a DTML method 'neu'.

    NOTE(review): `self.getattr('neu')` looks wrong (presumably should be
    getattr(self, 'neu')); left unchanged because this appears to be dead
    test code.
    """
    self.i=1
    #newtemplate=PageTemplateFile('/usr/local/mpiwg/Zope/Extensions/test').__of__(self)
    self.manage_addProduct['OFSP'].manage_addDTMLMethod('neu','neu')
    self.getattr('neu').manage_edit('HELLO','neu')
    return "ok"
1116:
1117:
class ls(Implicit, Persistent, RoleManager):
    """Background `ls -R /` runner; output is captured into a temp file."""
    # NOTE(review): these class attributes mirror fsentry and appear unused here
    path = ""
    user = ""
    month = ""
    date =""
    time = ""

    security=ClassSecurityInfo()

    def __init__(self,start):
        # *start* is a session-like mapping that keeps a handle on this job
        self.outfile=tempfile.mktemp()
        start['outfile']=self
        os.popen("ls -R / >"+self.outfile+" &","r")


    security.declarePublic('read')
    def read(self):
        # NOTE(review): self.f is never assigned anywhere in this class --
        # read/all look like dead code; confirm before removing
        return self.f.read()
    security.declarePublic('retself')
    def retself(self):
        return self
    security.declarePublic('all')
    def all(self):
        ret=""
        for g in self.f:
            ret=ret+g
        return ret

    security.declarePublic('printOutfile')
    def printOutfile(self):
        # busy-wait until the background job has created the output file
        while not os.path.exists(self.outfile):
            """nothing"""
        return open(self.outfile).readlines()
1152:
class overview(Implicit,Persistent, RoleManager):
    """Map resource names (found in log files below *path*) to file paths."""
    # NOTE(review): class-level mutable containers shared across instances
    dir=[]
    resources={}
    security=ClassSecurityInfo()

    def __init__(self,path):
        dir=os.listdir(path)

        for file in dir:
            self.resources[self.getResource(path,file)]=path+"/"+file


    def getResource(self,path,filename):
        # scan the file for the first "INFO: resource..." line
        f=file(path+"/"+filename,'r')

        for line in f.readlines():

            if line[0:4]=="INFO":
                if line[6:14]=="resource":
                    return line
        return "error"

    def parsearchive(self,str):
        """parse for error"""
        retstr=''

        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                # each line looks like "STATUS: message"; the last line wins
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"

            return [retstr,error]
        else:
            return ['','running']

    security.declarePublic('messages')
    def messages(self):
        """Re-read every resource file and return current Error objects."""
        self.lines=[]
        for name in self.resources.keys():
            path=self.resources[name]

            self.lines.append(Error([name,self.parsearchive(open(path,"r").readlines())]))
        return self.lines

    security.declarePublic('printResource')
    def printResource(self):
        """Return the name -> path mapping."""
        return self.resources
1206:
def getoverview(path):
    """Build an overview object for the log files below *path*."""
    return overview(path)
1210:
1211:
def ls_test(self):
    """Start a background ls job and redirect to the result page."""
    # the ls constructor registers itself in the session ('outfile')
    ls(self.REQUEST.SESSION)
    return self.REQUEST.RESPONSE.redirect('next')
1215:
def storeFile(self,something):
    """Keep *something* in the session under the key 'something'; returns 1."""
    self.REQUEST.SESSION['something'] = something
    return 1
1219:
def getFile(self):
    """Return the session value stored by storeFile."""
    return self.REQUEST.SESSION['something']
1222:
def isFolder(self,path):
    """Return TRUE if further objects may be filed below *path*."""
    # provisional rule: everything that is not a scanned document is a folder
    return not isScannedDocument(self, path)
1226:
def isScannedDocument(self,path):
    """Return TRUE if *path* is the root folder of a scanned document."""
    try:
        meta = file(path + "/index.meta", "r")
        content = meta.read()
    except:
        return 0  # no readable index.meta

    try:
        dom = xml.dom.minidom.parseString(content)
        content_type = getText(dom.getElementsByTagName("content-type")[0].childNodes)
    except:
        return 0  # unparsable metadata or content-type missing

    if content_type in ("scanned-document", "scanned document", "fulltext document"):
        return 1
    return 0
1243: return 0
1244:
1245: from time import localtime,strftime
1246:
def date(self):
    """Return today's date formatted as DD.MM.YYYY."""
    return strftime("%d.%m.%Y", localtime())
1249:
1250:
1251:
FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>