Annotation of OSAS/OSA_system/OSAS_show.py, revision 1.14
1.1 dwinter 1: # Classes for displaying, browsing and organizing the archive
2:
1.4 dwinter 3:
1.2 dwinter 4: import addFolder
1.1 dwinter 5: from OFS.Folder import Folder
6: from OFS.SimpleItem import SimpleItem
7: from Products.PageTemplates.PageTemplateFile import PageTemplateFile
8: from Products.PageTemplates.PageTemplate import PageTemplate
1.8 dwinter 9: from AccessControl import ClassSecurityInfo
10: from Globals import InitializeClass
1.1 dwinter 11:
class OSAS_ShowOnline(SimpleItem):
    """Zope item for browsing and viewing files of the online archive.

    Most methods are thin wrappers around module-level helpers of the
    same name (filesystem2, browse, path_to_link_view, ...) defined
    further down in this file.
    """
    security=ClassSecurityInfo()

    """OnlineBrowser"""  # NOTE(review): dead bare string, not the class docstring

    def __init__(self,id):
        """Initialize a new instance with the given Zope object id."""
        self.id = id

    meta_type="OSAS_ShowOnline"  # Zope meta_type shown in the ZMI add list

    security.declareProtected('View','index_html')
    def index_html(self):
        """Main view: render the file-viewing page template."""
        # Template path is resolved relative to the Zope instance home.
        pt=PageTemplateFile('Products/OSA_system/OSAS_ViewFiles.zpt').__of__(self)
        return pt()

    def getfilesystem2(self,start,reload=0):
        """Load the filesystem index rooted at *start*.

        NOTE(review): *reload* is accepted but ignored; filesystem2 is
        always called with reload=1.
        """
        k=filesystem2(start,1)
        return k

    def tree(self,start):
        """Return a `browse` file tree rooted at *start*."""
        k=browse(start)
        return k

    def path_to_link_view(self,path):
        """Generate the HTML navigation bar for the viewfiles page."""
        return path_to_link_view(self.REQUEST['URL'],path)

    def isdigilib2(self,path):
        """Check whether *path* is usable by digilib (module helper)."""
        return isdigilib2(path)

    def changeName(self,name):
        # Map a docuserver path to its digilib-relative name (module helper).
        return changeName(name)

    def hasMetafile(self,path):
        # True if *path* contains an index.meta file (module helper).
        return hasMetafile(path)

    def getMetafile(self,path):
        # Render *path*'s index.meta as an HTML summary (module helper).
        return getMetafile(path)

    def toggle_view(self,path,file):
        """Open resp. close a subfolder in the tree view, then redirect."""
        self.tree(path).toggle(path,file)
        return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1']+"?path="+path)

InitializeClass(OSAS_ShowOnline)
62:
def manage_AddOSAS_ShowOnlineForm(self):
    """Render the ZMI form for adding an OSAS_ShowOnline instance."""
    form = PageTemplateFile('Products/OSA_system/AddOSAS_ShowOnline.zpt').__of__(self)
    return form()
67:
def manage_AddOSAS_ShowOnline(self,id,RESPONSE=None):
    """Create an OSAS_ShowOnline with *id* and register it on this folder.

    Redirects back to manage_main when invoked from the ZMI.
    """
    instance = OSAS_ShowOnline(id)
    self._setObject(id, instance)
    if RESPONSE is not None:
        RESPONSE.redirect('manage_main')
74:
75:
class OSAS_StoreOnline(SimpleItem):
    """Web front-end for the storage system (browsing, folder creation,
    index.meta editing, thumbnail rescaling)."""
    security=ClassSecurityInfo()

    def __init__(self,id):
        """Initialize a new instance with the given Zope object id."""
        self.id = id

    meta_type="OSAS_StoreOnline"  # Zope meta_type shown in the ZMI add list

    security.declareProtected('View','index_html')
    def index_html(self):
        """Main view: render the file-storage page template."""
        pt=PageTemplateFile('Products/OSA_system/OSAS_StoreFiles.zpt').__of__(self)
        return pt()

    def readContexts(self,path):
        """Return (link, name) context tuples from *path*/index.meta,
        or [] when no index.meta exists."""
        if os.path.exists(path+"/index.meta"):

            return readContexts(path)

        else:

            return []

    def rescaleThumbs(self,path):
        """Kick off thumbnail rescaling on the remote scaling host.

        NOTE(review): ignores the *path* argument and reads
        self.REQUEST['path'] instead -- confirm this is intended.
        NOTE(review): the request path is interpolated unquoted into a
        shell/ssh command line -- shell-injection risk.
        """
        os.popen("ssh nausikaa2.rz-berlin.mpg.de /usr/local/mpiwg/scripts/scaleomat.pl %s /mpiwg/temp/online/scaled/thumb 90 --replace >> /tmp/sc.out &"% re.sub('/mpiwg/online/','',self.REQUEST['path']))

        self.REQUEST.SESSION['path']=self.REQUEST['path']
        #return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1'])
        pt=PageTemplateFile('Products/OSA_system/OSAS_scaled.zpt').__of__(self)
        return pt()


    def getfilesystem2(self,start,reload=0):
        """Load the filesystem index rooted at *start*.

        NOTE(review): *reload* is accepted but ignored (always forced).
        """
        k=filesystem2(start,1)
        return k

    def tree(self,start):
        """Return a `browse` file tree rooted at *start*."""
        k=browse(start)
        return k

    def path_to_link_store(self,path):
        """Generate the HTML navigation bar for the store view."""
        return path_to_link_store(self.REQUEST['URL'],path)

    def isdigilib2(self,path):
        """Check whether *path* is usable by digilib (module helper)."""
        return isdigilib2(path)

    def changeName(self,name):
        # Map a docuserver path to its digilib-relative name (module helper).
        return changeName(name)

    def hasMetafile(self,path):
        # True if *path* contains an index.meta file (module helper).
        return hasMetafile(path)

    def getMetafile(self,path):
        # Render *path*'s index.meta as an HTML summary (module helper).
        return getMetafile(path)

    def toggle_view(self,path,file):
        """Open resp. close a subfolder in the tree view, then redirect."""
        self.tree(path).toggle(path,file)
        return self.REQUEST.RESPONSE.redirect(self.REQUEST['URL1']+"?path="+path)

    def isFolder(self,path):
        """Test whether the entry at *path* is a folder (module helper)."""
        return isFolder(self,path)

    def isScannedDocument(self,path):
        """Test whether *path* is the root of a scanned document."""
        return isScannedDocument(self,path)

    def isFullText(self,path,folder_name):
        """Test whether *folder_name* under *path* is a fulltext folder."""
        return isFullText(path,folder_name)

    def date(self):
        # Current local date, formatted DD.MM.YYYY, for templates.
        return strftime("%d.%m.%Y",localtime())

    def addFolderForm(self,path):
        """Render the form for creating a new folder."""
        pt=PageTemplateFile('Products/OSA_system/OSAS_addFolder.zpt').__of__(self)
        return pt()


    def addFolder(self,path,folder_name,description,archive_creation_date,creator):
        """Create the folder on disk and write its metadata files
        (delegates to the addFolder module)."""
        return addFolder.addFolder(self,path,folder_name,description,archive_creation_date,creator)

    def EditIndex(self,path):
        """Show the edit form for *path*/index.meta; an unparsable or
        missing file yields an empty editor."""
        try:
            dom=xml.dom.minidom.parse(path+"/index.meta")
            indexmeta=dom.toxml()
        except:
            indexmeta=""

        self.REQUEST.SESSION['indexmeta']=indexmeta
        self.REQUEST.SESSION['path']=path
        newtemplate=PageTemplateFile('Products/OSA_system/editindex').__of__(self)
        return newtemplate()

    def EditIndex2(self):
        """Save the edited index.meta back to disk, then redirect.

        NOTE(review): the written file handle is never closed, and the
        upload branch deliberately falls back to the form text (HACK DW).
        """
        if not self.REQUEST.has_key('fileupload'):
            #newtext=urllib.unquote(self.REQUEST['indexmeta'])
            newtext=self.REQUEST['indexmeta']

        else:
            self.file_name=self.REQUEST['fileupload'].filename
            #newtext=self.REQUEST.form['fileupload'].read()
            # HACK DW
            newtext=self.REQUEST['indexmeta']

        indexmeta=file(self.REQUEST.SESSION['path']+"/index.meta","w")
        indexmeta.writelines(newtext)
        return self.REQUEST.response.redirect(self.REQUEST['URL1']+"?path="+self.REQUEST.SESSION['path'])

    def add_metafile(self):
        """Render the add-metadata page template."""
        pt=PageTemplateFile('Products/OSA_system/OSAS_addmetadata.zpt').__of__(self)
        return pt()

InitializeClass(OSAS_StoreOnline)
1.2 dwinter 206:
def manage_AddOSAS_StoreOnlineForm(self):
    """Render the ZMI form for adding an OSAS_StoreOnline instance."""
    form = PageTemplateFile('Products/OSA_system/AddOSAS_StoreOnline.zpt').__of__(self)
    return form()
211:
def manage_AddOSAS_StoreOnline(self,id,RESPONSE=None):
    """Create an OSAS_StoreOnline with *id* and register it on this folder.

    Redirects back to manage_main when invoked from the ZMI.
    """
    instance = OSAS_StoreOnline(id)
    self._setObject(id, instance)
    if RESPONSE is not None:
        RESPONSE.redirect('manage_main')
218:
1.1 dwinter 219:
220:
221:
def readContexts(path):
    """Read all <context> entries from *path*/index.meta.

    Returns a list of (link, name) tuples; context nodes missing either
    tag are skipped silently.
    """
    dom = xml.dom.minidom.parse(path + "/index.meta")
    contexts = []
    for ctx in dom.getElementsByTagName('context'):
        try:
            link = getText(ctx.getElementsByTagName('link')[0].childNodes)
            name = getText(ctx.getElementsByTagName('name')[0].childNodes)
            contexts.append((link, name))
        except:
            pass
    return contexts
237:
238:
1.1 dwinter 239: ### Ab hier Baustelle
240:
241:
242: from types import *
243: import urllib
244: import os
245: import sys
246: import re
247: from AccessControl import ClassSecurityInfo
248: from AccessControl.Role import RoleManager
249: from Acquisition import Implicit
250: from Globals import Persistent
251: from time import strptime
252: from time import strftime
253: import time
254: import os.path
255: import dircache
256: import xml.dom.minidom
257: from Products.PageTemplates.PageTemplateFile import PageTemplateFile
258: from Products.PageTemplates.PageTemplate import PageTemplate
259: import tempfile
260: tempfile.tempdir="/var/tmp/archiver"
261:
262: exclusion=[".HSResource","lost+found","Network Trash Folder","TheFindByContentFolder","TheVolumeSettingsFolder"]
class fsentry(Implicit, Persistent, RoleManager):
    """One file-system entry (a path plus its modification time),
    built from a newline-terminated line of find(1) output."""
    # Class-level defaults; overwritten per instance in __init__.
    path = ""
    user = ""
    month = ""
    date =""
    time = ""

    security=ClassSecurityInfo()
    def __init__(self,extpath):
        """Parse one find(1) output line (must end in a newline)."""
        # Strip the trailing newline and normalize to an absolute path.
        extpath=os.path.abspath(re.search(r"(.*)\n",extpath).group(1))
        self.all=extpath
        self.path=extpath
        self.user=""
        self.mtime=os.path.getmtime(extpath)


    security.declarePublic('getPath')
    def getPath(self):
        """Return the absolute path of this entry."""
        return self.path

    security.declarePublic('getUser')
    def getUser(self):
        """Return the owning user (always "" in this implementation)."""
        return self.user

    security.declarePublic('getDate')
    def getDate(self):
        """Return the mtime formatted as YYYYMMDDHHMM (UTC)."""
        return strftime("%Y%m%d%H%M",time.gmtime(self.mtime))

    security.declarePublic('getDate')
    def getID(self):
        """Return a unique, sortable id: date string + path."""
        return self.getDate()+self.getPath()

    security.declarePublic('getTime')
    def getTime(self):
        """Return the (unused, always "") time attribute."""
        return self.time
    security.declarePublic('getAll')
    def getAll(self):
        """Return the full original path (same as getPath here)."""
        return self.all
309:
class filesystem(Implicit, Persistent, RoleManager):
    """Index of a directory tree: maps each directory to its file names
    and records which directories carry an index.meta/meta file.

    NOTE(review): `node` and `hasindex` are class-level mutable dicts --
    unless reload=1 is passed they are shared across ALL instances.
    """
    node={}
    hasindex={}
    security=ClassSecurityInfo()

    def getfs(self,start):
        """Return the raw output lines of find(1) below *start*.

        NOTE(review): *start* is interpolated unquoted into a shell
        command -- shell-injection risk for untrusted input.
        """
        f = os.popen("find "+ start+" -name '*' ","r")
        lines = f.readlines()

        return lines

    def loadfs(self,start):
        """Populate self.node / self.hasindex from find(1) output."""
        for line in self.getfs(start):

            g=re.search(r"(.*/)(.*)\n",line)
            if not g==None:
                path=g.group(1)
                file=g.group(2)
                if self.node.has_key(path):
                    elements=self.node[path]
                    elements.append(file)
                    self.node[path]=elements
                else:
                    self.node[path]=[file]
                # A directory counts as "indexed" if it holds index.meta or meta.
                if (file=="index.meta") | (file=="meta"):
                    self.hasindex[path]="1"

    def __init__(self,start,reload=0):
        # reload=1 resets the (class-level!) dicts before re-scanning.
        if reload==1:
            self.node={}
            self.hasindex={}
            self.loadfs(start)


    security.declarePublic('getNode')
    def getNode(self):
        """Return the directory -> file-list mapping."""
        return self.node

    security.declarePublic('getKeys')
    def getKeys(self):
        """Return all indexed directory paths."""
        return self.node.keys()

    security.declarePublic('clearnode')
    def clearnode(self):
        """Drop the directory mapping (hasindex is left untouched)."""
        self.node={}
        return 0

    security.declarePublic('hasIndex')
    def hasIndex(self,path):
        # True if *path* (with trailing slash) carries an index.meta/meta.
        return self.hasindex.has_key(path)


    def onlyIndex_old(self):
        """Old filter: return only directories with archive material."""
        j={}
        for k in self.node:
            if self.hasindex.has_key(k):
                if len(self.node[k])>1:
                    if (len(self.node[k])==2) & ('meta' not in self.node[k]):
                        j[k]=self.node[k]
                    elif (len(self.node[k])==2) & ('meta' in self.node[k]):
                        """ nothing """
                    else:
                        j[k]=self.node[k]
        return j

    def archive_the_path(self,path):
        """Decide whether *path* should be archived, by inspecting its
        index.meta: folders and already-archived documents return 0,
        missing archive-storage-date tag returns 1, missing index.meta
        returns 0."""

        try:
            #f = os.popen("cat "+path+"/index.meta","r")
            f =file(path+"/index.meta","r")

            lines = f.read()

            try:
                dom = xml.dom.minidom.parseString(lines)
                if dom.getElementsByTagName("content-type"):
                    if getText(dom.getElementsByTagName("content-type")[0].childNodes)=="folder":
                        # Folders are never archived.
                        return 0

                archive_storage_date=getText(dom.getElementsByTagName("archive-storage-date")[0].childNodes)

                if archive_storage_date=="":

                    # Empty date -> not yet archived -> archive it.
                    return 1
                else:
                    # Date present -> already archived.
                    return 0
            except:
                # No archive-storage-date tag at all -> archive it.
                return 1
        except:
            # No readable index.meta -> do not archive.
            return 0

    security.declarePublic('onlyIndex')
    def onlyIndex(self):
        """Return only directories whose archive-storage-date is unset."""
        j={}

        for k in self.node:
            if self.archive_the_path(k):
                j[k]=self.node[k]
        return j
    security.declarePublic('getImageDirs')
    def getImageDirs(self,dom,path):
        """Collect <dir> names from *dom*, rewritten relative to
        /mpiwg/online/ when possible."""
        dirs=dom.getElementsByTagName("dir")
        dirback=[]
        for dir in dirs:
            temp=getText(dir.getElementsByTagName("name")[0].childNodes)
            temp2=re.search(r"(.*)/mpiwg/online/(.*)",path+"/"+temp)
            if not temp2==None:
                try:
                    dirback.append(temp2.group(2))
                except:
                    """nothing"""
            else:
                dirback.append(temp)
        return dirback




    security.declarePublic('digilib')
    def digilib(self, path):
        """Return the image directories of *path* if it is a digilib
        container, otherwise the string "NO"."""
        if self.hasindex.has_key(path+"/"):
            return(self.parseIndexMeta(path))
        else:
            return "NO"




    security.declarePublic('isdigilib')
    def isdigilib(self, path):
        """Return the number of image directories usable by digilib."""
        if self.hasindex.has_key(path+"/"):
            return(len(self.parseIndexMeta(path)))
        else:
            return 0

    security.declarePublic('parseIndexMeta')
    def parseIndexMeta(self,k):
        """Parse *k*/index.meta and return its digilib image dirs
        ([] on any parse error or non-scanned-document content)."""
        f = os.popen("cat "+k+"/index.meta","r")
        lines = f.read()

        try:
            dom = xml.dom.minidom.parseString(lines)
            content_type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
            if (content_type=="scanned-document") or (content_type=="scanned document"):
                dirs=self.getImageDirs(dom,k)

            return dirs
        except:
            return []
474:
475:
class filesystem2(Implicit, Persistent, RoleManager):
    """Variant of `filesystem` that checks index.meta directly on disk.

    NOTE(review): __init__ does nothing, so `node` / `hasindex` are
    class-level dicts shared by ALL instances and only filled if
    loadfs() is called explicitly.
    """
    node={}
    hasindex={}
    security=ClassSecurityInfo()

    def getfs(self,start):
        """Return the raw output lines of find(1) below *start*.

        NOTE(review): *start* goes unquoted into a shell command --
        shell-injection risk for untrusted input.
        """

        f = os.popen("find "+ start+" -name '*' ","r")
        lines = f.readlines()

        return lines

    def loadfs(self,start):
        """Populate self.node / self.hasindex from find(1) output."""
        for line in self.getfs(start):

            g=re.search(r"(.*/)(.*)\n",line)
            if not g==None:
                try:
                    path=g.group(1)
                    file=g.group(2)
                except:
                    """nothing"""
                if self.node.has_key(path):
                    elements=self.node[path]
                    elements.append(file)
                    self.node[path]=elements
                else:
                    self.node[path]=[file]
                if (file=="index.meta") | (file=="meta"):
                    self.hasindex[path]="1"

    def __init__(self,start,reload=0):
        """Intentionally a no-op: both arguments are ignored."""


    security.declarePublic('getImageDirs')
    def getImageDirs(self,dom,path):
        """Collect <dir> names from *dom*, rewritten relative to
        /mpiwg/online/ when possible."""
        dirs=dom.getElementsByTagName("dir")
        dirback=[]
        for dir in dirs:
            temp=getText(dir.getElementsByTagName("name")[0].childNodes)
            temp2=re.search(r"(.*)/mpiwg/online/(.*)",path+"/"+temp)
            if not temp2==None:
                try:
                    dirback.append(temp2.group(2))
                except:
                    """nothing"""
            else:
                dirback.append(temp)
        return dirback


    security.declarePublic('digilib')
    def digilib(self, path):
        """Return the image directories of *path* if it has an
        index.meta, otherwise the string "NO"."""
        if os.path.exists(path+"/index.meta"):
            return(self.parseIndexMeta(path))
        else:
            return "NO"

    security.declarePublic('isdigilib')
    def isdigilib(self, path):
        # Number of image directories usable by digilib (0 if no meta file).
        if os.path.exists(path+"/index.meta"):
            return(len(self.parseIndexMeta(path)))
        else:
            return 0
    security.declarePublic('parseIndexMeta')
    def parseIndexMeta(self,k):
        """Parse *k*/index.meta and return its digilib image dirs
        ([] on any parse error; note: only "scanned-document", not
        "scanned document", is accepted here)."""
        f = os.popen("cat "+k+"/index.meta","r")
        lines = f.read()

        try:
            dom = xml.dom.minidom.parseString(lines)
            content_type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
            if content_type=="scanned-document":
                dirs=self.getImageDirs(dom,k)

            return dirs
        except:
            return []
560:
561:
class browse(Implicit, Persistent, RoleManager):
    """Lazy directory-tree browser with per-path open/closed state.

    NOTE(review): `toggledict` is class-level and therefore shared by
    all instances (toggle state survives across trees).
    """

    security=ClassSecurityInfo()
    tree={}
    toggledict={}

    def filterExcluded(self,dir):
        """Drop entries that appear in the module-level `exclusion` list."""
        ret=[]
        for item in dir:
            if not item in exclusion:
                ret.append(item)
        return ret

    def __init__(self,startpath):
        # Fresh per-instance tree cache, seeded with the start directory.
        self.tree={}
        self.tree[startpath]=self.filterExcluded(dircache.listdir(startpath))

    security.declarePublic('getTree')
    def getTree(self,path):
        """Return the (cached) filtered directory listing of *path*."""
        if self.tree.has_key(path):
            return self.tree[path]
        else:
            self.tree[path]=self.filterExcluded(dircache.listdir(path))
            return self.tree[path]

    security.declarePublic('isDirectory')
    def isDirectory(self,path,file):
        """True if *file* inside *path* is a directory."""
        return os.path.isdir(os.path.abspath(path+"/"+file))

    security.declarePublic('toggle')
    def toggle(self,tmppath,file):
        """Flip the open/closed state of *file* below *tmppath*.

        First toggle of a path stores 4 (treated as truthy/open);
        afterwards it alternates between 0 and 1.
        """
        path=tmppath+"/"+file

        if self.toggledict.has_key(path):
            if self.toggledict[path]==0:
                self.toggledict[path]=1

            else:
                self.toggledict[path]=0

        else:
            self.toggledict[path]=4


    security.declarePublic('isToggle')
    def isToggle(self,tmppath,file):
        """Return the stored toggle state (0 if never toggled)."""
        path=tmppath+"/"+file

        if self.toggledict.has_key(path):

            return self.toggledict[path]
        else:

            return 0
616:
617:
def getfs(start):
    """Return every path below *start* as raw find(1) output lines.

    NOTE(review): *start* is interpolated unquoted into a shell
    command -- shell-injection risk for untrusted input.
    """

    f = os.popen("find "+ start+" -name '*'","r")
    lines = f.readlines()
    return lines
625:
def showall(start):
    # Debug helper: print every find(1) line below *start*; returns 0.
    lines = getfs(start)
    for line in lines:
        print line
    return 0
631:
def entries(start):
    """Return a list of fsentry objects for every existing path below
    *start*; unparsable or vanished paths are skipped silently."""
    i=0
    fs=[]
    lines=getfs(start)
    for line in lines:
        try:
            # Only build an entry if the path still exists on disk.
            if os.path.exists(os.path.abspath(re.search(r"(.*)\n",line).group(1))):
                fs.append(fsentry(line))
                i=i+1
        except:
            """nothing"""
    return fs
645:
def getfilesystem(start,reload=0):
    """Build a fresh `filesystem` index rooted at *start*.

    NOTE(review): *reload* is accepted but ignored -- a reload is always
    forced (second argument 1), matching historical behaviour.
    """
    return filesystem(start, 1)
651:
652:
653:
def sort_by_date(fs):
    """Sort a list of file entries by their getID() key, newest first.

    Works for any objects exposing getID(); returns a new list.
    """
    by_id = {}
    for entry in fs:
        by_id[entry.getID()] = entry
    ids = list(by_id.keys())
    ids.sort()
    ids.reverse()
    ordered = []
    for key in ids:
        ordered.append(by_id[key])
    return ordered
667:
def path_to_link(path):
    """Build an HTML breadcrumb bar for the showfiles view.

    Each ancestor of *path* (at most 20 levels deep) becomes a link of
    the form <a href=showfiles?path=...>basename</a>/, ordered from the
    root down to *path* itself.
    """
    crumbs = [[path, os.path.basename(path)]]
    parent = os.path.dirname(path)
    depth = 0

    while not (len(parent) == 1):
        depth = depth + 1
        if depth > 20:
            break
        crumbs.append([parent, os.path.basename(parent)])
        parent = os.path.dirname(parent)

    html = ""
    while depth >= 0:
        html = html + "<a href=showfiles?path=" + crumbs[depth][0] + ">" + crumbs[depth][1] + "</a>/"
        depth = depth - 1
    return html
689:
def path_to_link_view(URL,path):
    """Build an HTML breadcrumb bar for the viewfiles page.

    Same shape as path_to_link but links back to *URL*?path=... .
    Limited to 20 ancestor levels.
    """
    crumbs = [[path, os.path.basename(path)]]
    parent = os.path.dirname(path)
    depth = 0

    while not (len(parent) == 1):
        depth = depth + 1
        if depth > 20:
            break
        crumbs.append([parent, os.path.basename(parent)])
        parent = os.path.dirname(parent)

    html = ""
    while depth >= 0:
        html = html + "<a href=" + URL + "?path=" + crumbs[depth][0] + ">" + crumbs[depth][1] + "</a>/"
        depth = depth - 1
    return html
711:
def path_to_link_store(URL,path):
    """Build an HTML breadcrumb bar for the store view.

    NOTE(review): byte-for-byte the same logic as path_to_link_view;
    kept separate to preserve the existing public API.
    """
    crumbs = [[path, os.path.basename(path)]]
    parent = os.path.dirname(path)
    depth = 0

    while not (len(parent) == 1):
        depth = depth + 1
        if depth > 20:
            break
        crumbs.append([parent, os.path.basename(parent)])
        parent = os.path.dirname(parent)

    html = ""
    while depth >= 0:
        html = html + "<a href=" + URL + "?path=" + crumbs[depth][0] + ">" + crumbs[depth][1] + "</a>/"
        depth = depth - 1
    return html
733:
734:
class Error(Implicit, Persistent, RoleManager):
    """Simple holder for an error record (usually [path, [text, status]])."""

    error=[]
    security=ClassSecurityInfo()
    def __init__(self,initerror):
        # Shallow-copy so later mutation of the argument does not leak in.
        self.error=initerror[0:]

    security.declarePublic('getError')
    def getError(self):
        """Return the stored error record."""
        return self.error
745:
class metacheck(Implicit, Persistent, RoleManager):
    """Run the external metacheck tool on one or more paths and collect
    its output as Error objects."""
    lines=[]
    security=ClassSecurityInfo()
    def parsearchive(self,str):
        """Turn tool output lines into [html_text, status] where status
        is 'error' (ABORT), 'ok' (DONE) or 'running'.

        NOTE(review): if a line does not match "x:y", check.group(1)
        raises; and `error` reflects only the LAST line's prefix.
        """
        retstr=''

        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"

            return [retstr,error]
        else:
            return ['','running']
    def __init__(self,path):
        """Run metacheck on *path* (a single string or a list of paths)."""
        self.lines=[]

        if type(path)==StringType:
            f = os.popen("/usr/local/mpiwg/archive/metacheck "+path,"r")
            self.lines.append(Error([path,self.parsearchive(f.readlines())]))
        else:
            for singlepath in path:
                f = os.popen("/usr/local/mpiwg/archive/metacheck "+singlepath,"r")
                self.lines.append(Error([singlepath,self.parsearchive(f.readlines())]))
    security.declarePublic('messages')

    def messages(self):
        """Return the collected Error objects."""
        return self.lines
782:
783:
784:
785:
class archive(Implicit, Persistent, RoleManager):
    """Launch the external archiver in the background for one or more
    paths; progress is read back from per-path temp files."""
    lines=[]
    security=ClassSecurityInfo()
    def parsearchive(self,str):
        """Turn archiver output lines into [html_text, status] where
        status is 'error' (ABORT), 'ok' (DONE) or 'running'.

        NOTE(review): `error` reflects only the LAST line's prefix, and
        a line without "x:y" makes check.group(1) raise.
        """
        retstr=''

        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"

            return [retstr,error]
        else:
            return ['','running']

    def __init__(self,path,session):
        """Start archiving *path* (string or list) and register this
        object in the Zope *session* under 'archiver'.

        NOTE(review): tempfile.mktemp() is race-prone; paths go unquoted
        into a shell command line.
        """
        self.lines=[]
        self.filenames={}
        session['archiver']=self


        if type(path)==StringType:
            self.filenames[path]=tempfile.mktemp()
            f = os.popen("/usr/local/mpiwg/archive/archiver "+path+" > "+self.filenames[path]+" &","r")
        else:
            for singlepath in path:
                self.filenames[singlepath]=tempfile.mktemp()
                f = os.popen("/usr/local/mpiwg/archive/archiver "+singlepath+" > "+self.filenames[singlepath]+" &","r")

    security.declarePublic('messages')
    def messages(self):
        """Re-read all progress files and return fresh Error objects."""
        self.lines=[]
        for path in self.filenames.keys():

            self.lines.append(Error([path,self.parsearchive(open(self.filenames[path],"r").readlines())]))
        return self.lines
830:
831:
def evalext(str):
    # SECURITY(review): eval() of an arbitrary string -- executes code if
    # *str* ever comes from user input. Kept as-is; callers must be trusted.
    return eval(str)
834:
def storeerror(ret,path,context,i):
    """Stash an error message and its path in the session, slot *i*.

    Returns the relative URL of the error page for that slot.
    """
    sess = context.REQUEST.SESSION
    sess['error%i' % i] = ret
    sess['path%i' % i] = path
    return 'error?number=%i' % i
841:
def geterror(str,context):
    """Fetch a value previously stored in the session under key *str*."""
    return context.REQUEST.SESSION[str]
845:
def readfile(path):
    """Return the entire contents of the file at *path* as one string.

    Fixes: the original never closed the file handle and built the
    result by repeated string concatenation (quadratic); a context
    manager plus a single read() preserves the same return value.
    """
    with open(path, 'r') as f:
        return f.read()
853:
def writefile(self,path,txt,REQUEST):
    """Write *txt* to *path*, then render the acquired 'archive2' view.

    REQUEST is unused; kept for the External Method signature.
    """
    f=open(path,'w')
    f.write(txt)
    f.close()
    # Zope acquisition: look up the nearest 'archive2' object and call it.
    rval=self.aq_acquire('archive2')
    return rval()
860:
861:
def metachecker(self,path):
    """Check the metadata of the documents in *path*; remembers the
    requested path in the session and returns a `metacheck` result."""
    self.REQUEST.SESSION['path']=self.REQUEST['path']
    return metacheck(path)
866:
def archiver(self,path):
    """Start background archiving of *path* and redirect to the
    'archive4' progress page (the archive object lives in the session)."""
    tmp=archive(path,self.REQUEST.SESSION)
    return self.REQUEST.RESPONSE.redirect('archive4')
871:
def getText(nodelist):
    """Concatenate the character data of all text nodes in *nodelist*."""
    parts = []
    for node in nodelist:
        if node.nodeType == node.TEXT_NODE:
            parts.append(node.data)
    return "".join(parts)
879:
def getBib(nodelist):
    """Render the element children of a <bib> node as a two-column
    HTML table (tag name : text content)."""
    rows = "<table border='0'>"

    for child in nodelist:
        if child.nodeType == child.ELEMENT_NODE:
            rows = rows + "<tr><td valign='right'>" + str(child.nodeName) + ":</td><td> " + getText(child.childNodes) + "</td></tr>"
    return rows + "</table>"
890:
def getMetafile(path):
    """Read *path*/index.meta and render it as an HTML summary.

    Returns the string "NO_METADATA" when no index.meta exists.
    Missing individual tags degrade to "NOT_DEFINED!!!" placeholders.
    """
    html=[]
    if not os.path.exists(path+"/index.meta"):

        return "NO_METADATA"
    else:
        # NOTE(review): shelling out to cat; a plain open() would do.
        f = os.popen("cat "+path+"/index.meta","r")
        lines = f.read()
        dom = xml.dom.minidom.parseString(lines)
        try:
            name=getText(dom.getElementsByTagName("name")[0].childNodes)
        except:
            name="NOT_DEFINED!!!"
        try:
            creator=getText(dom.getElementsByTagName("creator")[0].childNodes)
        except:
            creator="NOT_DEFINED!!!"

        try:
            creation_date=getText(dom.getElementsByTagName("archive-creation-date")[0].childNodes)
        except:
            creation_date="NOT_DEFINED!!!"

        try:
            description=getText(dom.getElementsByTagName("description")[0].childNodes)
        except:
            description="NOT_DEFINED!!!"

        try:
            type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
        except:
            type=""
        # Heading depends on the declared content type.
        if type=="scanned document":
            html="<h3>Document: "+name+"</h3>"
        elif type=="folder":
            html="<h3>Folder: "+name+"</h3>"
        else:
            html="<h3>Document: "+name+"</h3>"

        html=html+"<p><i>created by: "+creator+" at: "+creation_date+"</i></p>"
        html=html+"<h4>Description</h4><p>"+description+"</p>"
        try:
            # Optional bibliographic block: <meta><bib type="...">...
            bib = dom.getElementsByTagName("meta")[0].getElementsByTagName("bib")[0]
            if bib.attributes.has_key('type'):
                html=html+"<h4>Info ("+bib.attributes['type'].value+")</h4>"
            else:
                html=html+"<h4>Info</h4>"
            html=html+getBib(bib.childNodes)

        except:
            """none"""

        # html=html.encode('utf-8','replace')+getBib(bib.childNodes).encode('utf-8','replace')

        return html
947:
def hasMetafile(path):
    """Return whether the directory *path* contains an index.meta file."""
    return os.path.exists("%s/index.meta" % path)
952:
def isdigilib2(path):
    """Heuristic: is *path* a digilib image folder?

    Returns 1 if the folder directly contains an image file, or if the
    parent's index.meta declares this folder as an "images" dir;
    returns 0 on any error.
    """
    try:
        dir=os.listdir(path)

        imagesuffixes=['.gif','.jpg','.jpeg','.png','.tiff','.tif','.JPG','.TIFF','.TIF']
        ret=""
        for a in dir:

            suffix=os.path.splitext(a)

            if suffix[1] in imagesuffixes:
                return 1

        # No image files found: fall back to the parent's index.meta.
        try:
            dom=xml.dom.minidom.parse(os.path.split(path)[0]+"/index.meta")
            for node in dom.getElementsByTagName("dir"):

                if getText(node.getElementsByTagName("content-type")[0].childNodes)=="images":

                    if getText(node.getElementsByTagName("name")[0].childNodes)==os.path.split(path)[1]:
                        return 1
            return 0
        except:

            return 0




    except:
        return 0
986:
def isFullText(path,folder_name):
    """Check whether *folder_name* is declared as a fulltext directory
    in *path*/index.meta; returns 0 on any parse or lookup error."""
    try:
        dom = xml.dom.minidom.parse(path + "/index.meta")
        for dirnode in dom.getElementsByTagName("dir"):
            ctype = getText(dirnode.getElementsByTagName("content-type")[0].childNodes)
            if ctype == "fulltext":
                if getText(dirnode.getElementsByTagName("name")[0].childNodes) == folder_name:
                    return 1
        return 0
    except:
        return 0
1001:
1002:
def isPresentation(path,folder_name):
    """Check whether *folder_name* is declared as a presentation
    directory in *path*/index.meta; returns 0 on any error."""
    try:
        dom=xml.dom.minidom.parse(path+"/index.meta")
        #print dom.toxml()
        for dirnode in dom.getElementsByTagName("dir"):
            try:
                # Per-dir try: a <dir> without content-type is just skipped.
                if getText(dirnode.getElementsByTagName('content-type')[0].childNodes)=='presentation':
                    if getText(dirnode.getElementsByTagName("name")[0].childNodes)==folder_name:
                        return 1
            except:
                """nothing"""
        return 0
    except:

        return 0
1020:
1021:
1022:
1023:
1024:
def changeName(path):
    """Map an absolute docuserver path to its web-relative form.

    Paths below /mpiwg/online/ are returned relative to that prefix;
    paths below .../mpiwg/production/docuserver/ are prefixed with
    "digifiles/"; anything else yields "NO" (no online representation,
    e.g. in the archive area).
    """
    try:
        online = re.search(r"(.*)/mpiwg/online/(.*)", path)
        if online is not None:
            return online.group(2)
        return "digifiles/" + re.search(r"(.*)/mpiwg/production/docuserver/(.*)", path).group(2)
    except:
        return "NO"
1034:
1035:
def test(self):
    """Experimental scratch method -- creates a DTML method 'neu'.

    NOTE(review): `self.getattr('neu')` is almost certainly broken;
    standard Zope objects have no `getattr` method (should presumably
    be getattr(self, 'neu')). Left as-is.
    """
    self.i=1
    #newtemplate=PageTemplateFile('/usr/local/mpiwg/Zope/Extensions/test').__of__(self)
    self.manage_addProduct['OFSP'].manage_addDTMLMethod('neu','neu')
    self.getattr('neu').manage_edit('HELLO','neu')
    return "ok"
1042:
1043:
class ls(Implicit, Persistent, RoleManager):
    """Background 'ls -R /' runner whose output is read from a temp file.

    NOTE(review): `self.f` is never assigned, so read() and all() raise
    AttributeError; only printOutfile() works. tempfile.mktemp() is
    race-prone.
    """
    # Class-level defaults, unused by the working code path.
    path = ""
    user = ""
    month = ""
    date =""
    time = ""

    security=ClassSecurityInfo()

    def __init__(self,start):
        """Start the background listing; *start* is a session-like
        mapping in which this object registers itself as 'outfile'."""
        self.outfile=tempfile.mktemp()
        start['outfile']=self
        os.popen("ls -R / >"+self.outfile+" &","r")


    security.declarePublic('read')
    def read(self):
        # NOTE(review): broken -- self.f is never set.
        return self.f.read()
    security.declarePublic('retself')
    def retself(self):
        """Return self (handy from page templates)."""
        return self
    security.declarePublic('all')
    def all(self):
        # NOTE(review): broken -- self.f is never set.
        ret=""
        for g in self.f:
            ret=ret+g
        return ret

    security.declarePublic('printOutfile')
    def printOutfile(self):
        """Return the output lines, busy-waiting until the file appears.

        NOTE(review): spins the CPU while waiting and only waits for the
        file to EXIST, not for the listing to finish.
        """
        while not os.path.exists(self.outfile):
            """nothing"""
        return open(self.outfile).readlines()
1078:
class overview(Implicit,Persistent, RoleManager):
    """Status overview over archiver log files in a directory.

    NOTE(review): `resources` is a class-level dict shared by ALL
    instances -- entries from earlier overviews accumulate.
    """
    dir=[]
    resources={}
    security=ClassSecurityInfo()

    def __init__(self,path):
        """Index every log file in *path* by its resource line."""
        dir=os.listdir(path)

        for file in dir:
            self.resources[self.getResource(path,file)]=path+"/"+file


    def getResource(self,path,filename):
        """Return the first "INFO: resource..." line of a log file,
        or the string "error" if none is found."""
        f=file(path+"/"+filename,'r')

        for line in f.readlines():

            # Matches lines shaped like "INFO: resource ...".
            if line[0:4]=="INFO":
                if line[6:14]=="resource":
                    return line
        return "error"

    def parsearchive(self,str):
        """Turn log lines into [html_text, status] where status is
        'error' (ABORT), 'ok' (DONE) or 'running'; the LAST matching
        line determines the status."""
        retstr=''

        if not len(str)==0:
            for line in str:
                retstr=retstr+line+"<br>"
                check=re.search(r"(.*):(.*)",line)
                if check.group(1)=='ABORT':
                    error="error"
                elif check.group(1)=='DONE':
                    error="ok"
                else:
                    error="running"

            return [retstr,error]
        else:
            return ['','running']

    security.declarePublic('messages')
    def messages(self):
        """Re-parse every indexed log file and return Error objects."""
        self.lines=[]
        for name in self.resources.keys():
            path=self.resources[name]

            self.lines.append(Error([name,self.parsearchive(open(path,"r").readlines())]))
        return self.lines

    security.declarePublic('printResource')
    def printResource(self):
        """Return the resource-line -> log-path mapping."""
        return self.resources
1132:
def getoverview(path):
    """Convenience constructor: build an `overview` of the logs in *path*."""
    return overview(path)
1136:
1137:
def ls_test(self):
    # Start a background `ls` registered in the session, then redirect
    # to the 'next' page which displays its output.
    tmp=ls(self.REQUEST.SESSION)
    return self.REQUEST.RESPONSE.redirect('next')
1141:
def storeFile(self,something):
    """Stash *something* in the visitor's session under 'something';
    always returns 1."""
    session = self.REQUEST.SESSION
    session['something'] = something
    return 1
1145:
def getFile(self):
    """Return the object previously stored in the session by storeFile."""
    session = self.REQUEST.SESSION
    return session['something']
1148:
def isFolder(self,path):
    """True if *path* may hold further folders or documents.

    Provisional rule (per the original German comment): every entry
    that is not a scanned document counts as a folder.
    """
    return not isScannedDocument(self, path)
1152:
def isScannedDocument(self,path):
    """Return 1 if *path* is the root folder of a scanned document
    (content-type "scanned-document" or "scanned document" in its
    index.meta), else 0. Any error also yields 0.

    NOTE(review): uses the Python 2 `file()` builtin.
    """
    try:
        f = file(path+"/index.meta","r")
        lines = f.read()

        try:
            dom = xml.dom.minidom.parseString(lines)
            content_type=getText(dom.getElementsByTagName("content-type")[0].childNodes)
            if (content_type=="scanned-document") or (content_type=="scanned document"):
                return 1
            else:
                return 0
        except:
            return 0
    except:
        return 0
1170:
1171: from time import localtime,strftime
1172:
def date(self):
    """Return the current local date formatted as DD.MM.YYYY."""
    now = localtime()
    return strftime("%d.%m.%Y", now)
1175:
1176:
1177:
FreeBSD-CVSweb <freebsd-cvsweb@FreeBSD.org>