# HG changeset patch
# User robcast
# Date 1097590003 -7200
# Node ID 03d63e8eae0561aac7de05314524b295b583e8d2
# Parent cfd5899eb2f586f122ee80d2181016ca17b395da
Servlet version 1.21b3
- searching in directories got faster (real binarySearch now! see the sketch after this list)
- cached file lists get disposed
- some code cleaning (Map types instead of HashMap)
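
The first item replaces the linear scan in DocuDirectory.indexOf() with Collections.binarySearch on the (already sorted) file list, falling back to a basename comparison when no exact filename matches. The class below is a minimal standalone sketch of that lookup strategy, not digilib code: it works on a plain sorted List<String>, and FilenameIndex with its basename() helper are illustrative stand-ins for DocuDirectory.indexOf() and FileOps.basename(). The sketch converts the binarySearch result with the standard -(insertionPoint) - 1 rule and bounds-checks the neighbours of the insertion point.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/** Sketch of the new lookup: exact binarySearch first, then a basename
 *  comparison against the neighbours of the insertion point. Not digilib API. */
public class FilenameIndex {

    private final List<String> names; // sorted filenames, as after Arrays.sort()

    public FilenameIndex(List<String> sortedNames) {
        this.names = sortedNames;
    }

    /** Filename without extension, e.g. "p001.tif" -> "p001". */
    static String basename(String fn) {
        int dot = fn.lastIndexOf('.');
        return (dot > 0) ? fn.substring(0, dot) : fn;
    }

    /** Index of fn, falling back to a basename match; -1 if not found. */
    public int indexOf(String fn) {
        int idx = Collections.binarySearch(names, fn);
        if (idx >= 0) {
            return idx; // exact match
        }
        // on a miss, binarySearch returns -(insertionPoint) - 1
        int ins = -idx - 1;
        String base = basename(fn);
        // a file with the same basename can only sit directly around ins
        if (ins > 0 && basename(names.get(ins - 1)).equals(base)) {
            return ins - 1;
        }
        if (ins < names.size() && basename(names.get(ins)).equals(base)) {
            return ins;
        }
        return -1;
    }

    public static void main(String[] args) {
        FilenameIndex dir = new FilenameIndex(
                Arrays.asList("p001.tif", "p002.tif", "p003.tif"));
        System.out.println(dir.indexOf("p002.tif")); // 1 (exact match)
        System.out.println(dir.indexOf("p002.jpg")); // 1 (basename match)
        System.out.println(dir.indexOf("p004.tif")); // -1
    }
}

With the file list sorted once in readDir(), each lookup costs O(log n) instead of a full scan, which is where the speedup in the first item comes from.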
diff -r cfd5899eb2f5 -r 03d63e8eae05 servlet/src/digilib/io/Directory.java
--- a/servlet/src/digilib/io/Directory.java Tue Oct 12 16:06:43 2004 +0200
+++ b/servlet/src/digilib/io/Directory.java Tue Oct 12 16:06:43 2004 +0200
@@ -125,10 +125,15 @@
public String[] getFilenames() {
return list;
}
+
/**
* @param filenames The filenames to set.
*/
public void setFilenames(String[] filenames) {
this.list = filenames;
}
+
+ public void clearFilenames() {
+ this.list = null;
+ }
}
diff -r cfd5899eb2f5 -r 03d63e8eae05 servlet/src/digilib/io/DocuDirCache.java
--- a/servlet/src/digilib/io/DocuDirCache.java Tue Oct 12 16:06:43 2004 +0200
+++ b/servlet/src/digilib/io/DocuDirCache.java Tue Oct 12 16:06:43 2004 +0200
@@ -27,6 +27,7 @@
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
import org.apache.log4j.Logger;
@@ -38,21 +39,23 @@
public class DocuDirCache {
/** general logger for this class */
- protected Logger logger = Logger.getLogger(this.getClass());
+ Logger logger = Logger.getLogger(this.getClass());
/** HashMap of directories */
- protected HashMap map = null;
+ Map map = null;
/** names of base directories */
- private String[] baseDirNames = null;
+ String[] baseDirNames = null;
/** array of allowed file classes (image/text) */
private int[] fileClasses = null;
/** number of files in the whole cache (approximate) */
- private long numFiles = 0;
+ long numFiles = 0;
/** number of cache hits */
- private long hits = 0;
+ long hits = 0;
/** number of cache misses */
- private long misses = 0;
+ long misses = 0;
/** use safe (but slow) indexing */
boolean safeDirIndex = false;
+ /** the root directory element */
+ public static Directory ROOT = null;
/**
* Constructor with array of base directory names and file classes.
@@ -111,8 +114,8 @@
*/
public void putDir(DocuDirectory newDir) {
put(newDir);
- String parent = newDir.getParentDirName();
- if (parent != null) {
+ String parent = FileOps.parent(newDir.getDirName());
+ if (parent != "") {
// check the parent in the cache
DocuDirectory pd = (DocuDirectory) map.get(parent);
if (pd == null) {
@@ -146,7 +149,7 @@
l.add(dd);
}
} else {
- if (dd.getParentDirName().equals(dirname)) {
+ if (FileOps.parent(dd.getDirName()).equals(dirname)) {
l.add(dd);
}
}
diff -r cfd5899eb2f5 -r 03d63e8eae05 servlet/src/digilib/io/DocuDirectory.java
--- a/servlet/src/digilib/io/DocuDirectory.java Tue Oct 12 16:06:43 2004 +0200
+++ b/servlet/src/digilib/io/DocuDirectory.java Tue Oct 12 16:06:43 2004 +0200
@@ -1,20 +1,20 @@
/* DocuDirectory -- Directory of DocuFilesets.
- Digital Image Library servlet components
+ Digital Image Library servlet components
- Copyright (C) 2003 Robert Casties (robcast@mail.berlios.de)
+ Copyright (C) 2003 Robert Casties (robcast@mail.berlios.de)
- This program is free software; you can redistribute it and/or modify it
- under the terms of the GNU General Public License as published by the
- Free Software Foundation; either version 2 of the License, or (at your
- option) any later version.
-
- Please read license.txt for the full details. A copy of the GPL
- may be found at http://www.gnu.org/copyleft/lgpl.html
+ This program is free software; you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by the
+ Free Software Foundation; either version 2 of the License, or (at your
+ option) any later version.
+
+ Please read license.txt for the full details. A copy of the GPL
+ may be found at http://www.gnu.org/copyleft/lgpl.html
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
* Created on 25.02.2003
*/
@@ -25,7 +25,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashMap;
+import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -37,34 +37,45 @@
*/
public class DocuDirectory extends Directory {
- // list of files (DocuDirent)
+ /** list of files (DocuDirent) */
private ArrayList[] list = null;
- // directory object is valid (exists on disk)
+
+ /** directory object is valid (exists on disk) */
private boolean isValid = false;
- // reference of the parent DocuDirCache
+
+ /** reference of the parent DocuDirCache */
private DocuDirCache cache = null;
- // directory name (digilib canonical form)
+
+ /** directory name (digilib canonical form) */
private String dirName = null;
- // directory metadata
- private HashMap dirMeta = null;
- // state of metadata is valid
+
+ /** directory metadata */
+ private Map dirMeta = null;
+
+ /** state of metadata is valid */
private boolean metaChecked = false;
- // unresolved file metadata
- private HashMap unresolvedFileMeta = null;
- // time of last access of this object (not the filesystem)
+
+ /** unresolved file metadata */
+ private Map unresolvedFileMeta = null;
+
+ /** time of last access of this object (not the filesystem) */
private long objectATime = 0;
- // time the file system directory was last modified
+
+ /** time directory was last modified on the file system */
private long dirMTime = 0;
- /** Constructor with digilib directory path and a parent DocuDirCache.
+ /**
+ * Constructor with digilib directory path and a parent DocuDirCache.
*
- * Directory names at the given path are appended to the base directories
- * from the cache. The directory is checked on disk and isValid is set.
+ * Directory names at the given path are appended to the base directories
+ * from the cache. The directory is checked on disk and isValid is set.
*
* @see readDir
- *
- * @param path digilib directory path name
- * @param cache parent DocuDirCache
+ *
+ * @param path
+ * digilib directory path name
+ * @param cache
+ * parent DocuDirCache
*/
public DocuDirectory(String path, DocuDirCache cache) {
this.dirName = path;
@@ -73,8 +84,9 @@
checkDir();
}
- /** Sets and checks the dir object.
- *
+ /**
+ * Sets and checks the dir object.
+ *
*/
protected void initDir() {
String baseDirName = cache.getBaseDirNames()[0];
@@ -86,35 +98,26 @@
dir = new File(baseDirName, dirName);
}
- /** The digilib name of the parent directory.
- *
- * Returns null if there is no parent.
+ /**
+ * number of DocuFiles in this directory.
+ *
*/
- public String getParentDirName() {
- String s = null;
- int lastidx = dirName.lastIndexOf("/");
- if (lastidx > 0) {
- s = dirName.substring(0, lastidx);
- }
- return s;
+ public int size() {
+ return ((list != null) && (list[0] != null)) ? list[0].size() : 0;
}
- /** number of DocuFiles in this directory.
+ /**
+ * number of files of this class in this directory.
*
+ * @param fc
+ * fileClass
*/
- public int size() {
- return ((list != null)&&(list[0] != null)) ? list[0].size() : 0;
+ public int size(int fc) {
+ return ((list != null) && (list[fc] != null)) ? list[fc].size() : 0;
}
- /** number of files of this class in this directory.
- *
- * @param fc fileClass
- */
- public int size(int fc) {
- return ((list != null)&&(list[fc] != null)) ? list[fc].size() : 0;
- }
-
- /** Returns the ImageFile at the index.
+ /**
+ * Returns the ImageFile at the index.
*
* @param index
* @return
@@ -126,10 +129,12 @@
return (ImageFileset) list[0].get(index);
}
- /** Returns the file of the class at the index.
+ /**
+ * Returns the file of the class at the index.
*
* @param index
- * @param fc fileClass
+ * @param fc
+ * fileClass
* @return
*/
public DocuDirent get(int index, int fc) {
@@ -139,7 +144,8 @@
return (DocuDirent) list[fc].get(index);
}
- /** Checks if the directory exists on the filesystem.
+ /**
+ * Checks if the directory exists on the filesystem.
*
* Sets isValid.
*
@@ -153,42 +159,26 @@
return isValid;
}
- /** Read the filesystem directory and fill this object.
+ /**
+ * Read the filesystem directory and fill this object.
*
* Clears the List and (re)reads all files.
*
* @return boolean the directory exists
*/
public boolean readDir() {
- // list of base dirs from the parent cache
- String[] baseDirNames = cache.getBaseDirNames();
- // first file extension to try for scaled directories
- String scalext = null;
- // number of base dirs
- int nb = baseDirNames.length;
- // array of base dirs
- Directory[] dirs = new Directory[nb];
// check directory first
checkDir();
if (!isValid) {
return false;
}
- // first entry is this directory
- dirs[0] = this;
- // fill array with the remaining directories
- for (int j = 1; j < nb; j++) {
- File d = new File(baseDirNames[j], dirName);
- if (d.isDirectory()) {
- dirs[j] = new Directory(d);
- dirs[j].readDir();
- }
- }
-
+ // first file extension to try for scaled directories
+ String scalext = null;
// read all filenames
- logger.debug("reading directory "+dir.getPath());
+ logger.debug("reading directory " + dir.getPath());
/*
- * using ReadableFileFilter is safer (we won't get directories
- * with file extensions) but slower.
+ * using ReadableFileFilter is safer (we won't get directories with file
+ * extensions) but slower.
*/
File[] allFiles = null;
if (cache.safeDirIndex) {
@@ -201,36 +191,67 @@
// not a directory
return false;
}
+ // list of base dirs from the parent cache
+ String[] baseDirNames = cache.getBaseDirNames();
+ // number of base dirs
+ int nb = baseDirNames.length;
+ // array of base dirs
+ Directory[] dirs = new Directory[nb];
+ // first entry is this directory
+ dirs[0] = this;
+ // fill array with the remaining directories
+ for (int j = 1; j < nb; j++) {
+ File d = new File(baseDirNames[j], dirName);
+ if (d.isDirectory()) {
+ dirs[j] = new Directory(d);
+ dirs[j].readDir();
+ }
+ }
+
// go through all file classes
- for (int nc = 0; nc < FileOps.NUM_CLASSES; nc++) {
- int fc = cache.getFileClasses()[nc];
- //logger.debug("filtering directory "+dir.getPath()+" for class "+fc);
- File[] fl = FileOps.listFiles(allFiles, FileOps.filterForClass(fc));
- //logger.debug(" done");
+ for (int classIdx = 0; classIdx < FileOps.NUM_CLASSES; classIdx++) {
+ int fileClass = cache.getFileClasses()[classIdx];
+ //logger.debug("filtering directory "+dir.getPath()+" for class
+ // "+fc);
+ File[] fileList = FileOps.listFiles(allFiles, FileOps
+ .filterForClass(fileClass));
+ //logger.debug(" done");
// number of files in the directory
- int nf = fl.length;
- if (nf > 0) {
+ int numFiles = fileList.length;
+ if (numFiles > 0) {
// create new list
- list[fc] = new ArrayList(nf);
+ list[fileClass] = new ArrayList(numFiles);
// sort the file names alphabetically and iterate the list
- Arrays.sort(fl);
+ Arrays.sort(fileList);
Map hints = FileOps.newHints(FileOps.HINT_BASEDIRS, dirs);
hints.put(FileOps.HINT_FILEEXT, scalext);
- for (int i = 0; i < nf; i++) {
- DocuDirent f = FileOps.fileForClass(fc, fl[i], hints);
+ for (int i = 0; i < numFiles; i++) {
+ DocuDirent f = FileOps.fileForClass(fileClass, fileList[i],
+ hints);
// add the file to our list
- list[fc].add(f);
+ list[fileClass].add(f);
f.setParent(this);
}
}
}
+ // clear the scaled directories
+ for (int j = 1; j < nb; j++) {
+ if (dirs[j] != null) {
+ dirs[j].clearFilenames();
+ }
+ }
+ // update number of cached files if this was the first time
+ if (dirMTime == 0) {
+ cache.numFiles += size();
+ }
dirMTime = dir.lastModified();
// read metadata as well
readMeta();
return isValid;
}
- /** Check to see if the directory has been modified and reread if necessary.
+ /**
+ * Check to see if the directory has been modified and reread if necessary.
*
* @return boolean the directory is valid
*/
@@ -245,8 +266,9 @@
return isValid;
}
- /** Read directory metadata.
- *
+ /**
+ * Read directory metadata.
+ *
*/
public void readMeta() {
// check for directory metadata...
@@ -255,12 +277,12 @@
XMLMetaLoader ml = new XMLMetaLoader();
try {
// read directory meta file
- HashMap fileMeta = ml.loadURL(mf.getAbsolutePath());
+ Map fileMeta = ml.loadURL(mf.getAbsolutePath());
if (fileMeta == null) {
return;
}
// meta for the directory itself is in the "" bin
- dirMeta = (HashMap) fileMeta.remove("");
+ dirMeta = (Map) fileMeta.remove("");
// read meta for files in this directory
readFileMeta(fileMeta, null);
// is there meta for other files left?
@@ -281,8 +303,9 @@
metaChecked = true;
}
- /** Read metadata from all known parent directories.
- *
+ /**
+ * Read metadata from all known parent directories.
+ *
*/
public void readParentMeta() {
// check the parent directories for additional file meta
@@ -299,16 +322,18 @@
}
}
- /** Read metadata for the files in this directory.
+ /**
+ * Read metadata for the files in this directory.
*
- * Takes a HashMap with meta-information, adding the relative path
- * before the lookup.
+ * Takes a Map with meta-information, adding the relative path before the
+ * lookup.
*
* @param fileMeta
* @param relPath
- * @param fc fileClass
+ * @param fc
+ * fileClass
*/
- protected void readFileMeta(HashMap fileMeta, String relPath) {
+ protected void readFileMeta(Map fileMeta, String relPath) {
if (list == null) {
// there are no files
return;
@@ -326,7 +351,7 @@
// prepend path to the filename
String fn = path + f.getName();
// look up meta for this file and remove from dir
- HashMap meta = (HashMap) fileMeta.remove(fn);
+ Map meta = (Map) fileMeta.remove(fn);
if (meta != null) {
// store meta in file
f.setFileMeta(meta);
@@ -335,7 +360,7 @@
}
}
- protected void notifyChildMeta(HashMap childmeta) {
+ protected void notifyChildMeta(Map childmeta) {
List children = cache.getChildren(this.dirName, true);
if (children.size() > 0) {
for (Iterator i = children.iterator(); i.hasNext();) {
@@ -345,7 +370,8 @@
}
}
- /** Update access time.
+ /**
+ * Update access time.
*
* @return long time of last access.
*/
@@ -355,26 +381,31 @@
return t;
}
- /** Searches for the file with the name fn.
+ /**
+ * Searches for the file with the name fn.
*
- * Searches the directory for the file with the name fn and returns
- * its index. Returns -1 if the file cannot be found.
- *
- * @param fn filename
- * @param fc file class
+ * Searches the directory for the file with the name fn and
+ * returns its index. Returns -1 if the file cannot be found.
+ *
+ * @param fn
+ * filename
+ * @param fc
+ * file class
* @return int index of file fn
*/
public int indexOf(String fn) {
int fc = FileOps.classForFilename(fn);
return indexOf(fn, fc);
}
-
- /** Searches for the file with the name fn and class fc.
+
+ /**
+ * Searches for the file with the name fn and class fc.
*
- * Searches the directory for the file with the name fn and returns
- * its index. Returns -1 if the file cannot be found.
- *
- * @param fn filename
+ * Searches the directory for the file with the name fn and
+ * returns its index. Returns -1 if the file cannot be found.
+ *
+ * @param fn
+ * filename
* @return int index of file fn
*/
public int indexOf(String fn, int fc) {
@@ -388,32 +419,37 @@
if (list[fc] == null) {
return -1;
}
- // linear search -> worst performance
- int n = list[fc].size();
- for (int i = 0; i < n; i++) {
- DocuDirent fs = (DocuDirent) list[fc].get(i);
- if (fs.getName().equals(fn)) {
- // filename matches
- return i;
+ // search for exact match
+ int idx = Collections.binarySearch(list[fc], fn);
+ if (idx >= 0) {
+ return idx;
+ } else {
+ // try closest matches without extension
+ idx = -idx;
+ String fb = FileOps.basename(fn);
+ DocuDirent fs = (DocuDirent) list[fc].get(idx - 1);
+ if (FileOps.basename(fs.getName()).equals(fb)) {
+ // basename matches
+ return idx - 1;
}
- }
- // try again without extension
- for (int i = 0; i < n; i++) {
- DocuDirent fs = (DocuDirent) list[fc].get(i);
- if (FileOps.basename(fs.getName()).equals(FileOps.basename(fn))) {
+ fs = (DocuDirent) list[fc].get(idx + 1);
+ if (FileOps.basename(fs.getName()).equals(fb)) {
// basename matches
- return i;
+ return idx + 1;
}
+
}
return -1;
}
- /** Finds the DocuDirent with the name fn.
+ /**
+ * Finds the DocuDirent with the name fn.
*
- * Searches the directory for the DocuDirent with the name fn and returns
- * it. Returns null if the file cannot be found.
- *
- * @param fn filename
+ * Searches the directory for the DocuDirent with the name fn
+ * and returns it. Returns null if the file cannot be found.
+ *
+ * @param fn
+ * filename
* @return DocuDirent
*/
public DocuDirent find(String fn) {
@@ -425,12 +461,15 @@
return null;
}
- /** Finds the DocuDirent with the name fn and class fc.
+ /**
+ * Finds the DocuDirent with the name fn and class
+ * fc.
*
- * Searches the directory for the DocuDirent with the name fn and returns
- * it. Returns null if the file cannot be found.
- *
- * @param fn filename
+ * Searches the directory for the DocuDirent with the name fn
+ * and returns it. Returns null if the file cannot be found.
+ *
+ * @param fn
+ * filename
* @return DocuDirent
*/
public DocuDirent find(String fn, int fc) {
@@ -442,13 +481,16 @@
}
/**
- * @return String
+ * Returns the digilib canonical name.
+ *
+ * @return
*/
public String getDirName() {
return dirName;
}
- /** The directory is valid (exists on disk).
+ /**
+ * The directory is valid (exists on disk).
*
* @return boolean
*/
@@ -456,7 +498,8 @@
return isValid;
}
- /** The directory has been read from disk.
+ /**
+ * The directory has been read from disk.
*
* @return
*/
@@ -474,10 +517,10 @@
/**
* @return Hashtable
*/
- public HashMap getDirMeta() {
+ public Map getDirMeta() {
return dirMeta;
}
-
+
/**
* Checks metadata
*
@@ -499,9 +542,11 @@
/**
* Sets the dirMeta.
- * @param dirMeta The dirMeta to set
+ *
+ * @param dirMeta
+ * The dirMeta to set
*/
- public void setDirMeta(HashMap dirMeta) {
+ public void setDirMeta(Map dirMeta) {
this.dirMeta = dirMeta;
}