changeset 1679:f0daa76caba1

indents and comments
author Robert Casties <casties@mpiwg-berlin.mpg.de>
date Fri, 09 Mar 2018 15:32:39 +0100
parents e3de0d89e1c1
children 395d56ba0112
files common/src/main/java/digilib/io/BaseDirDocuDirectory.java common/src/main/java/digilib/io/DocuDirCache.java
diffstat 2 files changed, 72 insertions(+), 74 deletions(-)
--- a/common/src/main/java/digilib/io/BaseDirDocuDirectory.java	Wed Jan 31 19:55:25 2018 +0100
+++ b/common/src/main/java/digilib/io/BaseDirDocuDirectory.java	Fri Mar 09 15:32:39 2018 +0100
@@ -77,77 +77,77 @@
     }
 
     @Override
-    public synchronized boolean readDir() {
-    	// check directory first
-    	if (!isValid) {
-    		return false;
-    	}
-    	// re-check modification time because the thread may have slept
-    	if (dir.lastModified() <= dirMTime) {
-    		return true;
-    	}
-    	// read all filenames
-    	logger.debug("reading directory "+this+" = "+dir.getPath());
-    	File[] allFiles = null;
-    	/*
-    	 * using ReadableFileFilter is safer (we won't get directories with file
-    	 * extensions) but slower.
-    	 */
-    	// allFiles = dir.listFiles(new FileOps.ReadableFileFilter());
-    	allFiles = dir.listFiles();
-    	if (allFiles == null) {
-    		// not a directory
-    		return false;
-    	}
-    	// init parallel directories
-    	if (dirs == null) {
-    		// number of base dirs
-    		int nb = baseDirNames.length;
-    		// array of parallel dirs
-    		dirs = new Directory[nb];
-    		// first entry is this directory
-    		dirs[0] = this;
-    		// fill array with the remaining directories
-    		for (int j = 1; j < nb; j++) {
-    			// add dirName to baseDirName
-    			File d = new File(baseDirNames[j], dirName);
-    			if (d.isDirectory()) {
-    				dirs[j] = new Directory(d);
-    				logger.debug("  reading scaled directory " + d.getPath());
-    				dirs[j].readDir();
-    			}
-    		}
-    	}
-    
-    	File[] fileList = FileOps.listFiles(allFiles, FileOps.filterForClass(fileClass));
-    	// number of files in the directory
-    	int numFiles = fileList.length;
-    	if (numFiles > 0) {
-    		// create new list
-    		ArrayList<DocuDirent> dl = new ArrayList<DocuDirent>(numFiles);
-    		files = dl;
-    		for (File f : fileList) {
-    			DocuDirent df = FileOps.fileForClass(fileClass, f, dirs);
-    			df.setParent(this);
-    			// add the file to our list
-    			dl.add(df);
-    		}
-    		/*
-    		 * we sort the ArrayList (the list of files) for binarySearch to work 
-    		 * (DocuDirent's natural sort order is by filename)
-    		 */
-    		Collections.sort(dl);
-    	}
-    	// clear the scaled directories
-    	for (Directory d: dirs) {
-    		if (d != null) {
-    			d.clearFilenames();
-    		}
-    	}
-    	dirMTime = dir.lastModified();
-    	// read metadata as well
-    	readMeta();
-    	return isValid;
+	public synchronized boolean readDir() {
+		// check directory first
+		if (!isValid) {
+			return false;
+		}
+		// re-check modification time because the thread may have slept
+		if (dir.lastModified() <= dirMTime) {
+			return true;
+		}
+		// read all filenames
+		logger.debug("reading directory " + this + " = " + dir.getPath());
+		File[] allFiles = null;
+		/*
+		 * using ReadableFileFilter is safer (we won't get directories with file
+		 * extensions) but slower.
+		 */
+		// allFiles = dir.listFiles(new FileOps.ReadableFileFilter());
+		allFiles = dir.listFiles();
+		if (allFiles == null) {
+			// not a directory
+			return false;
+		}
+		// init parallel directories
+		if (dirs == null) {
+			// number of base dirs
+			int nb = baseDirNames.length;
+			// array of parallel dirs
+			dirs = new Directory[nb];
+			// first entry is this directory
+			dirs[0] = this;
+			// fill array with the remaining directories
+			for (int j = 1; j < nb; j++) {
+				// add dirName to baseDirName
+				File d = new File(baseDirNames[j], dirName);
+				if (d.isDirectory()) {
+					dirs[j] = new Directory(d);
+					logger.debug("  reading scaled directory " + d.getPath());
+					dirs[j].readDir();
+				}
+			}
+		}
+
+		File[] fileList = FileOps.listFiles(allFiles, FileOps.filterForClass(fileClass));
+		// number of files in the directory
+		int numFiles = fileList.length;
+		if (numFiles > 0) {
+			// create new list
+			ArrayList<DocuDirent> dl = new ArrayList<DocuDirent>(numFiles);
+			files = dl;
+			for (File f : fileList) {
+				DocuDirent df = FileOps.fileForClass(fileClass, f, dirs);
+				df.setParent(this);
+				// add the file to our list
+				dl.add(df);
+			}
+			/*
+			 * we sort the ArrayList (the list of files) for binarySearch to work
+			 * (DocuDirent's natural sort order is by filename)
+			 */
+			Collections.sort(dl);
+		}
+		// clear the scaled directories
+		for (Directory d : dirs) {
+			if (d != null) {
+				d.clearFilenames();
+			}
+		}
+		dirMTime = dir.lastModified();
+		// read metadata as well
+		readMeta();
+		return isValid;
     }
 
 }
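
A minimal sketch of the sort-then-binarySearch pattern that readDir() prepares: Collections.binarySearch only works on a list sorted in natural order, which for DocuDirent is the filename. Plain String filenames stand in for DocuDirent entries here, and the example names are illustrative only.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SortedLookupSketch {
    public static void main(String[] args) {
        // filenames as returned by the filesystem, in arbitrary order
        List<String> files = new ArrayList<String>();
        files.add("page0003.jpg");
        files.add("page0001.jpg");
        files.add("page0002.jpg");
        // binarySearch requires natural sort order -- the reason for
        // Collections.sort(dl) at the end of readDir()
        Collections.sort(files);
        int idx = Collections.binarySearch(files, "page0002.jpg");
        System.out.println(idx >= 0 ? "found at index " + idx : "not found");
    }
}
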
--- a/common/src/main/java/digilib/io/DocuDirCache.java	Wed Jan 31 19:55:25 2018 +0100
+++ b/common/src/main/java/digilib/io/DocuDirCache.java	Fri Mar 09 15:32:39 2018 +0100
@@ -156,7 +156,7 @@
     
     /**
 	 * Returns the DocuDirent with the pathname <code>fn</code> and the index
-	 * <code>in</code> and the class <code>fc</code>.
+	 * <code>in</code>.
 	 * 
 	 * If <code>fn</code> is a file then the corresponding DocuDirent is
 	 * returned and the index is ignored.
@@ -165,8 +165,6 @@
 	 *            digilib pathname
 	 * @param in
 	 *            file index
-	 * @param fc
-	 *            file class
 	 * @return
 	 */
 	public DocuDirent getFile(String fn, int in) {