diff src/com/tmate/hgkit/ll/LocalHgRepo.java @ 58:4cfc47bc14cc

Status against local working dir extracted into a distinct class. Iteration over local files extracted into FileWalker (sketched below) for ease of OS-dependent patching
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Mon, 17 Jan 2011 23:01:19 +0100
parents 8b0d6f1bd6b4
children b771e94a4f7c
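The FileWalker class referenced by the new import and by createWorkingDirWalker() in the diff below is added elsewhere in this changeset and does not appear in this file's diff. Purely as a hedged sketch, here is what such a walker might look like, inferred from the directory-scanning loop removed from statusLocal(): the constructor taking the root directory matches the usage below, while the visitor interface, the class name, and the ".hg"-skipping behaviour are assumptions, not the actual hgkit API.

// Hypothetical sketch only; the real com.tmate.hgkit.fs.FileWalker is not shown in this diff.
import java.io.File;
import java.util.LinkedList;

public class FileWalkerSketch {

	public interface Visitor {
		void visit(String repoRelativeName, File f);
	}

	private final File root;

	public FileWalkerSketch(File rootDir) {
		root = rootDir;
	}

	// Breadth-first walk over regular files under the root, skipping the ".hg"
	// metadata directory, mirroring the loop removed from statusLocal() below.
	public void walk(Visitor v) {
		LinkedList<File> dirs = new LinkedList<File>();
		dirs.add(root);
		while (!dirs.isEmpty()) {
			File d = dirs.removeFirst();
			File[] children = d.listFiles();
			if (children == null) {
				continue; // unreadable or vanished directory
			}
			for (File f : children) {
				if (f.isDirectory()) {
					if (!".hg".equals(f.getName())) {
						dirs.addLast(f);
					}
				} else {
					// path relative to the repository root, separators normalized to '/'
					String name = f.getPath().substring(root.getPath().length() + 1).replace('\\', '/');
					v.visit(name, f);
				}
			}
		}
	}
}

A caller would presumably obtain the walker via repo.createWorkingDirWalker() (the method added in the last hunk) and classify each visited file against the dirstate, which is what the extracted status class is said to do per the commit message.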
line diff
--- a/src/com/tmate/hgkit/ll/LocalHgRepo.java	Mon Jan 17 05:54:25 2011 +0100
+++ b/src/com/tmate/hgkit/ll/LocalHgRepo.java	Mon Jan 17 23:01:19 2011 +0100
@@ -6,7 +6,9 @@
 import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileFilter;
 import java.io.FileInputStream;
+import java.io.FilenameFilter;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.lang.ref.SoftReference;
@@ -18,6 +20,7 @@
 import java.util.TreeSet;
 
 import com.tmate.hgkit.fs.DataAccessProvider;
+import com.tmate.hgkit.fs.FileWalker;
 
 /**
  * @author artem
@@ -31,7 +34,7 @@
 	public LocalHgRepo(String repositoryPath) {
 		setInvalid(true);
 		repoLocation = repositoryPath;
-		dataAccess = null;
+		dataAccess = null;  
 	}
 	
 	public LocalHgRepo(File repositoryRoot) throws IOException {
@@ -48,155 +51,10 @@
 		return repoLocation;
 	}
 	
-	public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
-		LinkedList<File> folders = new LinkedList<File>();
-		final File rootDir = repoDir.getParentFile();
-		folders.add(rootDir);
-		final HgDirstate dirstate = loadDirstate();
-		final HgIgnore hgignore = loadIgnore();
-		TreeSet<String> knownEntries = dirstate.all();
-		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
-		StatusCollector.ManifestRevisionInspector collect = null;
-		Set<String> baseRevFiles = Collections.emptySet();
-		if (!isTipBase) {
-			collect = new StatusCollector.ManifestRevisionInspector(baseRevision, baseRevision);
-			getManifest().walk(baseRevision, baseRevision, collect);
-			baseRevFiles = new TreeSet<String>(collect.files(baseRevision));
-		}
-		do {
-			File d = folders.removeFirst();
-			for (File f : d.listFiles()) {
-				if (f.isDirectory()) {
-					if (!".hg".equals(f.getName())) {
-						folders.addLast(f);
-					}
-				} else {
-					// FIXME path relative to rootDir - need more robust approach
-					String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
-					if (hgignore.isIgnored(fname)) {
-						inspector.ignored(fname);
-					} else {
-						if (knownEntries.remove(fname)) {
-							// modified, added, removed, clean
-							if (collect != null) { // need to check against base revision, not FS file
-								Nodeid nid1 = collect.nodeid(baseRevision, fname);
-								String flags = collect.flags(baseRevision, fname);
-								checkLocalStatusAgainstBaseRevision(baseRevFiles, nid1, flags, fname, f, dirstate, inspector);
-								baseRevFiles.remove(fname);
-							} else {
-								checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
-							}
-						} else {
-							inspector.unknown(fname);
-						}
-					}
-				}
-			}
-		} while (!folders.isEmpty());
-		if (collect != null) {
-			for (String r : baseRevFiles) {
-				inspector.removed(r);
-			}
-		}
-		for (String m : knownEntries) {
-			// removed from the repository and missing from working dir shall not be reported as 'deleted' 
-			if (dirstate.checkRemoved(m) == null) {
-				inspector.missing(m);
-			}
-		}
+	public FileWalker createWorkingDirWalker() {
+		return new FileWalker(repoDir.getParentFile());
 	}
 	
-	private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
-		HgDirstate.Record r;
-		if ((r = dirstate.checkNormal(fname)) != null) {
-			// either clean or modified
-			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
-				inspector.clean(fname);
-			} else {
-				// FIXME check actual content to avoid false modified files
-				inspector.modified(fname);
-			}
-		} else if ((r = dirstate.checkAdded(fname)) != null) {
-			if (r.name2 == null) {
-				inspector.added(fname);
-			} else {
-				inspector.copied(fname, r.name2);
-			}
-		} else if ((r = dirstate.checkRemoved(fname)) != null) {
-			inspector.removed(fname);
-		} else if ((r = dirstate.checkMerged(fname)) != null) {
-			inspector.modified(fname);
-		}
-	}
-	
-	// XXX refactor checkLocalStatus methods in more OO way
-	private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, Nodeid nid1, String flags, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
-		// fname is in the dirstate, either Normal, Added, Removed or Merged
-		HgDirstate.Record r;
-		if (nid1 == null) {
-			// normal: added?
-			// added: not known at the time of baseRevision, shall report
-			// merged: was not known, report as added?
-			if ((r = dirstate.checkAdded(fname)) != null) {
-				if (r.name2 != null && baseRevNames.contains(r.name2)) {
-					baseRevNames.remove(r.name2);
-					inspector.copied(r.name2, fname);
-					return;
-				}
-				// fall-through, report as added
-			} else if (dirstate.checkRemoved(fname) != null) {
-				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
-				return;
-			}
-			inspector.added(fname);
-		} else {
-			// was known; check whether clean or modified
-			// when added - seems to be the case of a file added once again, hence need to check if content is different
-			if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
-				// either clean or modified
-				HgDataFile fileNode = getFileNode(fname);
-				final int lengthAtRevision = fileNode.length(nid1);
-				if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
-					inspector.modified(fname);
-				} else {
-					// check actual content to see actual changes
-					// XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
-					if (areTheSame(f, fileNode.content(nid1))) {
-						inspector.clean(fname);
-					} else {
-						inspector.modified(fname);
-					}
-				}
-			}
-			// only those left in idsMap after processing are reported as removed 
-		}
-
-		// TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
-		// we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively 
-		// cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: 
-		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
-		// then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
-		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
-	}
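// Editorial aside, a hypothetical sketch that is not part of this changeset: the TODO
// in the removed method above speculates that comparing nodeids could replace reading
// historical file content. Mercurial's revlog hash is SHA-1 over the two parent
// nodeids (sorted, smaller first) followed by the revision text, so recomputing a
// filelog nodeid for a working-directory file needs the dirstate parents as well as
// the content; identical content with different parents still yields a different
// hash, which is exactly the open question the comment raises. Names below are
// illustrative only, not hgkit API.
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

class NodeidSketch {

	static byte[] hash(byte[] p1, byte[] p2, byte[] content) throws NoSuchAlgorithmException {
		byte[] lo = p1, hi = p2;
		if (compare(p1, p2) > 0) { // sort parents, smaller nodeid hashed first
			lo = p2;
			hi = p1;
		}
		MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
		sha1.update(lo);
		sha1.update(hi);
		sha1.update(content);
		return sha1.digest(); // 20 bytes, comparable with the manifest entry
	}

	private static int compare(byte[] a, byte[] b) {
		int len = Math.min(a.length, b.length);
		for (int i = 0; i < len; i++) {
			int d = (a[i] & 0xff) - (b[i] & 0xff);
			if (d != 0) {
				return d;
			}
		}
		return a.length - b.length;
	}
}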
-
-	private static String todoGenerateFlags(String fname) {
-		// FIXME implement
-		return null;
-	}
-	private static boolean areTheSame(File f, byte[] data) {
-		try {
-			BufferedInputStream is = new BufferedInputStream(new FileInputStream(f));
-			int i = 0;
-			while (i < data.length && data[i] == is.read()) {
-				i++; // increment only for successful match, otherwise won't tell last byte in data was the same as read from the stream
-			}
-			return i == data.length && is.read() == -1; // although data length is expected to be the same (see caller), check that we reached EOF, no more data left.
-		} catch (IOException ex) {
-			ex.printStackTrace(); // log warn
-		}
-		return false;
-	}
-
 	// XXX package-local, unless there are cases when required from outside (guess, working dir/revision walkers may hide dirstate access and no public visibility needed)
 	public final HgDirstate loadDirstate() {
 		// XXX may cache in SoftReference if creation is expensive