diff src/com/tmate/hgkit/ll/LocalHgRepo.java @ 57:8b0d6f1bd6b4

Local status is back
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Mon, 17 Jan 2011 05:54:25 +0100
parents 05829a70b30b
children 4cfc47bc14cc
--- a/src/com/tmate/hgkit/ll/LocalHgRepo.java	Mon Jan 17 05:15:13 2011 +0100
+++ b/src/com/tmate/hgkit/ll/LocalHgRepo.java	Mon Jan 17 05:54:25 2011 +0100
@@ -11,8 +11,10 @@
 import java.io.InputStreamReader;
 import java.lang.ref.SoftReference;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedList;
+import java.util.Set;
 import java.util.TreeSet;
 
 import com.tmate.hgkit.fs.DataAccessProvider;
@@ -46,133 +48,136 @@
 		return repoLocation;
 	}
 	
-//	public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
-//		LinkedList<File> folders = new LinkedList<File>();
-//		final File rootDir = repoDir.getParentFile();
-//		folders.add(rootDir);
-//		final HgDirstate dirstate = loadDirstate();
-//		final HgIgnore hgignore = loadIgnore();
-//		TreeSet<String> knownEntries = dirstate.all();
-//		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
-//		final StatusCollector.ManifestRevisionInspector collect = isTipBase ? null : new StatusCollector.ManifestRevisionInspector();
-//		if (!isTipBase) {
-//			getManifest().walk(baseRevision, baseRevision, collect);
-//		}
-//		do {
-//			File d = folders.removeFirst();
-//			for (File f : d.listFiles()) {
-//				if (f.isDirectory()) {
-//					if (!".hg".equals(f.getName())) {
-//						folders.addLast(f);
-//					}
-//				} else {
-//					// FIXME path relative to rootDir - need more robust approach
-//					String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
-//					if (hgignore.isIgnored(fname)) {
-//						inspector.ignored(fname);
-//					} else {
-//						if (knownEntries.remove(fname)) {
-//							// modified, added, removed, clean
-//							if (collect != null) { // need to check against base revision, not FS file
-//								checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
-//							} else {
-//								checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
-//							}
-//						} else {
-//							inspector.unknown(fname);
-//						}
-//					}
-//				}
-//			}
-//		} while (!folders.isEmpty());
-//		if (collect != null) {
-//			for (String r : collect.idsMap.keySet()) {
-//				inspector.removed(r);
-//			}
-//		}
-//		for (String m : knownEntries) {
-//			// removed from the repository and missing from working dir shall not be reported as 'deleted' 
-//			if (dirstate.checkRemoved(m) == null) {
-//				inspector.missing(m);
-//			}
-//		}
-//	}
-//	
-//	private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
-//		HgDirstate.Record r;
-//		if ((r = dirstate.checkNormal(fname)) != null) {
-//			// either clean or modified
-//			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
-//				inspector.clean(fname);
-//			} else {
-//				// FIXME check actual content to avoid false modified files
-//				inspector.modified(fname);
-//			}
-//		} else if ((r = dirstate.checkAdded(fname)) != null) {
-//			if (r.name2 == null) {
-//				inspector.added(fname);
-//			} else {
-//				inspector.copied(fname, r.name2);
-//			}
-//		} else if ((r = dirstate.checkRemoved(fname)) != null) {
-//			inspector.removed(fname);
-//		} else if ((r = dirstate.checkMerged(fname)) != null) {
-//			inspector.modified(fname);
-//		}
-//	}
-//	
-//	// XXX refactor checkLocalStatus methods in more OO way
-//	private void checkLocalStatusAgainstBaseRevision(StatusCollector.ManifestRevisionInspector collect, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
-//		// fname is in the dirstate, either Normal, Added, Removed or Merged
-//		Nodeid nid1 = collect.idsMap.remove(fname);
-//		String flags = collect.flagsMap.remove(fname);
-//		HgDirstate.Record r;
-//		if (nid1 == null) {
-//			// normal: added?
-//			// added: not known at the time of baseRevision, shall report
-//			// merged: was not known, report as added?
-//			if ((r = dirstate.checkAdded(fname)) != null) {
-//				if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
-//					collect.idsMap.remove(r.name2);
-//					collect.idsMap.remove(r.name2);
-//					inspector.copied(r.name2, fname);
-//					return;
-//				}
-//				// fall-through, report as added
-//			} else if (dirstate.checkRemoved(fname) != null) {
-//				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
-//				return;
-//			}
-//			inspector.added(fname);
-//		} else {
-//			// was known; check whether clean or modified
-//			// when added - seems to be the case of a file added once again, hence need to check if content is different
-//			if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
-//				// either clean or modified
-//				HgDataFile fileNode = getFileNode(fname);
-//				final int lengthAtRevision = fileNode.length(nid1);
-//				if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
-//					inspector.modified(fname);
-//				} else {
-//					// check actual content to see actual changes
-//					// XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
-//					if (areTheSame(f, fileNode.content(nid1))) {
-//						inspector.clean(fname);
-//					} else {
-//						inspector.modified(fname);
-//					}
-//				}
-//			}
-//			// only those left in idsMap after processing are reported as removed 
-//		}
-//
-//		// TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
-//		// we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively 
-//		// cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: 
-//		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
-//		// then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
-//		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
-//	}
+	public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
+		LinkedList<File> folders = new LinkedList<File>();
+		final File rootDir = repoDir.getParentFile();
+		folders.add(rootDir);
+		final HgDirstate dirstate = loadDirstate();
+		final HgIgnore hgignore = loadIgnore();
+		TreeSet<String> knownEntries = dirstate.all();
+		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
+		StatusCollector.ManifestRevisionInspector collect = null;
+		Set<String> baseRevFiles = Collections.emptySet();
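+		// when the base is not the tip revision, collect the manifest of baseRevision; baseRevFiles then holds the file names known at that revision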
+		if (!isTipBase) {
+			collect = new StatusCollector.ManifestRevisionInspector(baseRevision, baseRevision);
+			getManifest().walk(baseRevision, baseRevision, collect);
+			baseRevFiles = new TreeSet<String>(collect.files(baseRevision));
+		}
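+		// breadth-first walk of the working directory, skipping the .hg metadata folder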
+		do {
+			File d = folders.removeFirst();
+			for (File f : d.listFiles()) {
+				if (f.isDirectory()) {
+					if (!".hg".equals(f.getName())) {
+						folders.addLast(f);
+					}
+				} else {
+					// FIXME path relative to rootDir - need more robust approach
+					String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
+					if (hgignore.isIgnored(fname)) {
+						inspector.ignored(fname);
+					} else {
+						if (knownEntries.remove(fname)) {
+							// modified, added, removed, clean
+							if (collect != null) { // need to check against base revision, not FS file
+								Nodeid nid1 = collect.nodeid(baseRevision, fname);
+								String flags = collect.flags(baseRevision, fname);
+								checkLocalStatusAgainstBaseRevision(baseRevFiles, nid1, flags, fname, f, dirstate, inspector);
+								baseRevFiles.remove(fname);
+							} else {
+								checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
+							}
+						} else {
+							inspector.unknown(fname);
+						}
+					}
+				}
+			}
+		} while (!folders.isEmpty());
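+		// whatever is left in baseRevFiles was present in the base revision but never met in the working directory, i.e. removed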
+		if (collect != null) {
+			for (String r : baseRevFiles) {
+				inspector.removed(r);
+			}
+		}
+		for (String m : knownEntries) {
+			// removed from the repository and missing from working dir shall not be reported as 'deleted' 
+			if (dirstate.checkRemoved(m) == null) {
+				inspector.missing(m);
+			}
+		}
+	}
+	
+	private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+		HgDirstate.Record r;
+		if ((r = dirstate.checkNormal(fname)) != null) {
+			// either clean or modified
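+			// dirstate records mtime with second precision, while File.lastModified() returns milliseconds, hence the division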
+			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
+				inspector.clean(fname);
+			} else {
+				// FIXME check actual content to avoid false modified files
+				inspector.modified(fname);
+			}
+		} else if ((r = dirstate.checkAdded(fname)) != null) {
+			if (r.name2 == null) {
+				inspector.added(fname);
+			} else {
+				inspector.copied(fname, r.name2);
+			}
+		} else if ((r = dirstate.checkRemoved(fname)) != null) {
+			inspector.removed(fname);
+		} else if ((r = dirstate.checkMerged(fname)) != null) {
+			inspector.modified(fname);
+		}
+	}
+	
+	// XXX refactor checkLocalStatus methods in a more OO way
+	private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, Nodeid nid1, String flags, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+		// fname is in the dirstate, either Normal, Added, Removed or Merged
+		HgDirstate.Record r;
+		if (nid1 == null) {
+			// normal: added?
+			// added: not known at the time of baseRevision, shall report
+			// merged: was not known, report as added?
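+			// an added file with a copy source present in the base revision is reported as copied, and the source is excluded from the removed candidates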
+			if ((r = dirstate.checkAdded(fname)) != null) {
+				if (r.name2 != null && baseRevNames.contains(r.name2)) {
+					baseRevNames.remove(r.name2);
+					inspector.copied(r.name2, fname);
+					return;
+				}
+				// fall-through, report as added
+			} else if (dirstate.checkRemoved(fname) != null) {
+				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
+				return;
+			}
+			inspector.added(fname);
+		} else {
+			// was known; check whether clean or modified
+			// when added, this seems to be a file added once again, hence we need to check whether the content differs
+			if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
+				// either clean or modified
+				HgDataFile fileNode = getFileNode(fname);
+				final int lengthAtRevision = fileNode.length(nid1);
+				String flagsNow = todoGenerateFlags(fname /*java.io.File*/); // flags are Strings (possibly null), compare with equals() rather than ==
+				if (r.size /* XXX File.length() ?! */ != lengthAtRevision || (flags == null ? flagsNow != null : !flags.equals(flagsNow))) {
+					inspector.modified(fname);
+				} else {
+					// check actual content to see actual changes
+					// XXX consider adding HgDataFile.compare(File/byte[]/whatever) operation to optimize comparison
+					if (areTheSame(f, fileNode.content(nid1))) {
+						inspector.clean(fname);
+					} else {
+						inspector.modified(fname);
+					}
+				}
+			}
+			// only those left in baseRevFiles after processing are reported as removed
+		}
+
+		// TODO think over whether content comparison could be done more effectively, e.g. by calculating a nodeid for the local file and comparing it with the nodeid from the manifest.
+		// We don't need to tell the exact difference, a hash is enough to detect one; it doesn't involve reading historical file content, and it's relatively
+		// cheap to calculate a hash for a file (no need to keep it completely in memory). OTOH, if I'm right that the following approach is used for nodeids:
+		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest,
+		// then it's sufficient to check parents from the dirstate, and if they do not match the parents from the file's baseRevision, the nodeids differ as well.
+		// The question is whether original Hg treats this case (same content, different parents and hence different nodeids) as 'modified' or 'clean'.
+	}
 
 	private static String todoGenerateFlags(String fname) {
 		// FIXME implement