changeset 55:05829a70b30b

Status operation extracted into separate, cache-friendly class
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Mon, 17 Jan 2011 04:45:09 +0100
parents fd4f2c98995b
children 576d6e8a09f6
files design.txt src/com/tmate/hgkit/console/Status.java src/com/tmate/hgkit/ll/HgRepository.java src/com/tmate/hgkit/ll/LocalHgRepo.java src/com/tmate/hgkit/ll/StatusCollector.java
diffstat 5 files changed, 420 insertions(+), 211 deletions(-) [+]
--- a/design.txt	Sun Jan 16 05:21:09 2011 +0100
+++ b/design.txt	Mon Jan 17 04:45:09 2011 +0100
@@ -39,7 +39,8 @@
 RevlogStream - Inflater. Perhaps, InflaterStream instead?
 Implement use of fncache (use names from it - perhaps, would help for Mac issues Alex mentioned) along with 'digest'-ing long file names
 
- 
+repo.status - use the same collector class twice, with the difference computed by external code. Add an external walker that keeps the collected maps and reuse it
+  in the Log operation to report files+, files- (see the sketch after this hunk)
  
 Status operation from GUI - guess, usually on a file/subfolder, hence API should allow for a starting path (unlike cmdline, it seems useless to implement include/exclude patterns - GUI users hardly ever enter them)
 
--- a/src/com/tmate/hgkit/console/Status.java	Sun Jan 16 05:21:09 2011 +0100
+++ b/src/com/tmate/hgkit/console/Status.java	Mon Jan 17 04:45:09 2011 +0100
@@ -5,11 +5,16 @@
 
 import static com.tmate.hgkit.ll.HgRepository.TIP;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
 import com.tmate.hgkit.fs.RepositoryLookup;
 import com.tmate.hgkit.ll.HgDataFile;
 import com.tmate.hgkit.ll.HgRepository;
 import com.tmate.hgkit.ll.LocalHgRepo;
 import com.tmate.hgkit.ll.Nodeid;
+import com.tmate.hgkit.ll.StatusCollector;
 
 /**
  *
@@ -30,17 +35,34 @@
 		final StatusDump dump = new StatusDump();
 		dump.showIgnored = false;
 		dump.showClean = false;
-		final int r1 = 0, r2 = 11;
+		StatusCollector sc = new StatusCollector(hgRepo);
+		final int r1 = 0, r2 = 3;
 		System.out.printf("Status for changes between revision %d and %d:\n", r1, r2);
-		hgRepo.status(r1, r2, dump);
-		System.out.println("\nStatus against working dir:");
-		((LocalHgRepo) hgRepo).statusLocal(TIP, dump);
-		System.out.println();
-		System.out.printf("Manifest of the revision %d:\n", r2);
-		hgRepo.getManifest().walk(r2, r2, new Manifest.Dump());
-		System.out.println();
-		System.out.printf("\nStatus of working dir against %d:\n", r2);
-		((LocalHgRepo) hgRepo).statusLocal(r2, dump);
+		sc.walk(r1, r2, dump);
+		// 
+		System.out.println("\n\nSame, but sorted in the way hg status does:");
+		StatusCollector.Record r = sc.status(r1, r2);
+		sortAndPrint('M', r.getModified());
+		sortAndPrint('A', r.getAdded());
+		sortAndPrint('R', r.getRemoved());
+//		System.out.println("\nStatus against working dir:");
+//		((LocalHgRepo) hgRepo).statusLocal(TIP, dump);
+//		System.out.println();
+//		System.out.printf("Manifest of the revision %d:\n", r2);
+//		hgRepo.getManifest().walk(r2, r2, new Manifest.Dump());
+//		System.out.println();
+//		System.out.printf("\nStatus of working dir against %d:\n", r2);
+//		((LocalHgRepo) hgRepo).statusLocal(r2, dump);
+	}
+	
+	private static void sortAndPrint(char prefix, List<String> ul) {
+		ArrayList<String> sortList = new ArrayList<String>(ul);
+		Collections.sort(sortList);
+		for (String s : sortList)  {
+			System.out.print(prefix);
+			System.out.print(' ');
+			System.out.println(s);
+		}
 	}
 	
 	protected static void testStatusInternals(HgRepository hgRepo) {
@@ -53,7 +75,7 @@
 		}
 	}
 
-	private static class StatusDump implements HgRepository.StatusInspector {
+	private static class StatusDump implements StatusCollector.Inspector {
 		public boolean hideStatusPrefix = false; // hg status -n option
 		public boolean showCopied = true; // -C
 		public boolean showIgnored = true; // -i
--- a/src/com/tmate/hgkit/ll/HgRepository.java	Sun Jan 16 05:21:09 2011 +0100
+++ b/src/com/tmate/hgkit/ll/HgRepository.java	Mon Jan 17 04:45:09 2011 +0100
@@ -67,17 +67,4 @@
 	 * Perhaps, should be separate interface, like ContentLookup
 	 */
 	protected abstract RevlogStream resolve(String repositoryPath);
-
-	public abstract void status(int rev1, int rev2 /*WorkingDir - TIP, TIP?*/, StatusInspector inspector);
-
-	public interface StatusInspector {
-		void modified(String fname);
-		void added(String fname);
-		void copied(String fnameOrigin, String fnameAdded); // if copied files of no interest, should delegate to self.added(fnameAdded);
-		void removed(String fname);
-		void clean(String fname);
-		void missing(String fname); // aka deleted (tracked by Hg, but not available in FS any more
-		void unknown(String fname); // not tracked
-		void ignored(String fname);
-	}
 }
--- a/src/com/tmate/hgkit/ll/LocalHgRepo.java	Sun Jan 16 05:21:09 2011 +0100
+++ b/src/com/tmate/hgkit/ll/LocalHgRepo.java	Mon Jan 17 04:45:09 2011 +0100
@@ -46,172 +46,133 @@
 		return repoLocation;
 	}
 	
-	@Override
-	public void status(int rev1, int rev2, final StatusInspector inspector) {
-		final ManifestRevisionCollector collect = new ManifestRevisionCollector();
-		getManifest().walk(rev1, rev1, collect);
-		
-		HgManifest.Inspector compare = new HgManifest.Inspector() {
-
-			public boolean begin(int revision, Nodeid nid) {
-				return true;
-			}
-
-			public boolean next(Nodeid nid, String fname, String flags) {
-				Nodeid nidR1 = collect.idsMap.remove(fname);
-				String flagsR1 = collect.flagsMap.remove(fname);
-				if (nidR1 == null) {
-					inspector.added(fname);
-				} else {
-					if (nidR1.equals(nid) && ((flags == null && flagsR1 == null) || flags.equals(flagsR1))) {
-						inspector.clean(fname);
-					} else {
-						inspector.modified(fname);
-					}
-				}
-				return true;
-			}
-
-			public boolean end(int revision) {
-				for (String fname : collect.idsMap.keySet()) {
-					inspector.removed(fname);
-				}
-				if (collect.idsMap.size() != collect.flagsMap.size()) {
-					throw new IllegalStateException();
-				}
-				return false;
-			}
-		};
-		getManifest().walk(rev2, rev2, compare);
-	}
-	
-	public void statusLocal(int baseRevision, StatusInspector inspector) {
-		LinkedList<File> folders = new LinkedList<File>();
-		final File rootDir = repoDir.getParentFile();
-		folders.add(rootDir);
-		final HgDirstate dirstate = loadDirstate();
-		final HgIgnore hgignore = loadIgnore();
-		TreeSet<String> knownEntries = dirstate.all();
-		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
-		final ManifestRevisionCollector collect = isTipBase ? null : new ManifestRevisionCollector();
-		if (!isTipBase) {
-			getManifest().walk(baseRevision, baseRevision, collect);
-		}
-		do {
-			File d = folders.removeFirst();
-			for (File f : d.listFiles()) {
-				if (f.isDirectory()) {
-					if (!".hg".equals(f.getName())) {
-						folders.addLast(f);
-					}
-				} else {
-					// FIXME path relative to rootDir - need more robust approach
-					String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
-					if (hgignore.isIgnored(fname)) {
-						inspector.ignored(fname);
-					} else {
-						if (knownEntries.remove(fname)) {
-							// modified, added, removed, clean
-							if (collect != null) { // need to check against base revision, not FS file
-								checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
-							} else {
-								checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
-							}
-						} else {
-							inspector.unknown(fname);
-						}
-					}
-				}
-			}
-		} while (!folders.isEmpty());
-		if (collect != null) {
-			for (String r : collect.idsMap.keySet()) {
-				inspector.removed(r);
-			}
-		}
-		for (String m : knownEntries) {
-			// removed from the repository and missing from working dir shall not be reported as 'deleted' 
-			if (dirstate.checkRemoved(m) == null) {
-				inspector.missing(m);
-			}
-		}
-	}
-	
-	private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusInspector inspector) {
-		HgDirstate.Record r;
-		if ((r = dirstate.checkNormal(fname)) != null) {
-			// either clean or modified
-			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
-				inspector.clean(fname);
-			} else {
-				// FIXME check actual content to avoid false modified files
-				inspector.modified(fname);
-			}
-		} else if ((r = dirstate.checkAdded(fname)) != null) {
-			if (r.name2 == null) {
-				inspector.added(fname);
-			} else {
-				inspector.copied(fname, r.name2);
-			}
-		} else if ((r = dirstate.checkRemoved(fname)) != null) {
-			inspector.removed(fname);
-		} else if ((r = dirstate.checkMerged(fname)) != null) {
-			inspector.modified(fname);
-		}
-	}
-	
-	// XXX refactor checkLocalStatus methods in more OO way
-	private void checkLocalStatusAgainstBaseRevision(ManifestRevisionCollector collect, String fname, File f, HgDirstate dirstate, StatusInspector inspector) {
-		// fname is in the dirstate, either Normal, Added, Removed or Merged
-		Nodeid nid1 = collect.idsMap.remove(fname);
-		String flags = collect.flagsMap.remove(fname);
-		HgDirstate.Record r;
-		if (nid1 == null) {
-			// normal: added?
-			// added: not known at the time of baseRevision, shall report
-			// merged: was not known, report as added?
-			if ((r = dirstate.checkAdded(fname)) != null) {
-				if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
-					collect.idsMap.remove(r.name2);
-					collect.idsMap.remove(r.name2);
-					inspector.copied(r.name2, fname);
-					return;
-				}
-				// fall-through, report as added
-			} else if (dirstate.checkRemoved(fname) != null) {
-				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
-				return;
-			}
-			inspector.added(fname);
-		} else {
-			// was known; check whether clean or modified
-			// when added - seems to be the case of a file added once again, hence need to check if content is different
-			if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
-				// either clean or modified
-				HgDataFile fileNode = getFileNode(fname);
-				final int lengthAtRevision = fileNode.length(nid1);
-				if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
-					inspector.modified(fname);
-				} else {
-					// check actual content to see actual changes
-					// XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
-					if (areTheSame(f, fileNode.content(nid1))) {
-						inspector.clean(fname);
-					} else {
-						inspector.modified(fname);
-					}
-				}
-			}
-			// only those left in idsMap after processing are reported as removed 
-		}
-
-		// TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
-		// we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively 
-		// cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: 
-		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
-		// then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
-		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
-	}
+//	public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
+//		LinkedList<File> folders = new LinkedList<File>();
+//		final File rootDir = repoDir.getParentFile();
+//		folders.add(rootDir);
+//		final HgDirstate dirstate = loadDirstate();
+//		final HgIgnore hgignore = loadIgnore();
+//		TreeSet<String> knownEntries = dirstate.all();
+//		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
+//		final StatusCollector.ManifestRevisionInspector collect = isTipBase ? null : new StatusCollector.ManifestRevisionInspector();
+//		if (!isTipBase) {
+//			getManifest().walk(baseRevision, baseRevision, collect);
+//		}
+//		do {
+//			File d = folders.removeFirst();
+//			for (File f : d.listFiles()) {
+//				if (f.isDirectory()) {
+//					if (!".hg".equals(f.getName())) {
+//						folders.addLast(f);
+//					}
+//				} else {
+//					// FIXME path relative to rootDir - need more robust approach
+//					String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
+//					if (hgignore.isIgnored(fname)) {
+//						inspector.ignored(fname);
+//					} else {
+//						if (knownEntries.remove(fname)) {
+//							// modified, added, removed, clean
+//							if (collect != null) { // need to check against base revision, not FS file
+//								checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
+//							} else {
+//								checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
+//							}
+//						} else {
+//							inspector.unknown(fname);
+//						}
+//					}
+//				}
+//			}
+//		} while (!folders.isEmpty());
+//		if (collect != null) {
+//			for (String r : collect.idsMap.keySet()) {
+//				inspector.removed(r);
+//			}
+//		}
+//		for (String m : knownEntries) {
+//			// removed from the repository and missing from working dir shall not be reported as 'deleted' 
+//			if (dirstate.checkRemoved(m) == null) {
+//				inspector.missing(m);
+//			}
+//		}
+//	}
+//	
+//	private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+//		HgDirstate.Record r;
+//		if ((r = dirstate.checkNormal(fname)) != null) {
+//			// either clean or modified
+//			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
+//				inspector.clean(fname);
+//			} else {
+//				// FIXME check actual content to avoid false modified files
+//				inspector.modified(fname);
+//			}
+//		} else if ((r = dirstate.checkAdded(fname)) != null) {
+//			if (r.name2 == null) {
+//				inspector.added(fname);
+//			} else {
+//				inspector.copied(fname, r.name2);
+//			}
+//		} else if ((r = dirstate.checkRemoved(fname)) != null) {
+//			inspector.removed(fname);
+//		} else if ((r = dirstate.checkMerged(fname)) != null) {
+//			inspector.modified(fname);
+//		}
+//	}
+//	
+//	// XXX refactor checkLocalStatus methods in more OO way
+//	private void checkLocalStatusAgainstBaseRevision(StatusCollector.ManifestRevisionInspector collect, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+//		// fname is in the dirstate, either Normal, Added, Removed or Merged
+//		Nodeid nid1 = collect.idsMap.remove(fname);
+//		String flags = collect.flagsMap.remove(fname);
+//		HgDirstate.Record r;
+//		if (nid1 == null) {
+//			// normal: added?
+//			// added: not known at the time of baseRevision, shall report
+//			// merged: was not known, report as added?
+//			if ((r = dirstate.checkAdded(fname)) != null) {
+//				if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
+//					collect.idsMap.remove(r.name2);
+//					collect.idsMap.remove(r.name2);
+//					inspector.copied(r.name2, fname);
+//					return;
+//				}
+//				// fall-through, report as added
+//			} else if (dirstate.checkRemoved(fname) != null) {
+//				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
+//				return;
+//			}
+//			inspector.added(fname);
+//		} else {
+//			// was known; check whether clean or modified
+//			// when added - seems to be the case of a file added once again, hence need to check if content is different
+//			if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
+//				// either clean or modified
+//				HgDataFile fileNode = getFileNode(fname);
+//				final int lengthAtRevision = fileNode.length(nid1);
+//				if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
+//					inspector.modified(fname);
+//				} else {
+//					// check actual content to see actual changes
+//					// XXX consider adding HgDataFile.compare(File/byte[]/whatever) operation to optimize comparison
+//					if (areTheSame(f, fileNode.content(nid1))) {
+//						inspector.clean(fname);
+//					} else {
+//						inspector.modified(fname);
+//					}
+//				}
+//			}
+//			// only those left in idsMap after processing are reported as removed 
+//		}
+//
+//		// TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
+//		// we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively 
+//		// cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: 
+//		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
+//		// then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
+//		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
+//	}
 
 	private static String todoGenerateFlags(String fname) {
 		// FIXME implement
@@ -416,24 +377,4 @@
 		}
 		return path;
 	}
-
-	// XXX idsMap is being modified from outside. It's better to let outer (modifying) code to create these maps instead
-	private static final class ManifestRevisionCollector implements HgManifest.Inspector {
-		final HashMap<String, Nodeid> idsMap = new HashMap<String, Nodeid>();
-		final HashMap<String, String> flagsMap = new HashMap<String, String>();
-
-		public boolean next(Nodeid nid, String fname, String flags) {
-			idsMap.put(fname, nid);
-			flagsMap.put(fname, flags);
-			return true;
-		}
-
-		public boolean end(int revision) {
-			return false;
-		}
-
-		public boolean begin(int revision, Nodeid nid) {
-			return true;
-		}
-	}
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/com/tmate/hgkit/ll/StatusCollector.java	Mon Jan 17 04:45:09 2011 +0100
@@ -0,0 +1,258 @@
+/*
+ * Copyright (c) 2011 Artem Tikhomirov 
+ */
+package com.tmate.hgkit.ll;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeSet;
+
+/**
+ * RevisionWalker?
+ * @author artem
+ */
+public class StatusCollector {
+
+	private final HgRepository repo;
+	private final Map<Integer, ManifestRevisionInspector> cache; // sparse array, in fact
+
+	public StatusCollector(HgRepository hgRepo) {
+		this.repo = hgRepo;
+		cache = new HashMap<Integer, ManifestRevisionInspector>();
+	}
+	
+	private ManifestRevisionInspector get(int rev) {
+		ManifestRevisionInspector i = cache.get(rev);
+		if (i == null) {
+			i = new ManifestRevisionInspector(rev, rev);
+			cache.put(rev, i);
+			repo.getManifest().walk(rev, rev, i);
+		}
+		return i;
+	}
+
+	public void walk(int rev1, int rev2, Inspector inspector) {
+		if (rev1 == rev2) {
+			throw new IllegalArgumentException();
+		}
+		// in fact, rev1 and rev2 are often next (or close) to each other,
+		// thus, we can optimize Manifest reads here (manifest.walk(rev1, rev2))
+ 
+		ManifestRevisionInspector r1, r2;
+		if (!cache.containsKey(rev1) && !cache.containsKey(rev2) && Math.abs(rev1 - rev2) < 5 /*subjective equivalent of 'close enough'*/) {
+			int minRev = rev1 < rev2 ? rev1 : rev2;
+			int maxRev = minRev == rev1 ? rev2 : rev1;
+			r1 = r2 = new ManifestRevisionInspector(minRev, maxRev);
+			for (int i = minRev; i <= maxRev; i++) {
+				cache.put(i, r1);
+			}
+			repo.getManifest().walk(minRev, maxRev, r1);
+		} else {
+			r1 = get(rev1);
+			r2 = get(rev2);
+		}
+		
+		TreeSet<String> r1Files = new TreeSet<String>(r1.files(rev1));
+		for (String fname : r2.files(rev2)) {
+			if (r1Files.remove(fname)) {
+				Nodeid nidR1 = r1.nodeid(rev1, fname);
+				Nodeid nidR2 = r2.nodeid(rev2, fname);
+				String flagsR1 = r1.flags(rev1, fname);
+				String flagsR2 = r2.flags(rev2, fname);
+				if (nidR1.equals(nidR2) && ((flagsR1 == null && flagsR2 == null) || (flagsR2 != null && flagsR2.equals(flagsR1)))) {
+					inspector.clean(fname);
+				} else {
+					inspector.modified(fname);
+				}
+			} else {
+				inspector.added(fname);
+			}
+		}
+		for (String left : r1Files) {
+			inspector.removed(left);
+		}
+		// inspector.done() if useful e.g. in UI client
+	}
+	
+	public Record status(int rev1, int rev2) {
+		Record rv = new Record();
+		walk(rev1, rev2, rv);
+		return rv;
+	}
+
+	public interface Inspector {
+		void modified(String fname);
+		void added(String fname);
+		void copied(String fnameOrigin, String fnameAdded); // if copied files are of no interest, implementations should delegate to self.added(fnameAdded)
+		void removed(String fname);
+		void clean(String fname);
+		void missing(String fname); // aka deleted (tracked by Hg, but not available in FS any more)
+		void unknown(String fname); // not tracked
+		void ignored(String fname);
+	}
+
+	// XXX for r1..r2 status, only modified, added, removed (and perhaps, clean) make sense
+	public static class Record implements Inspector {
+		private List<String> modified, added, removed, clean, missing, unknown, ignored;
+		private Map<String, String> copied;
+		
+		public List<String> getModified() {
+			return proper(modified);
+		}
+
+		public List<String> getAdded() {
+			return proper(added);
+		}
+
+		public List<String> getRemoved() {
+			return proper(removed);
+		}
+
+		public Map<String,String> getCopied() {
+			if (copied == null) {
+				return Collections.emptyMap();
+			}
+			return Collections.unmodifiableMap(copied);
+		}
+
+		public List<String> getClean() {
+			return proper(clean);
+		}
+
+		public List<String> getMissing() {
+			return proper(missing);
+		}
+
+		public List<String> getUnknown() {
+			return proper(unknown);
+		}
+
+		public List<String> getIgnored() {
+			return proper(ignored);
+		}
+		
+		private List<String> proper(List<String> l) {
+			if (l == null) {
+				return Collections.emptyList();
+			}
+			return Collections.unmodifiableList(l);
+		}
+
+		//
+		//
+		
+		public void modified(String fname) {
+			modified = doAdd(modified, fname);
+		}
+
+		public void added(String fname) {
+			added = doAdd(added, fname);
+		}
+
+		public void copied(String fnameOrigin, String fnameAdded) {
+			if (copied == null) {
+				copied = new LinkedHashMap<String, String>();
+			}
+			copied.put(fnameOrigin, fnameAdded);
+		}
+
+		public void removed(String fname) {
+			removed = doAdd(removed, fname);
+		}
+
+		public void clean(String fname) {
+			clean = doAdd(clean, fname);
+		}
+
+		public void missing(String fname) {
+			missing = doAdd(missing, fname);
+		}
+
+		public void unknown(String fname) {
+			unknown = doAdd(unknown, fname);
+		}
+
+		public void ignored(String fname) {
+			ignored = doAdd(ignored, fname);
+		}
+
+		private static List<String> doAdd(List<String> l, String s) {
+			if (l == null) {
+				l = new LinkedList<String>();
+			}
+			l.add(s);
+			return l;
+		}
+	}
+
+	public /*XXX private, actually. Made public until repo.statusLocal finds a better place*/ static final class ManifestRevisionInspector implements HgManifest.Inspector {
+		private final HashMap<String, Nodeid>[] idsMap;
+		private final HashMap<String, String>[] flagsMap;
+		private final int baseRevision;
+		private int r = -1; // cursor
+
+		/**
+		 * [minRev, maxRev]
+		 * @param minRev - inclusive
+		 * @param maxRev - inclusive
+		 */
+		@SuppressWarnings("unchecked")
+		public ManifestRevisionInspector(int minRev, int maxRev) {
+			baseRevision = minRev;
+			int range = maxRev - minRev + 1;
+			idsMap = new HashMap[range];
+			flagsMap = new HashMap[range];
+		}
+		
+		public Collection<String> files(int rev) {
+			if (rev < baseRevision || rev >= baseRevision + idsMap.length) {
+				throw new IllegalArgumentException();
+			}
+			return idsMap[rev - baseRevision].keySet();
+		}
+
+		public Nodeid nodeid(int rev, String fname) {
+			if (rev < baseRevision || rev >= baseRevision + idsMap.length) {
+				throw new IllegalArgumentException();
+			}
+			return idsMap[rev - baseRevision].get(fname);
+		}
+
+		public String flags(int rev, String fname) {
+			if (rev < baseRevision || rev >= baseRevision + idsMap.length) {
+				throw new IllegalArgumentException();
+			}
+			return flagsMap[rev - baseRevision].get(fname);
+		}
+
+		//
+
+		public boolean next(Nodeid nid, String fname, String flags) {
+			idsMap[r].put(fname, nid);
+			flagsMap[r].put(fname, flags);
+			return true;
+		}
+
+		public boolean end(int revision) {
+			assert revision == r + baseRevision;
+			r = -1;
+			return revision+1 < baseRevision + idsMap.length;
+		}
+
+		public boolean begin(int revision, Nodeid nid) {
+			if (revision < baseRevision || revision >= baseRevision + idsMap.length) {
+				throw new IllegalArgumentException();
+			}
+			r = revision - baseRevision;
+			idsMap[r] = new HashMap<String, Nodeid>();
+			flagsMap[r] = new HashMap<String, String>();
+			return true;
+		}
+	}
+
+}
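A brief usage sketch (not part of this changeset; assumes the repository has at least four revisions and that hgRepo is obtained elsewhere) illustrating the cache-friendly behaviour of the new class: manifest revisions visited by one query are reused by later queries on the same StatusCollector instance.

import com.tmate.hgkit.ll.HgRepository;
import com.tmate.hgkit.ll.StatusCollector;

public class StatusCollectorDemo {
	// hgRepo is assumed to be obtained elsewhere, e.g. via RepositoryLookup as in console/Status.java
	public static void demo(HgRepository hgRepo) {
		StatusCollector sc = new StatusCollector(hgRepo);
		// revisions 0..3 are close to each other, so a single manifest walk covers the whole range
		sc.walk(0, 3, new StatusCollector.Record());
		// both revisions below were cached by the previous call, so no further manifest reads are needed
		StatusCollector.Record r = sc.status(1, 2);
		System.out.println("Modified between revisions 1 and 2: " + r.getModified());
	}
}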