diff src/com/tmate/hgkit/ll/LocalHgRepo.java @ 22:603806cd2dc6

Status of local working dir against non-tip base revision
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Thu, 06 Jan 2011 03:30:20 +0100
parents e929cecae4e1
children d4fdd1845b3f
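This change teaches the working-directory status walk to compare against an arbitrary base revision rather than only the manifest tip, i.e. the equivalent of "hg status --rev <base>" on the command line. For orientation, a rough usage sketch of the callback-based API the hunks below exercise; the enclosing method name (statusLocal) and void return types are assumptions, only the LocalHgRepo class, the TIP constant and the StatusInspector callback names actually appear in the diff:

	// hypothetical caller; statusLocal is an illustrative name, the callbacks mirror those invoked in the hunks below
	static void printStatus(LocalHgRepo repo, int baseRevision) {
		repo.statusLocal(baseRevision, new StatusInspector() {
			public void modified(String fname) { System.out.println("M " + fname); }
			public void added(String fname)    { System.out.println("A " + fname); }
			public void removed(String fname)  { System.out.println("R " + fname); }
			public void clean(String fname)    { System.out.println("C " + fname); }
			public void missing(String fname)  { System.out.println("! " + fname); }
			public void unknown(String fname)  { System.out.println("? " + fname); }
			public void ignored(String fname)  { System.out.println("I " + fname); }
			// note: the two call sites below pass the copy origin in different argument positions
			public void copied(String fname1, String fname2) { System.out.println("copy: " + fname1 + " / " + fname2); }
		});
	}
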
--- a/src/com/tmate/hgkit/ll/LocalHgRepo.java	Wed Jan 05 04:10:28 2011 +0100
+++ b/src/com/tmate/hgkit/ll/LocalHgRepo.java	Thu Jan 06 03:30:20 2011 +0100
@@ -3,6 +3,7 @@
  */
 package com.tmate.hgkit.ll;
 
+import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
@@ -91,7 +92,7 @@
 		final HgDirstate dirstate = loadDirstate();
 		final HgIgnore hgignore = loadIgnore();
 		TreeSet<String> knownEntries = dirstate.all();
-		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().revisionCount();
+		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
 		final ManifestRevisionCollector collect = isTipBase ? null : new ManifestRevisionCollector();
 		if (!isTipBase) {
 			getManifest().walk(baseRevision, baseRevision, collect);
@@ -104,32 +105,17 @@
 						folders.addLast(f);
 					}
 				} else {
-					// FIXME path relative to rootDir
+					// FIXME path relative to rootDir - need more robust approach
 					String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
 					if (hgignore.isIgnored(fname)) {
 						inspector.ignored(fname);
 					} else {
 						if (knownEntries.remove(fname)) {
 							// modified, added, removed, clean
-							HgDirstate.Record r;
-							if ((r = dirstate.checkNormal(fname)) != null) {
-								// either clean or modified
-								if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
-									inspector.clean(fname);
-								} else {
-									// FIXME check actual content to avoid false modified files
-									inspector.modified(fname);
-								}
-							} else if ((r = dirstate.checkAdded(fname)) != null) {
-								if (r.name2 == null) {
-									inspector.added(fname);
-								} else {
-									inspector.copied(fname, r.name2);
-								}
-							} else if ((r = dirstate.checkRemoved(fname)) != null) {
-								inspector.removed(fname);
-							} else if ((r = dirstate.checkMerged(fname)) != null) {
-								inspector.modified(fname);
+							if (collect != null) { // need to check against base revision, not FS file
+								checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
+							} else {
+								checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
 							}
 						} else {
 							inspector.unknown(fname);
@@ -138,11 +124,113 @@
 				}
 			}
 		} while (!folders.isEmpty());
+		if (collect != null) {
+			for (String r : collect.idsMap.keySet()) {
+				inspector.removed(r);
+			}
+		}
 		for (String m : knownEntries) {
-			inspector.missing(m);
+			// removed from the repository and missing from working dir shall not be reported as 'deleted' 
+			if (dirstate.checkRemoved(m) == null) {
+				inspector.missing(m);
+			}
 		}
 	}
 	
+	private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusInspector inspector) {
+		HgDirstate.Record r;
+		if ((r = dirstate.checkNormal(fname)) != null) {
+			// either clean or modified
+			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
+				inspector.clean(fname);
+			} else {
+				// FIXME check actual content to avoid false modified files
+				inspector.modified(fname);
+			}
+		} else if ((r = dirstate.checkAdded(fname)) != null) {
+			if (r.name2 == null) {
+				inspector.added(fname);
+			} else {
+				inspector.copied(fname, r.name2);
+			}
+		} else if ((r = dirstate.checkRemoved(fname)) != null) {
+			inspector.removed(fname);
+		} else if ((r = dirstate.checkMerged(fname)) != null) {
+			inspector.modified(fname);
+		}
+	}
+	
+	// XXX refactor the checkLocalStatus methods in a more OO way
+	private void checkLocalStatusAgainstBaseRevision(ManifestRevisionCollector collect, String fname, File f, HgDirstate dirstate, StatusInspector inspector) {
+		// fname is in the dirstate, either Normal, Added, Removed or Merged
+		Nodeid nid1 = collect.idsMap.remove(fname);
+		String flags = collect.flagsMap.remove(fname);
+		HgDirstate.Record r;
+		if (nid1 == null) {
+			// normal: added?
+			// added: not known at the time of baseRevision, shall report
+			// merged: was not known, report as added?
+			if ((r = dirstate.checkAdded(fname)) != null) {
+				if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
+					collect.idsMap.remove(r.name2);
+					collect.flagsMap.remove(r.name2);
+					inspector.copied(r.name2, fname);
+					return;
+				}
+				// fall-through, report as added
+			} else if (dirstate.checkRemoved(fname) != null) {
+				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
+				return;
+			}
+			inspector.added(fname);
+		} else {
+			// was known; check whether clean or modified
+			// when added - seems to be the case of a file added once again, hence need to check if content is different
+			if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
+				// either clean or modified
+				HgDataFile fileNode = getFileNode(fname);
+				final int lengthAtRevision = fileNode.length(nid1);
+				if (r.size /* XXX File.length() ?! */ != lengthAtRevision || (flags == null ? todoGenerateFlags(fname /*java.io.File*/) != null : !flags.equals(todoGenerateFlags(fname)))) {
+					inspector.modified(fname);
+				} else {
+					// check actual content to see actual changes
+					// XXX consider adding HgDataFile.compare(File/byte[]/whatever) operation to optimize comparison
+					if (areTheSame(f, fileNode.content(nid1))) {
+						inspector.clean(fname);
+					} else {
+						inspector.modified(fname);
+					}
+				}
+			}
+			// only those left in idsMap after processing are reported as removed 
+		}
+
+		// TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
+		// we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively 
+		// cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: 
+		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
+		// then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
+		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
+	}
+
+	private static String todoGenerateFlags(String fname) {
+		// FIXME implement
+		return null;
+	}
+	private static boolean areTheSame(File f, byte[] data) {
+		BufferedInputStream is = null;
+		try {
+			is = new BufferedInputStream(new FileInputStream(f));
+			int i = 0;
+			// increment only for a successful match, otherwise we can't tell whether the last byte in data matched the stream
+			while (i < data.length && data[i] == is.read()) i++;
+			return i == data.length && is.read() == -1; // although data length is expected to be the same (see caller), check that we reached EOF, no more data left
+		} catch (IOException ex) {
+			ex.printStackTrace(); // log warn
+		} finally { if (is != null) try { is.close(); } catch (IOException closeEx) { /* stream no longer needed */ } }
+		return false;
+	}
+
 	// XXX package-local, unless there are cases when required from outside (guess, working dir/revision walkers may hide dirstate access and no public visibility needed)
 	public final HgDirstate loadDirstate() {
 		// XXX may cache in SoftReference if creation is expensive
@@ -324,7 +412,8 @@
 		return path;
 	}
 
-	private final class ManifestRevisionCollector implements HgManifest.Inspector {
+	// XXX idsMap is modified from outside. It would be better to let the outer (modifying) code create these maps instead
+	private static final class ManifestRevisionCollector implements HgManifest.Inspector {
 		final HashMap<String, Nodeid> idsMap = new HashMap<String, Nodeid>();
 		final HashMap<String, String> flagsMap = new HashMap<String, String>();
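
The TODO inside checkLocalStatusAgainstBaseRevision above speculates about comparing nodeids rather than raw bytes. For reference, Mercurial derives a file revision's nodeid as SHA-1 over the two parent nodeids in ascending byte order followed by the stored text (copy/rename metadata, when present, is prepended to that text). A minimal sketch of the computation, ignoring copy metadata; the method name is illustrative and not part of this changeset:

	// sketch only: nodeid = SHA-1(min(p1,p2) + max(p1,p2) + text), to be compared with the Nodeid from the base-revision manifest
	static byte[] hashRevision(byte[] p1, byte[] p2, byte[] text) throws java.security.NoSuchAlgorithmException {
		byte[] first = p1, second = p2;
		for (int i = 0; i < first.length && i < second.length; i++) { // order parents by unsigned byte value
			int a = first[i] & 0xFF, b = second[i] & 0xFF;
			if (a != b) {
				if (a > b) { byte[] t = first; first = second; second = t; }
				break;
			}
		}
		java.security.MessageDigest sha1 = java.security.MessageDigest.getInstance("SHA-1");
		sha1.update(first);
		sha1.update(second);
		sha1.update(text);
		return sha1.digest(); // 20 bytes, the same form as a manifest Nodeid
	}

As the TODO notes, identical content with different parents still produces a different nodeid, so a hash comparison answers "same file revision", not "same bytes".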