diff src/com/tmate/hgkit/ll/LocalHgRepo.java @ 55:05829a70b30b
Status operation extracted into separate, cache-friendly class
author    Artem Tikhomirov <tikhomirov.artem@gmail.com>
date      Mon, 17 Jan 2011 04:45:09 +0100
parents   f1db8610da62
children  8b0d6f1bd6b4
--- a/src/com/tmate/hgkit/ll/LocalHgRepo.java	Sun Jan 16 05:21:09 2011 +0100
+++ b/src/com/tmate/hgkit/ll/LocalHgRepo.java	Mon Jan 17 04:45:09 2011 +0100
@@ -46,172 +46,133 @@
 		return repoLocation;
 	}
 
-	@Override
-	public void status(int rev1, int rev2, final StatusInspector inspector) {
-		final ManifestRevisionCollector collect = new ManifestRevisionCollector();
-		getManifest().walk(rev1, rev1, collect);
-
-		HgManifest.Inspector compare = new HgManifest.Inspector() {
-
-			public boolean begin(int revision, Nodeid nid) {
-				return true;
-			}
-
-			public boolean next(Nodeid nid, String fname, String flags) {
-				Nodeid nidR1 = collect.idsMap.remove(fname);
-				String flagsR1 = collect.flagsMap.remove(fname);
-				if (nidR1 == null) {
-					inspector.added(fname);
-				} else {
-					if (nidR1.equals(nid) && ((flags == null && flagsR1 == null) || flags.equals(flagsR1))) {
-						inspector.clean(fname);
-					} else {
-						inspector.modified(fname);
-					}
-				}
-				return true;
-			}
-
-			public boolean end(int revision) {
-				for (String fname : collect.idsMap.keySet()) {
-					inspector.removed(fname);
-				}
-				if (collect.idsMap.size() != collect.flagsMap.size()) {
-					throw new IllegalStateException();
-				}
-				return false;
-			}
-		};
-		getManifest().walk(rev2, rev2, compare);
-	}
-
-	public void statusLocal(int baseRevision, StatusInspector inspector) {
-		LinkedList<File> folders = new LinkedList<File>();
-		final File rootDir = repoDir.getParentFile();
-		folders.add(rootDir);
-		final HgDirstate dirstate = loadDirstate();
-		final HgIgnore hgignore = loadIgnore();
-		TreeSet<String> knownEntries = dirstate.all();
-		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
-		final ManifestRevisionCollector collect = isTipBase ? null : new ManifestRevisionCollector();
-		if (!isTipBase) {
-			getManifest().walk(baseRevision, baseRevision, collect);
-		}
-		do {
-			File d = folders.removeFirst();
-			for (File f : d.listFiles()) {
-				if (f.isDirectory()) {
-					if (!".hg".equals(f.getName())) {
-						folders.addLast(f);
-					}
-				} else {
-					// FIXME path relative to rootDir - need more robust approach
-					String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
-					if (hgignore.isIgnored(fname)) {
-						inspector.ignored(fname);
-					} else {
-						if (knownEntries.remove(fname)) {
-							// modified, added, removed, clean
-							if (collect != null) { // need to check against base revision, not FS file
-								checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
-							} else {
-								checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
-							}
-						} else {
-							inspector.unknown(fname);
-						}
-					}
-				}
-			}
-		} while (!folders.isEmpty());
-		if (collect != null) {
-			for (String r : collect.idsMap.keySet()) {
-				inspector.removed(r);
-			}
-		}
-		for (String m : knownEntries) {
-			// removed from the repository and missing from working dir shall not be reported as 'deleted'
-			if (dirstate.checkRemoved(m) == null) {
-				inspector.missing(m);
-			}
-		}
-	}
-
-	private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusInspector inspector) {
-		HgDirstate.Record r;
-		if ((r = dirstate.checkNormal(fname)) != null) {
-			// either clean or modified
-			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
-				inspector.clean(fname);
-			} else {
-				// FIXME check actual content to avoid false modified files
-				inspector.modified(fname);
-			}
-		} else if ((r = dirstate.checkAdded(fname)) != null) {
-			if (r.name2 == null) {
-				inspector.added(fname);
-			} else {
-				inspector.copied(fname, r.name2);
-			}
-		} else if ((r = dirstate.checkRemoved(fname)) != null) {
-			inspector.removed(fname);
-		} else if ((r = dirstate.checkMerged(fname)) != null) {
-			inspector.modified(fname);
-		}
-	}
-
-	// XXX refactor checkLocalStatus methods in more OO way
-	private void checkLocalStatusAgainstBaseRevision(ManifestRevisionCollector collect, String fname, File f, HgDirstate dirstate, StatusInspector inspector) {
-		// fname is in the dirstate, either Normal, Added, Removed or Merged
-		Nodeid nid1 = collect.idsMap.remove(fname);
-		String flags = collect.flagsMap.remove(fname);
-		HgDirstate.Record r;
-		if (nid1 == null) {
-			// normal: added?
-			// added: not known at the time of baseRevision, shall report
-			// merged: was not known, report as added?
-			if ((r = dirstate.checkAdded(fname)) != null) {
-				if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
-					collect.idsMap.remove(r.name2);
-					collect.idsMap.remove(r.name2);
-					inspector.copied(r.name2, fname);
-					return;
-				}
-				// fall-through, report as added
-			} else if (dirstate.checkRemoved(fname) != null) {
-				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
-				return;
-			}
-			inspector.added(fname);
-		} else {
-			// was known; check whether clean or modified
-			// when added - seems to be the case of a file added once again, hence need to check if content is different
-			if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
-				// either clean or modified
-				HgDataFile fileNode = getFileNode(fname);
-				final int lengthAtRevision = fileNode.length(nid1);
-				if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
-					inspector.modified(fname);
-				} else {
-					// check actual content to see actual changes
-					// XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
-					if (areTheSame(f, fileNode.content(nid1))) {
-						inspector.clean(fname);
-					} else {
-						inspector.modified(fname);
-					}
-				}
-			}
-			// only those left in idsMap after processing are reported as removed
-		}
-
-		// TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
-		// we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively
-		// cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids:
-		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
-		// then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
-		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
-	}
+//	public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
+//		LinkedList<File> folders = new LinkedList<File>();
+//		final File rootDir = repoDir.getParentFile();
+//		folders.add(rootDir);
+//		final HgDirstate dirstate = loadDirstate();
+//		final HgIgnore hgignore = loadIgnore();
+//		TreeSet<String> knownEntries = dirstate.all();
+//		final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
+//		final StatusCollector.ManifestRevisionInspector collect = isTipBase ? null : new StatusCollector.ManifestRevisionInspector();
+//		if (!isTipBase) {
+//			getManifest().walk(baseRevision, baseRevision, collect);
+//		}
+//		do {
+//			File d = folders.removeFirst();
+//			for (File f : d.listFiles()) {
+//				if (f.isDirectory()) {
+//					if (!".hg".equals(f.getName())) {
+//						folders.addLast(f);
+//					}
+//				} else {
+//					// FIXME path relative to rootDir - need more robust approach
+//					String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
+//					if (hgignore.isIgnored(fname)) {
+//						inspector.ignored(fname);
+//					} else {
+//						if (knownEntries.remove(fname)) {
+//							// modified, added, removed, clean
+//							if (collect != null) { // need to check against base revision, not FS file
+//								checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector);
+//							} else {
+//								checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
+//							}
+//						} else {
+//							inspector.unknown(fname);
+//						}
+//					}
+//				}
+//			}
+//		} while (!folders.isEmpty());
+//		if (collect != null) {
+//			for (String r : collect.idsMap.keySet()) {
+//				inspector.removed(r);
+//			}
+//		}
+//		for (String m : knownEntries) {
+//			// removed from the repository and missing from working dir shall not be reported as 'deleted'
+//			if (dirstate.checkRemoved(m) == null) {
+//				inspector.missing(m);
+//			}
+//		}
+//	}
+//
+//	private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+//		HgDirstate.Record r;
+//		if ((r = dirstate.checkNormal(fname)) != null) {
+//			// either clean or modified
+//			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
+//				inspector.clean(fname);
+//			} else {
+//				// FIXME check actual content to avoid false modified files
+//				inspector.modified(fname);
+//			}
+//		} else if ((r = dirstate.checkAdded(fname)) != null) {
+//			if (r.name2 == null) {
+//				inspector.added(fname);
+//			} else {
+//				inspector.copied(fname, r.name2);
+//			}
+//		} else if ((r = dirstate.checkRemoved(fname)) != null) {
+//			inspector.removed(fname);
+//		} else if ((r = dirstate.checkMerged(fname)) != null) {
+//			inspector.modified(fname);
+//		}
+//	}
+//
+//	// XXX refactor checkLocalStatus methods in more OO way
+//	private void checkLocalStatusAgainstBaseRevision(StatusCollector.ManifestRevisionInspector collect, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+//		// fname is in the dirstate, either Normal, Added, Removed or Merged
+//		Nodeid nid1 = collect.idsMap.remove(fname);
+//		String flags = collect.flagsMap.remove(fname);
+//		HgDirstate.Record r;
+//		if (nid1 == null) {
+//			// normal: added?
+//			// added: not known at the time of baseRevision, shall report
+//			// merged: was not known, report as added?
+//			if ((r = dirstate.checkAdded(fname)) != null) {
+//				if (r.name2 != null && collect.idsMap.containsKey(r.name2)) {
+//					collect.idsMap.remove(r.name2);
+//					collect.idsMap.remove(r.name2);
+//					inspector.copied(r.name2, fname);
+//					return;
+//				}
+//				// fall-through, report as added
+//			} else if (dirstate.checkRemoved(fname) != null) {
+//				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
+//				return;
+//			}
+//			inspector.added(fname);
+//		} else {
+//			// was known; check whether clean or modified
+//			// when added - seems to be the case of a file added once again, hence need to check if content is different
+//			if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
+//				// either clean or modified
+//				HgDataFile fileNode = getFileNode(fname);
+//				final int lengthAtRevision = fileNode.length(nid1);
+//				if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
+//					inspector.modified(fname);
+//				} else {
+//					// check actual content to see actual changes
+//					// XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
+//					if (areTheSame(f, fileNode.content(nid1))) {
+//						inspector.clean(fname);
+//					} else {
+//						inspector.modified(fname);
+//					}
+//				}
+//			}
+//			// only those left in idsMap after processing are reported as removed
+//		}
+//
+//		// TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
+//		// we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively
+//		// cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids:
+//		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
+//		// then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
+//		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
+//	}
 
 	private static String todoGenerateFlags(String fname) {
 		// FIXME implement
@@ -416,24 +377,4 @@
 		}
 		return path;
 	}
-
-	// XXX idsMap is being modified from outside. It's better to let outer (modifying) code to create these maps instead
-	private static final class ManifestRevisionCollector implements HgManifest.Inspector {
-		final HashMap<String, Nodeid> idsMap = new HashMap<String, Nodeid>();
-		final HashMap<String, String> flagsMap = new HashMap<String, String>();
-
-		public boolean next(Nodeid nid, String fname, String flags) {
-			idsMap.put(fname, nid);
-			flagsMap.put(fname, flags);
-			return true;
-		}
-
-		public boolean end(int revision) {
-			return false;
-		}
-
-		public boolean begin(int revision, Nodeid nid) {
-			return true;
-		}
-	}
 }
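
Note on the refactoring above: the status callbacks exercised throughout this change (clean, modified, added, removed, copied, missing, unknown, ignored) now sit behind StatusCollector.Inspector, as the commented-out signatures show. The sketch below only illustrates that callback shape with a self-contained stand-in interface and a printing implementation; it is not the project's actual interface, and the argument order of copied() is a guess, since the two call sites above pass (fname, r.name2) and (r.name2, fname) respectively.

// Stand-in for StatusCollector.Inspector (assumed shape, for illustration only).
interface StatusInspectorSketch {
	void modified(String fname);
	void added(String fname);
	void copied(String fnameFrom, String fnameTo); // argument order assumed
	void removed(String fname);
	void clean(String fname);
	void missing(String fname); // tracked, but absent from the working directory
	void unknown(String fname); // on disk, not tracked, not ignored
	void ignored(String fname);
}

// Prints one line per callback, roughly in 'hg status' notation.
public class PrintingInspector implements StatusInspectorSketch {
	public void modified(String fname) { System.out.println("M " + fname); }
	public void added(String fname)    { System.out.println("A " + fname); }
	public void copied(String from, String to) { System.out.println("A " + to + " (copied from " + from + ")"); }
	public void removed(String fname)  { System.out.println("R " + fname); }
	public void clean(String fname)    { System.out.println("C " + fname); }
	public void missing(String fname)  { System.out.println("! " + fname); }
	public void unknown(String fname)  { System.out.println("? " + fname); }
	public void ignored(String fname)  { System.out.println("I " + fname); }

	public static void main(String[] args) {
		// In the real code the callbacks are driven by the status walk
		// (statusLocal(...) before this change, StatusCollector afterwards);
		// here a couple are fired by hand just to show the output format.
		PrintingInspector p = new PrintingInspector();
		p.modified("src/com/tmate/hgkit/ll/LocalHgRepo.java");
		p.unknown("notes.txt");
	}
}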
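
The TODO at the end of the commented-out checkLocalStatusAgainstBaseRevision asks whether comparing a locally computed nodeid against the manifest entry could replace byte-wise content comparison. For reference, Mercurial derives a file revision's nodeid as SHA-1 over the two parent nodeids (byte-wise smaller one first) followed by the revision text, so the idea amounts to hashing the working-copy content together with the parents recorded for the base revision. The stand-alone sketch below illustrates that hash; it is not code from this repository, and it ignores the metadata prefix revlogs prepend for copies/renames.

import java.security.MessageDigest;

// Sketch of Mercurial's file-revision hash: sha1(min(p1,p2) + max(p1,p2) + text),
// where p1/p2 are the 20-byte parent nodeids. Not project code.
public class NodeidSketch {

	static byte[] fileNodeid(byte[] p1, byte[] p2, byte[] content) throws Exception {
		byte[] lo = p1, hi = p2;
		for (int i = 0; i < 20; i++) { // order parents byte-wise, smaller one first
			int a = p1[i] & 0xff, b = p2[i] & 0xff;
			if (a != b) {
				if (a > b) { lo = p2; hi = p1; }
				break;
			}
		}
		MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
		sha1.update(lo);
		sha1.update(hi);
		sha1.update(content);
		return sha1.digest();
	}

	public static void main(String[] args) throws Exception {
		byte[] nullId = new byte[20]; // Mercurial's null revision id (all zeroes)
		byte[] nid = fileNodeid(nullId, nullId, "hello\n".getBytes("US-ASCII"));
		StringBuilder hex = new StringBuilder();
		for (byte b : nid) {
			hex.append(String.format("%02x", b & 0xff));
		}
		// nodeid the file would get as a root revision with this content
		System.out.println(hex);
	}
}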