# HG changeset patch
# User Artem Tikhomirov
# Date 1315079173 -7200
# Node ID 6dbbc53fc46d78249dfba0126ef5c31414eaf93e
# Parent  7232b94f2ae3ec3efc650acaba3ef104857806cd
Use Path instead of plain String for manifest file names

diff -r 7232b94f2ae3 -r 6dbbc53fc46d cmdline/org/tmatesoft/hg/console/Main.java
--- a/cmdline/org/tmatesoft/hg/console/Main.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/cmdline/org/tmatesoft/hg/console/Main.java	Sat Sep 03 21:46:13 2011 +0200
@@ -25,6 +25,7 @@
 import java.util.Map;
 
 import org.junit.Assert;
+import org.tmatesoft.hg.core.HgBadStateException;
 import org.tmatesoft.hg.core.HgDataStreamException;
 import org.tmatesoft.hg.core.HgLogCommand;
 import org.tmatesoft.hg.core.HgCatCommand;
@@ -46,6 +47,7 @@
 import org.tmatesoft.hg.repo.HgStatusCollector;
 import org.tmatesoft.hg.repo.HgStatusInspector;
 import org.tmatesoft.hg.repo.HgSubrepoLocation;
+import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgSubrepoLocation.Kind;
 import org.tmatesoft.hg.repo.HgWorkingCopyStatusCollector;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
@@ -356,13 +358,16 @@
        hgRepo.getManifest().walk(0, TIP, new ManifestDump());
    }
 
-   public static final class ManifestDump implements HgManifest.Inspector {
+   public static final class ManifestDump implements HgManifest.Inspector2 {
        public boolean begin(int manifestRevision, Nodeid nid, int changelogRevision) {
            System.out.printf("%d : %s\n", manifestRevision, nid);
            return true;
        }
        public boolean next(Nodeid nid, String fname, String flags) {
+           throw new HgBadStateException(HgManifest.Inspector2.class.getName());
+       }
+       public boolean next(Nodeid nid, Path fname, Flags flags) {
            System.out.println(nid + "\t" + fname + "\t\t" + flags);
            return true;
        }
diff -r 7232b94f2ae3 -r 6dbbc53fc46d src/org/tmatesoft/hg/core/HgFileInformer.java
--- a/src/org/tmatesoft/hg/core/HgFileInformer.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/src/org/tmatesoft/hg/core/HgFileInformer.java	Sat Sep 03 21:46:13 2011 +0200
@@ -103,14 +103,14 @@
            repo.getManifest().walk(csetRev, csetRev, cachedManifest);
            // cachedManifest shall be meaningful - changelog.getLocalRevision above ensures we've got version that exists.
        }
-       Nodeid toExtract = cachedManifest.nodeid(file.toString());
+       Nodeid toExtract = cachedManifest.nodeid(file);
        try {
            if (toExtract == null && followRenames) {
                while (toExtract == null && dataFile.isCopy()) {
                    renamed = true;
                    file = dataFile.getCopySourceName();
                    dataFile = repo.getFileNode(file);
-                   toExtract = cachedManifest.nodeid(file.toString());
+                   toExtract = cachedManifest.nodeid(file);
                }
            }
        } catch (HgDataStreamException ex) {
diff -r 7232b94f2ae3 -r 6dbbc53fc46d src/org/tmatesoft/hg/core/HgManifestCommand.java
--- a/src/org/tmatesoft/hg/core/HgManifestCommand.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/src/org/tmatesoft/hg/core/HgManifestCommand.java	Sat Sep 03 21:46:13 2011 +0200
@@ -27,6 +27,7 @@
 
 import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.PathPool;
 import org.tmatesoft.hg.util.PathRewrite;
@@ -128,7 +129,7 @@
    }
 
    // I'd rather let HgManifestCommand implement HgManifest.Inspector directly, but this pollutes API alot
-   private class Mediator implements HgManifest.Inspector {
+   private class Mediator implements HgManifest.Inspector2 {
        // file names are likely to repeat in each revision, hence caching of Paths.
        // However, once HgManifest.Inspector switches to Path objects, perhaps global Path pool
        // might be more effective?
@@ -179,11 +180,14 @@
            return true;
        }
        public boolean next(Nodeid nid, String fname, String flags) {
-           Path p = pathPool.path(fname);
-           if (matcher != null && !matcher.accept(p)) {
+           throw new HgBadStateException(HgManifest.Inspector2.class.getName());
+       }
+
+       public boolean next(Nodeid nid, Path fname, Flags flags) {
+           if (matcher != null && !matcher.accept(fname)) {
                return true;
            }
-           HgFileRevision fr = new HgFileRevision(repo, nid, p);
+           HgFileRevision fr = new HgFileRevision(repo, nid, fname);
            if (needDirs) {
                manifestContent.add(fr);
            } else {
diff -r 7232b94f2ae3 -r 6dbbc53fc46d src/org/tmatesoft/hg/internal/ManifestRevision.java
--- a/src/org/tmatesoft/hg/internal/ManifestRevision.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/src/org/tmatesoft/hg/internal/ManifestRevision.java	Sat Sep 03 21:46:13 2011 +0200
@@ -19,8 +19,10 @@
 import java.util.Collection;
 import java.util.TreeMap;
 
+import org.tmatesoft.hg.core.HgBadStateException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgManifest;
+import org.tmatesoft.hg.util.Path;
 
 /**
  * Specific revision of the manifest.
@@ -29,32 +31,32 @@
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public final class ManifestRevision implements HgManifest.Inspector {
-   private final TreeMap<String, Nodeid> idsMap;
-   private final TreeMap<String, String> flagsMap;
+public final class ManifestRevision implements HgManifest.Inspector2 {
+   private final TreeMap<Path, Nodeid> idsMap;
+   private final TreeMap<Path, HgManifest.Flags> flagsMap;
    private final Pool<Nodeid> idsPool;
-   private final Pool<String> namesPool;
+   private final Pool<Path> namesPool;
    private Nodeid changeset;
    private int changelogRev;
 
    // optional pools for effective management of nodeids and filenames (they are likely
    // to be duplicated among different manifest revisions
-   public ManifestRevision(Pool<Nodeid> nodeidPool, Pool<String> filenamePool) {
+   public ManifestRevision(Pool<Nodeid> nodeidPool, Pool<Path> filenamePool) {
        idsPool = nodeidPool;
        namesPool = filenamePool;
-       idsMap = new TreeMap<String, Nodeid>();
-       flagsMap = new TreeMap<String, String>();
+       idsMap = new TreeMap<Path, Nodeid>();
+       flagsMap = new TreeMap<Path, HgManifest.Flags>();
    }
 
-   public Collection<String> files() {
+   public Collection<Path> files() {
        return idsMap.keySet();
    }
 
-   public Nodeid nodeid(String fname) {
+   public Nodeid nodeid(Path fname) {
        return idsMap.get(fname);
    }
 
-   public String flags(String fname) {
+   public HgManifest.Flags flags(Path fname) {
        return flagsMap.get(fname);
    }
 
@@ -72,6 +74,10 @@
    //
 
    public boolean next(Nodeid nid, String fname, String flags) {
+       throw new HgBadStateException(HgManifest.Inspector2.class.getName());
+   }
+
+   public boolean next(Nodeid nid, Path fname, HgManifest.Flags flags) {
        if (namesPool != null) {
            fname = namesPool.unify(fname);
        }
@@ -81,7 +87,11 @@
        idsMap.put(fname, nid);
        if (flags != null) {
            // TreeMap$Entry takes 32 bytes. No reason to keep null for such price
-           // Perhaps, Map<String, Pair<Nodeid, String>> might be better solution
+           // Alternatively, Map<Path, Pair<Nodeid, Flags>> might come as a solution
+           // however, with low rate of elements with flags this would consume more memory
+           // than two distinct maps (sizeof(Pair) == 16).
+           // Map: n*(32+16)
+           // 2 Maps: n*32 + m*32  <-- consumes more with m > n/2
            flagsMap.put(fname, flags);
        }
        return true;
diff -r 7232b94f2ae3 -r 6dbbc53fc46d src/org/tmatesoft/hg/repo/HgManifest.java
--- a/src/org/tmatesoft/hg/repo/HgManifest.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/src/org/tmatesoft/hg/repo/HgManifest.java	Sat Sep 03 21:46:13 2011 +0200
@@ -41,6 +41,49 @@
  */
 public class HgManifest extends Revlog {
    private RevisionMapper revisionMap;
+
+   public enum Flags {
+       Exec, Link;
+
+       static Flags parse(String flags) {
+           if ("x".equalsIgnoreCase(flags)) {
+               return Exec;
+           }
+           if ("l".equalsIgnoreCase(flags)) {
+               return Link;
+           }
+           if (flags == null) {
+               return null;
+           }
+           throw new IllegalStateException(flags);
+       }
+
+       static Flags parse(byte[] data, int start, int length) {
+           if (length == 0) {
+               return null;
+           }
+           if (length == 1) {
+               if (data[start] == 'x') {
+                   return Exec;
+               }
+               if (data[start] == 'l') {
+                   return Link;
+               }
+               // FALL THROUGH
+           }
+           throw new IllegalStateException(new String(data, start, length));
+       }
+
+       String nativeString() {
+           if (this == Exec) {
+               return "x";
+           }
+           if (this == Link) {
+               return "l";
+           }
+           throw new IllegalStateException(toString());
+       }
+   }
 
    /*package-local*/ HgManifest(HgRepository hgRepo, RevlogStream content) {
        super(hgRepo, content);
@@ -146,12 +189,17 @@
 
    public interface Inspector {
        boolean begin(int mainfestRevision, Nodeid nid, int changelogRevision);
+       /**
+        * @deprecated switch to {@link Inspector2#next(Nodeid, Path, Flags)}
+        */
+       @Deprecated
        boolean next(Nodeid nid, String fname, String flags);
        boolean end(int manifestRevision);
    }
 
-   public interface ElementProxy<T> {
-       T get();
+   @Experimental(reason="Explore Path alternative for filenames and enum for flags")
+   public interface Inspector2 extends Inspector {
+       boolean next(Nodeid nid, Path fname, Flags flags);
    }
 
    /**
@@ -160,17 +208,17 @@
     * For cpython repo, walk(0..10k), there are over 16 million filenames, of them only 3020 unique.
     * This means there are 15.9 million useless char[] instances and byte->char conversions
     * 
-    * When String is wrapped into {@link StringProxy}, there's extra overhead of byte[] representation
-    * of the String, but these are only for unique Strings (3020 in the example above). Besides, I save
+    * When String (Path) is wrapped into {@link PathProxy}, there's extra overhead of byte[] representation
+    * of the String, but these are only for unique Strings (Paths) (3020 in the example above). Besides, I save
+    * useless char[] and byte->char conversions.
     */
-   private static class StringProxy {
+   private static class PathProxy {
        private byte[] data;
        private int start;
        private final int hash, length;
-       private String result;
+       private Path result;
 
-       public StringProxy(byte[] data, int start, int length) {
+       public PathProxy(byte[] data, int start, int length) {
            this.data = data;
            this.start = start;
            this.length = length;
@@ -187,10 +235,10 @@
 
        @Override
        public boolean equals(Object obj) {
-           if (false == obj instanceof StringProxy) {
+           if (false == obj instanceof PathProxy) {
                return false;
            }
-           StringProxy o = (StringProxy) obj;
+           PathProxy o = (PathProxy) obj;
            if (o.result != null && result != null) {
                return result.equals(o.result);
            }
@@ -209,10 +257,11 @@
            return hash;
        }
 
-       public String freeze() {
+       public Path freeze() {
            if (result == null) {
-               result = new String(data, start, length);
+               result = Path.create(new String(data, start, length));
                // release reference to bigger data array, make a copy of relevant part only
+               // use original bytes, not those from String above to avoid cache misses due to different encodings
                byte[] d = new byte[length];
                System.arraycopy(data, start, d, 0, length);
                data = d;
@@ -225,17 +274,17 @@
    private static class ManifestParser implements RevlogStream.Inspector {
        private boolean gtg = true; // good to go
        private final Inspector inspector;
+       private final Inspector2 inspector2;
        private Pool<Nodeid> nodeidPool, thisRevPool;
-       private final Pool<StringProxy> fnamePool;
-       private final Pool<StringProxy> flagsPool;
+       private final Pool<PathProxy> fnamePool;
        private byte[] nodeidLookupBuffer = new byte[20]; // get reassigned each time new Nodeid is added to pool
 
        public ManifestParser(Inspector delegate) {
            assert delegate != null;
            inspector = delegate;
+           inspector2 = delegate instanceof Inspector2 ? (Inspector2) delegate : null;
            nodeidPool = new Pool<Nodeid>();
-           fnamePool = new Pool<StringProxy>();
-           flagsPool = new Pool<StringProxy>();
+           fnamePool = new Pool<PathProxy>();
            thisRevPool = new Pool<Nodeid>();
        }
 
@@ -245,8 +294,8 @@
            }
            try {
                gtg = gtg && inspector.begin(revisionNumber, new Nodeid(nodeid, true), linkRevision);
-               String fname = null;
-               String flags = null;
+               Path fname = null;
+               Flags flags = null;
                Nodeid nid = null;
                int i;
                byte[] data = da.byteArray();
@@ -254,7 +303,7 @@
                    int x = i;
                    for( ; data[i] != '\n' && i < actualLen; i++) {
                        if (fname == null && data[i] == 0) {
-                           StringProxy px = fnamePool.unify(new StringProxy(data, x, i - x));
+                           PathProxy px = fnamePool.unify(new PathProxy(data, x, i - x));
                            // if (cached = fnamePool.unify(px))== px then cacheMiss, else cacheHit
                            // cpython 0..10k: hits: 15 989 152, misses: 3020
                            fname = px.freeze();
@@ -277,13 +326,21 @@
                    if (nodeidLen + x < i) {
                        // 'x' and 'l' for executable bits and symlinks?
                        // hg --debug manifest shows 644 for each regular file in my repo
-                       // for cpython 0..10k, there are 4361062 flagPool checks, and there's only 1 unique flag
-                       flags = flagsPool.unify(new StringProxy(data, x + nodeidLen, i-x-nodeidLen)).freeze();
+                       // for cpython 0..10k, there are 4361062 flag checks, and there's only 1 unique flag
+                       flags = Flags.parse(data, x + nodeidLen, i-x-nodeidLen);
+                   } else {
+                       flags = null;
                    }
-                   gtg = gtg && inspector.next(nid, fname, flags);
+                   if (inspector2 == null) {
+                       String flagString = flags == null ? null : flags.nativeString();
+                       gtg = inspector.next(nid, fname.toString(), flagString);
+                   } else {
+                       gtg = inspector2.next(nid, fname, flags);
+                   }
                }
                nid = null;
-               fname = flags = null;
+               fname = null;
+               flags = null;
            }
            gtg = gtg && inspector.end(revisionNumber);
            //
diff -r 7232b94f2ae3 -r 6dbbc53fc46d src/org/tmatesoft/hg/repo/HgMergeState.java
--- a/src/org/tmatesoft/hg/repo/HgMergeState.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/src/org/tmatesoft/hg/repo/HgMergeState.java	Sat Sep 03 21:46:13 2011 +0200
@@ -99,11 +99,13 @@
            return;
        }
        Pool<Nodeid> nodeidPool = new Pool<Nodeid>();
-       Pool<String> fnamePool = new Pool<String>();
+       Pool<Path> fnamePool = new Pool<Path>();
        Pair<Nodeid, Nodeid> wcParents = repo.getWorkingCopyParents();
        wcp1 = nodeidPool.unify(wcParents.first());
        wcp2 = nodeidPool.unify(wcParents.second());
        ArrayList<Entry> result = new ArrayList<Entry>();
-       PathPool pathPool = new PathPool(new PathRewrite.Empty());
+       // FIXME need to settle use of Pool and PathPool
+       // latter is pool that can create objects on demand, former is just cache
+       PathPool pathPool = new PathPool(new PathRewrite.Empty());
        final ManifestRevision m1 = new ManifestRevision(nodeidPool, fnamePool);
        final ManifestRevision m2 = new ManifestRevision(nodeidPool, fnamePool);
        if (!wcp2.isNull()) {
@@ -117,9 +119,10 @@
            repo.getManifest().walk(rp1, rp1, m1);
        while ((s = br.readLine()) != null) {
            String[] r = s.split("\\00");
-           Nodeid nidP1 = m1.nodeid(r[3]);
+           Path p1fname = pathPool.path(r[3]);
+           Nodeid nidP1 = m1.nodeid(p1fname);
            Nodeid nidCA = nodeidPool.unify(Nodeid.fromAscii(r[5]));
-           HgFileRevision p1 = new HgFileRevision(repo, nidP1, pathPool.path(r[3]));
+           HgFileRevision p1 = new HgFileRevision(repo, nidP1, p1fname);
            HgFileRevision ca;
            if (nidCA == nidP1 && r[3].equals(r[4])) {
                ca = p1;
@@ -128,12 +131,13 @@
            }
            HgFileRevision p2;
            if (!wcp2.isNull() || !r[6].equals(r[4])) {
-               Nodeid nidP2 = m2.nodeid(r[6]);
+               final Path p2fname = pathPool.path(r[6]);
+               Nodeid nidP2 = m2.nodeid(p2fname);
                if (nidP2 == null) {
                    assert false : "There's not enough information (or I don't know where to look) in merge/state to find out what's the second parent";
                    nidP2 = NULL;
                }
-               p2 = new HgFileRevision(repo, nidP2, pathPool.path(r[6]));
+               p2 = new HgFileRevision(repo, nidP2, p2fname);
            } else {
                // no second parent known. no idea what to do here, assume linear merge, use common ancestor as parent
                p2 = ca;
diff -r 7232b94f2ae3 -r 6dbbc53fc46d src/org/tmatesoft/hg/repo/HgStatusCollector.java
--- a/src/org/tmatesoft/hg/repo/HgStatusCollector.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/src/org/tmatesoft/hg/repo/HgStatusCollector.java	Sat Sep 03 21:46:13 2011 +0200
@@ -27,6 +27,7 @@
 import java.util.Map;
 import java.util.TreeSet;
 
+import org.tmatesoft.hg.core.HgBadStateException;
 import org.tmatesoft.hg.core.HgDataStreamException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.IntMap;
@@ -54,7 +55,7 @@
    private final int cacheMaxSize = 50; // do not keep too much manifest revisions
    private PathPool pathPool;
    private final Pool<Nodeid> cacheNodes;
-   private final Pool<String> cacheFilenames; // XXX in fact, need to think if use of PathPool directly instead is better solution
+   private final Pool<Path> cacheFilenames;
    private final ManifestRevision emptyFakeState;
    private Path.Matcher scope = new Path.Matcher.Any();
 
@@ -63,7 +64,7 @@
        this.repo = hgRepo;
        cache = new IntMap<ManifestRevision>(cacheMaxSize);
        cacheNodes = new Pool<Nodeid>();
-       cacheFilenames = new Pool<String>();
+       cacheFilenames = new Pool<Path>();
        emptyFakeState = createEmptyManifestRevision();
    }
 
@@ -99,7 +100,7 @@
    private void initCacheRange(int minRev, int maxRev) {
        ensureCacheSize();
-       repo.getManifest().walk(minRev, maxRev, new HgManifest.Inspector() {
+       repo.getManifest().walk(minRev, maxRev, new HgManifest.Inspector2() {
            private ManifestRevision delegate;
            private boolean cacheHit; // range may include revisions we already know about, do not re-create them
 
@@ -118,6 +119,10 @@
            }
 
            public boolean next(Nodeid nid, String fname, String flags) {
+               throw new HgBadStateException(HgManifest.Inspector2.class.getName());
+           }
+
+           public boolean next(Nodeid nid, Path fname, HgManifest.Flags flags) {
                if (!cacheHit) {
                    delegate.next(nid, fname, flags);
                }
@@ -229,29 +234,27 @@
        r1 = get(rev1);
        r2 = get(rev2);
 
-       PathPool pp = getPathPool();
-       TreeSet<String> r1Files = new TreeSet<String>(r1.files());
-       for (String fname : r2.files()) {
-           final Path r2filePath = pp.path(fname);
-           if (!scope.accept(r2filePath)) {
+       TreeSet<Path> r1Files = new TreeSet<Path>(r1.files());
+       for (Path r2fname : r2.files()) {
+           if (!scope.accept(r2fname)) {
                continue;
            }
-           if (r1Files.remove(fname)) {
-               Nodeid nidR1 = r1.nodeid(fname);
-               Nodeid nidR2 = r2.nodeid(fname);
-               String flagsR1 = r1.flags(fname);
-               String flagsR2 = r2.flags(fname);
-               if (nidR1.equals(nidR2) && ((flagsR2 == null && flagsR1 == null) || (flagsR2 != null && flagsR2.equals(flagsR1)))) {
-                   inspector.clean(r2filePath);
+           if (r1Files.remove(r2fname)) {
+               Nodeid nidR1 = r1.nodeid(r2fname);
+               Nodeid nidR2 = r2.nodeid(r2fname);
+               HgManifest.Flags flagsR1 = r1.flags(r2fname);
+               HgManifest.Flags flagsR2 = r2.flags(r2fname);
+               if (nidR1.equals(nidR2) && flagsR2 == flagsR1) {
+                   inspector.clean(r2fname);
                } else {
-                   inspector.modified(r2filePath);
+                   inspector.modified(r2fname);
               }
            } else {
                try {
-                   Path copyTarget = r2filePath;
+                   Path copyTarget = r2fname;
                    Path copyOrigin = getOriginIfCopy(repo, copyTarget, r1Files, rev1);
                    if (copyOrigin != null) {
-                       inspector.copied(pp.path(copyOrigin) /*pipe through pool, just in case*/, copyTarget);
+                       inspector.copied(getPathPool().path(copyOrigin) /*pipe through pool, just in case*/, copyTarget);
                    } else {
                        inspector.added(copyTarget);
                    }
@@ -262,10 +265,9 @@
                }
            }
        }
-       for (String left : r1Files) {
-           final Path r2filePath = pp.path(left);
-           if (scope.accept(r2filePath)) {
-               inspector.removed(r2filePath);
+       for (Path r1fname : r1Files) {
+           if (scope.accept(r1fname)) {
+               inspector.removed(r1fname);
            }
        }
    }
@@ -276,11 +278,11 @@
        return rv;
    }
 
-   /*package-local*/static Path getOriginIfCopy(HgRepository hgRepo, Path fname, Collection<String> originals, int originalChangelogRevision) throws HgDataStreamException {
+   /*package-local*/static Path getOriginIfCopy(HgRepository hgRepo, Path fname, Collection<Path> originals, int originalChangelogRevision) throws HgDataStreamException {
        HgDataFile df = hgRepo.getFileNode(fname);
        while (df.isCopy()) {
            Path original = df.getCopySourceName();
-           if (originals.contains(original.toString())) {
+           if (originals.contains(original)) {
                df = hgRepo.getFileNode(original);
                int changelogRevision = df.getChangesetLocalRevision(0);
                if (changelogRevision <= originalChangelogRevision) {
@@ -326,7 +328,7 @@
            if ((modified == null || !modified.contains(fname)) && (removed == null || !removed.contains(fname))) {
                return null;
            }
-           return statusHelper.raw(startRev).nodeid(fname.toString());
+           return statusHelper.raw(startRev).nodeid(fname);
        }
        public Nodeid nodeidAfterChange(Path fname) {
            if (statusHelper == null || endRev == BAD_REVISION) {
@@ -335,7 +337,7 @@
            if ((modified == null || !modified.contains(fname)) && (added == null || !added.contains(fname))) {
                return null;
            }
-           return statusHelper.raw(endRev).nodeid(fname.toString());
+           return statusHelper.raw(endRev).nodeid(fname);
        }
 
        public List<Path> getModified() {
diff -r 7232b94f2ae3 -r 6dbbc53fc46d src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java
--- a/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	Sat Sep 03 21:46:13 2011 +0200
@@ -136,10 +136,10 @@
            throw new IllegalArgumentException(String.valueOf(baseRevision));
        }
        ManifestRevision collect = null; // non null indicates we compare against base revision
-       Set<String> baseRevFiles = Collections.emptySet(); // files from base revision not affected by status calculation
+       Set<Path> baseRevFiles = Collections.emptySet(); // files from base revision not affected by status calculation
        if (baseRevision != TIP && baseRevision != WORKING_COPY) {
            collect = getManifest(baseRevision);
-           baseRevFiles = new TreeSet<String>(collect.files());
+           baseRevFiles = new TreeSet<Path>(collect.files());
        }
        if (inspector instanceof HgStatusCollector.Record) {
            HgStatusCollector sc = baseRevisionCollector == null ? new HgStatusCollector(repo) : baseRevisionCollector;
@@ -173,13 +173,13 @@
                }
                // do not report it as removed later
                if (collect != null) {
-                   baseRevFiles.remove(fname.toString());
+                   baseRevFiles.remove(fname);
                }
            } else {
                // chances are it was known in baseRevision. We may rely
                // that later iteration over baseRevFiles leftovers would yield correct Removed,
                // but it doesn't hurt to be explicit (provided we know fname *is* inScope of the FileIterator
-               if (collect != null && baseRevFiles.remove(fname.toString())) {
+               if (collect != null && baseRevFiles.remove(fname)) {
                    inspector.removed(fname);
                } else {
                    // not sure I shall report such files (i.e. arbitrary name coming from FileIterator)
@@ -214,8 +214,7 @@
            }
        }
        if (collect != null) {
-           for (String r : baseRevFiles) {
-               final Path fromBase = getPathPool().path(r);
+           for (Path fromBase : baseRevFiles) {
                if (repoWalker.inScope(fromBase)) {
                    inspector.removed(fromBase);
                }
@@ -233,7 +232,7 @@
            } else {
                // removed from the repo
                // if we check against non-tip revision, do not report files that were added past that revision and now removed.
-               if (collect == null || baseRevFiles.contains(m.toString())) {
+               if (collect == null || baseRevFiles.contains(m)) {
                    inspector.removed(m);
                }
            }
@@ -262,7 +261,7 @@
            // size is the same or unknown, and, perhaps, different timestamp
            // check actual content to avoid false modified files
            HgDataFile df = repo.getFileNode(fname);
-           Nodeid rev = getDirstateParentManifest().nodeid(fname.toString());
+           Nodeid rev = getDirstateParentManifest().nodeid(fname);
            if (!areTheSame(f, df, rev)) {
                inspector.modified(df.getPath());
            } else {
@@ -288,10 +287,10 @@
    }
 
    // XXX refactor checkLocalStatus methods in more OO way
-   private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, ManifestRevision collect, int baseRevision, Path fname, File f, HgStatusInspector inspector) {
+   private void checkLocalStatusAgainstBaseRevision(Set<Path> baseRevNames, ManifestRevision collect, int baseRevision, Path fname, File f, HgStatusInspector inspector) {
        // fname is in the dirstate, either Normal, Added, Removed or Merged
-       Nodeid nid1 = collect.nodeid(fname.toString());
-       String flags = collect.flags(fname.toString());
+       Nodeid nid1 = collect.nodeid(fname);
+       HgManifest.Flags flags = collect.flags(fname);
        HgDirstate.Record r;
        if (nid1 == null) {
            // normal: added?
@@ -311,7 +310,7 @@
        } else if ((r = getDirstate().checkAdded(fname)) != null) {
            if (r.name2 != null && baseRevNames.contains(r.name2)) {
                baseRevNames.remove(r.name2); // XXX surely I shall not report rename source as Removed?
-               inspector.copied(getPathPool().path(r.name2), fname);
+               inspector.copied(r.name2, fname);
                return;
            }
            // fall-through, report as added
@@ -322,7 +321,7 @@
            inspector.added(fname);
        } else {
            // was known; check whether clean or modified
-           Nodeid nidFromDirstate = getDirstateParentManifest().nodeid(fname.toString());
+           Nodeid nidFromDirstate = getDirstateParentManifest().nodeid(fname);
            if ((r = getDirstate().checkNormal(fname)) != null && nid1.equals(nidFromDirstate)) {
                // regular file, was the same up to WC initialization. Check if was modified since, and, if not, report right away
                // same code as in #checkLocalStatusAgainstFile
@@ -340,7 +339,7 @@
                    handled = true;
                }
                if (handled) {
-                   baseRevNames.remove(fname.toString()); // consumed, processed, handled.
+                   baseRevNames.remove(fname); // consumed, processed, handled.
                    return;
                }
                // otherwise, shall check actual content (size not the same, or unknown (-1 or -2), or timestamp is different,
@@ -357,11 +356,11 @@
                } else {
                    inspector.modified(fname);
                }
-               baseRevNames.remove(fname.toString()); // consumed, processed, handled.
+               baseRevNames.remove(fname); // consumed, processed, handled.
            } else if (getDirstate().checkRemoved(fname) != null) {
                // was known, and now marked as removed, report it right away, do not rely on baseRevNames processing later
                inspector.removed(fname);
-               baseRevNames.remove(fname.toString()); // consumed, processed, handled.
+               baseRevNames.remove(fname); // consumed, processed, handled.
            }
            // only those left in baseRevNames after processing are reported as removed
        }
@@ -451,7 +450,7 @@
        return false;
    }
 
-   private static boolean todoCheckFlagsEqual(File f, String manifestFlags) {
+   private static boolean todoCheckFlagsEqual(File f, HgManifest.Flags originalManifestFlags) {
        // FIXME implement
        return true;
    }
diff -r 7232b94f2ae3 -r 6dbbc53fc46d test/org/tmatesoft/hg/test/MapTagsToFileRevisions.java
--- a/test/org/tmatesoft/hg/test/MapTagsToFileRevisions.java	Sat Sep 03 13:12:13 2011 +0200
+++ b/test/org/tmatesoft/hg/test/MapTagsToFileRevisions.java	Sat Sep 03 21:46:13 2011 +0200
@@ -7,8 +7,8 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.regex.Pattern;
 
+import org.tmatesoft.hg.core.HgBadStateException;
 import org.tmatesoft.hg.core.HgChangeset;
 import org.tmatesoft.hg.core.HgChangesetHandler;
 import org.tmatesoft.hg.core.HgException;
@@ -21,6 +21,7 @@
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgTags;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
+import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgTags.TagInfo;
 import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.Path;
@@ -69,16 +70,16 @@
        System.out.println(System.getProperty("java.version"));
        final long start = System.currentTimeMillis();
        final HgRepository repository = new HgLookup().detect(new File("/temp/hg/cpython"));
-       repository.getManifest().walk(0, 10000, new HgManifest.Inspector() {
-
+       repository.getManifest().walk(0, 10000, new HgManifest.Inspector2() {
            public boolean begin(int mainfestRevision, Nodeid nid, int changelogRevision) {
                return true;
            }
-
            public boolean next(Nodeid nid, String fname, String flags) {
+               throw new HgBadStateException(HgManifest.Inspector2.class.getName());
+           }
+           public boolean next(Nodeid nid, Path fname, Flags flags) {
                return true;
            }
-
            public boolean end(int manifestRevision) {
                return true;
            }
@@ -106,7 +107,7 @@
        tags.getTags().values().toArray(allTags);
        // file2rev2tag value is array of revisions, always of allTags.length. Revision index in the array
        // is index of corresponding TagInfo in allTags;
-       final Map<String, Nodeid[]> file2rev2tag = new HashMap<String, Nodeid[]>();
+       final Map<Path, Nodeid[]> file2rev2tag = new HashMap<Path, Nodeid[]>();
        System.out.printf("Collecting manifests for %d tags\n", allTags.length);
        // effective translation of changeset revisions to their local indexes
        final HgChangelog.RevisionMap clogrmap = repository.getChangelog().new RevisionMap().init();
@@ -127,32 +128,7 @@
        }
        System.out.printf("Prepared tag revisions to analyze: %d ms\n", System.currentTimeMillis() - start);
        //
-       final int[] counts = new int[2];
-       HgManifest.Inspector emptyInsp = new HgManifest.Inspector() {
-           public boolean begin(int mainfestRevision, Nodeid nid, int changelogRevision) {
-               counts[0]++;
-               return true;
-           }
-           public boolean next(Nodeid nid, String fname, String flags) {
-               counts[1]++;
-               return true;
-           }
-           public boolean end(int manifestRevision) {
-               return true;
-           }
-       };
-//     final long start0 = System.currentTimeMillis();
-//     repository.getManifest().walk(emptyInsp, tagLocalRevs[0]); // warm-up
-//     final long start1 = System.currentTimeMillis();
-//     repository.getManifest().walk(emptyInsp, tagLocalRevs[0]); // warm-up
-//     counts[0] = counts[1] = 0;
-//     final long start2 = System.currentTimeMillis();
-//     repository.getManifest().walk(emptyInsp, tagLocalRevs);
-//     // No-op iterate over selected revisions: 11719 ms (revs:189, files: 579652), warm-up: 218 ms. Cache built: 25281 ms
-//     // No-op iterate over selected revisions: 11719 ms (revs:189, files: 579652), warm-up1: 234 ms, warm-up2: 16 ms. Cache built: 25375 ms
-//     System.out.printf("No-op iterate over selected revisions: %d ms (revs:%d, files: %d), warm-up1: %d ms, warm-up2: %d ms \n", System.currentTimeMillis() - start2, counts[0], counts[1], start1-start0, start2-start1);
-       //
-       repository.getManifest().walk(new HgManifest.Inspector() {
+       repository.getManifest().walk(new HgManifest.Inspector2() {
            private int[] tagIndexAtRev = new int[4]; // it's unlikely there would be a lot of tags associated with a given cset
 
            public boolean begin(int mainfestRevision, Nodeid nid, int changelogRevision) {
@@ -177,6 +153,10 @@
            }
 
            public boolean next(Nodeid nid, String fname, String flags) {
+               throw new HgBadStateException(HgManifest.Inspector2.class.getName());
+           }
+
+           public boolean next(Nodeid nid, Path fname, HgManifest.Flags flags) {
                Nodeid[] m = file2rev2tag.get(fname);
                if (m == null) {
                    file2rev2tag.put(fname, m = new Nodeid[allTags.length]);
@@ -203,7 +183,7 @@
        // look up specific file. This part is fast.
        final Path targetPath = Path.create("README");
        HgDataFile fileNode = repository.getFileNode(targetPath);
-       final Nodeid[] allTagsOfTheFile = file2rev2tag.get(targetPath.toString());
+       final Nodeid[] allTagsOfTheFile = file2rev2tag.get(targetPath);
        // TODO if fileNode.isCopy, repeat for each getCopySourceName()
        for (int localFileRev = 0; localFileRev < fileNode.getRevisionCount(); localFileRev++) {
            Nodeid fileRev = fileNode.getRevision(localFileRev);
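
Not part of the changeset above: a minimal sketch of a client adopting the HgManifest.Inspector2 callback this patch introduces, following the same pattern as the converted ManifestDump in Main.java. The class name ManifestPathDump and the repository location are placeholders invented for the illustration; the API calls (HgLookup.detect, HgManifest.walk, HgRepository.TIP) are the ones used in the patch itself.

import java.io.File;

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgManifest;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.Path;

public class ManifestPathDump implements HgManifest.Inspector2 {

    public boolean begin(int manifestRevision, Nodeid nid, int changelogRevision) {
        System.out.printf("manifest %d: %s%n", manifestRevision, nid);
        return true;
    }

    // Legacy String-based callback; with this patch the manifest walker no longer
    // invokes it once it detects an Inspector2 implementation.
    public boolean next(Nodeid nid, String fname, String flags) {
        return true;
    }

    // New callback: file name arrives as a Path, flags as the Flags enum (or null for plain files).
    public boolean next(Nodeid nid, Path fname, HgManifest.Flags flags) {
        String suffix = flags == null ? "" : " (" + flags.name() + ")";
        System.out.println(fname + suffix);
        return true;
    }

    public boolean end(int manifestRevision) {
        return true;
    }

    public static void main(String[] args) throws Exception {
        // "/tmp/hg/sample-repo" is a placeholder location for this sketch
        HgRepository repo = new HgLookup().detect(new File("/tmp/hg/sample-repo"));
        repo.getManifest().walk(0, HgRepository.TIP, new ManifestPathDump());
    }
}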