# HG changeset patch
# User Artem Tikhomirov
# Date 1317259228 -7200
# Node ID 09628675bcee92fb1dfe9ad3b33a2a432283ee99
# Parent  ee6b467c1a5f884020b43c3dca90bbb39d338f9f
Rework file history build approach to match rest of the API

diff -r ee6b467c1a5f -r 09628675bcee cmdline/org/tmatesoft/hg/console/Main.java
--- a/cmdline/org/tmatesoft/hg/console/Main.java Wed Sep 28 13:09:16 2011 +0200
+++ b/cmdline/org/tmatesoft/hg/console/Main.java Thu Sep 29 03:20:28 2011 +0200
@@ -86,7 +86,7 @@
 
     public static void main(String[] args) throws Exception {
         Main m = new Main(args);
-//      m.buildFileLog();
+        m.buildFileLog();
 //      m.testConsoleLog();
 //      m.testTreeTraversal();
 //      m.testRevisionMap();
@@ -97,7 +97,7 @@
 //      m.testCatAtCsetRevision();
 //      m.testMergeState();
 //      m.testFileStatus();
-        m.dumpBranches();
+//      m.dumpBranches();
 //      m.inflaterLengthException();
 //      m.dumpIgnored();
 //      m.dumpDirstate();
@@ -110,26 +110,28 @@
 
     private void buildFileLog() {
         final HgDataFile fn = hgRepo.getFileNode("file1");
-        HgDataFile.HistoryWalker hw = fn.history();
-        while (hw.hasNext()) {
-            hw.next();
-            StringBuilder sb = new StringBuilder();
-            Collection<Nodeid> children = hw.childChangesets();
-            for (Nodeid cc : children) {
-                sb.append(cc.shortNotation());
-                sb.append(", ");
+        HgChangelog.TreeInspector insp = new HgChangelog.TreeInspector() {
+
+            public void next(Nodeid changesetRevision, Pair<Nodeid, Nodeid> parentChangesets, Collection<Nodeid> childChangesets) {
+                StringBuilder sb = new StringBuilder();
+                for (Nodeid cc : childChangesets) {
+                    sb.append(cc.shortNotation());
+                    sb.append(", ");
+                }
+                final boolean isJoin = !parentChangesets.first().isNull() && !parentChangesets.second().isNull();
+                final boolean isFork = childChangesets.size() > 1;
+                if (isJoin) {
+                    System.out.printf("join[(%s, %s) => %s]\n", parentChangesets.first().shortNotation(), parentChangesets.second().shortNotation(), changesetRevision.shortNotation());
+                }
+                if (isFork) {
+                    System.out.printf("fork[%s => %s]\n", changesetRevision.shortNotation(), sb);
+                }
+                if (!isFork && !isJoin && !childChangesets.isEmpty()) {
+                    System.out.printf("%s => %s\n", changesetRevision.shortNotation(), sb);
+                }
             }
-            if (hw.isJoin()) {
-                final Pair<Nodeid, Nodeid> parents = hw.parentChangesets();
-                System.out.printf("join[(%s, %s) => %s]\n", parents.first().shortNotation(), parents.second().shortNotation(), hw.changesetRevision().shortNotation());
-            }
-            if (hw.isFork()) {
-                System.out.printf("fork[%s => %s]\n", hw.changesetRevision().shortNotation(), sb);
-            }
-            if (!hw.isFork() && !hw.isJoin() && !children.isEmpty()) {
-                System.out.printf("%s => %s\n", hw.changesetRevision().shortNotation(), sb);
-            }
-        }
+        };
+        fn.history(insp);
     }
 
     private void buildFileLogOld() {
diff -r ee6b467c1a5f -r 09628675bcee src/org/tmatesoft/hg/repo/HgChangelog.java
--- a/src/org/tmatesoft/hg/repo/HgChangelog.java Wed Sep 28 13:09:16 2011 +0200
+++ b/src/org/tmatesoft/hg/repo/HgChangelog.java Thu Sep 29 03:20:28 2011 +0200
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Calendar;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
 import java.util.Formatter;
@@ -38,6 +39,7 @@
 import org.tmatesoft.hg.internal.Pool;
 import org.tmatesoft.hg.internal.RevlogStream;
 import org.tmatesoft.hg.util.CancelSupport;
+import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.ProgressSupport;
 
 /**
@@ -105,6 +107,19 @@
     }
 
     /**
+     * Unlike regular {@link Inspector}, this one supplies the changeset revision along with its parents and children, according
+     * to the parent information of the revlog this
+     * inspector visits.
+     * @see HgDataFile#history(TreeInspector)
+     */
+    public interface TreeInspector {
+        // TreeInspector is in HgChangelog, not in Revlog, because despite the fact it can be applied
+        // to any revlog, it is not meant to provide revisions of whatever revlog it is being applied to,
+        // but always changeset revisions.
+        // TODO HgChangelog.walk(TreeInspector)
+        void next(Nodeid changesetRevision, Pair<Nodeid, Nodeid> parentChangesets, Collection<Nodeid> childChangesets);
+    }
+
+    /**
      * Entry in the Changelog
      */
     public static class RawChangeset implements Cloneable /* for those that would like to keep a copy */{
diff -r ee6b467c1a5f -r 09628675bcee src/org/tmatesoft/hg/repo/HgDataFile.java
--- a/src/org/tmatesoft/hg/repo/HgDataFile.java Wed Sep 28 13:09:16 2011 +0200
+++ b/src/org/tmatesoft/hg/repo/HgDataFile.java Thu Sep 29 03:20:28 2011 +0200
@@ -30,18 +30,15 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-import java.util.NoSuchElementException;
 
 import org.tmatesoft.hg.core.HgDataStreamException;
 import org.tmatesoft.hg.core.HgException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.DataAccess;
-import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.FilterByteChannel;
 import org.tmatesoft.hg.internal.FilterDataAccess;
 import org.tmatesoft.hg.internal.IntMap;
 import org.tmatesoft.hg.internal.RevlogStream;
-import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.util.ByteChannel;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
@@ -231,135 +228,117 @@
         }
     }
 
-    @Experimental(reason="Investigate approaches to build complete file history log")
-    public interface HistoryWalker {
-        void next();
-        boolean hasNext();
-        Nodeid changesetRevision();
-        RawChangeset changeset();
-        boolean isFork();
-        boolean isJoin();
-        Pair<Nodeid, Nodeid> parentChangesets();
-        Collection<Nodeid> childChangesets();
+    private static class HistoryNode {
+        int changeset;
+        Nodeid cset;
+        HistoryNode parent1, parent2;
+        List<HistoryNode> children;
+
+        HistoryNode(int cs, HistoryNode p1, HistoryNode p2) {
+            changeset = cs;
+            parent1 = p1;
+            parent2 = p2;
+            if (p1 != null) {
+                p1.addChild(this);
+            }
+            if (p2 != null) {
+                p2.addChild(this);
+            }
+        }
+
+        Nodeid changesetRevision() {
+            assert cset != null : "we initialize all csets prior to use";
+            return cset;
+        }
+
+        void addChild(HistoryNode child) {
+            if (children == null) {
+                children = new ArrayList<HistoryNode>(2);
+            }
+            children.add(child);
+        }
     }
 
-    public HistoryWalker history() {
-        final boolean[] needsSorting = { false };
-        class HistoryNode {
-            int changeset;
-            Nodeid cset;
-            HistoryNode parent1, parent2;
-            List<HistoryNode> children;
-
-            public HistoryNode(int cs, HistoryNode p1, HistoryNode p2) {
-                changeset = cs;
-                parent1 = p1;
-                parent2 = p2;
-                if (p1 != null) {
-                    p1.addChild(this);
+    public void history(HgChangelog.TreeInspector inspector) {
+        final CancelSupport cancelSupport = CancelSupport.Factory.get(inspector);
+        try {
+            final boolean[] needsSorting = { false };
+            final HistoryNode[] completeHistory = new HistoryNode[getRevisionCount()];
+            final int[] commitRevisions = new int[completeHistory.length];
+            RevlogStream.Inspector insp = new RevlogStream.Inspector() {
+                public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
+                    if (revisionNumber > 0) {
+                        if (commitRevisions[revisionNumber-1] > linkRevision) {
+                            needsSorting[0] = true;
+                        }
+                    }
+                    commitRevisions[revisionNumber] = linkRevision;
+                    HistoryNode p1 = null, p2 = null;
+                    if (parent1Revision != -1) {
+                        p1 = completeHistory[parent1Revision];
+                    }
+                    if (parent2Revision != -1) {
+                        p2 = completeHistory[parent2Revision];
+                    }
+                    completeHistory[revisionNumber] = new HistoryNode(linkRevision, p1, p2);
                 }
-                if (p2 != null) {
-                    p2.addChild(this);
+            };
+            content.iterate(0, getLastRevision(), false, insp);
+            cancelSupport.checkCancelled();
+            if (needsSorting[0]) {
+                Arrays.sort(commitRevisions);
+            }
+            // read changeset revisions at once (to avoid numerous changelog.getRevision reads),
+            // but just nodeids, not RawChangeset (changelog.iterate with data=false)
+            ArrayList<Nodeid> changesetRevisions = new ArrayList<Nodeid>(commitRevisions.length);
+            getRepo().getChangelog().getRevisionsInternal(changesetRevisions, commitRevisions);
+            cancelSupport.checkCancelled();
+            // assign them to corresponding HistoryNodes
+            for (int i = 0; i < completeHistory.length; i++ ) {
+                final HistoryNode n = completeHistory[i];
+                if (needsSorting[0]) {
+                    int x = Arrays.binarySearch(commitRevisions, n.changeset);
+                    assert x >= 0;
+                    n.cset = changesetRevisions.get(x);
+                } else {
+                    // commit revisions were not sorted, the original index can be used directly
+                    n.cset = changesetRevisions.get(i);
                 }
             }
-
-            public Nodeid changesetRevision() {
-                if (cset == null) {
-                    cset = getRepo().getChangelog().getRevision(changeset);
-                }
-                return cset;
-            }
-
-            public void addChild(HistoryNode child) {
-                if (children == null) {
-                    children = new ArrayList<HistoryNode>(2);
-                }
-                children.add(child);
-            }
-        }
-        final HistoryNode[] completeHistory = new HistoryNode[getRevisionCount()];
-        final int[] commitRevisions = new int[completeHistory.length];
-        RevlogStream.Inspector insp = new RevlogStream.Inspector() {
-            public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
-                if (revisionNumber > 0) {
-                    if (commitRevisions[revisionNumber-1] > linkRevision) {
-                        needsSorting[0] = true;
-                    }
-                }
-                HistoryNode p1 = null, p2 = null;
-                if (parent1Revision != -1) {
-                    p1 = completeHistory[parent1Revision];
-                }
-                if (parent2Revision != -1) {
-                    p2 = completeHistory[parent2Revision];
-                }
-                completeHistory[revisionNumber] = new HistoryNode(linkRevision, p1, p2);
-            }
-        };
-        content.iterate(0, getLastRevision(), false, insp);
-        // XXX may read changeset revisions at once (to avoid numerous changelog.getRevision reads)
-        // but perhaps just nodeids, not RawChangeset (changelog.iterate(data=false)
-        // XXX sort completeHistory according to changeset numbers?
-        return new HistoryWalker() {
-            private int index = -1;
-
-            public void next() {
-                if (!hasNext()) {
-                    throw new NoSuchElementException();
-                }
-                index++;
-            }
-
-            public boolean hasNext() {
-                return index+1 < completeHistory.length;
-            }
-
-            public Pair<Nodeid, Nodeid> parentChangesets() {
+            cancelSupport.checkCancelled();
+            // XXX shall sort completeHistory according to changeset numbers?
+            for (int i = 0; i < completeHistory.length; i++ ) {
+                final HistoryNode n = completeHistory[i];
                 HistoryNode p;
                 Nodeid p1, p2;
-                if ((p = completeHistory[index].parent1) != null) {
+                if ((p = n.parent1) != null) {
                     p1 = p.changesetRevision();
                 } else {
                     p1 = Nodeid.NULL;
                 }
-                if ((p = completeHistory[index].parent2) != null) {
+                if ((p = n.parent2) != null) {
                     p2 = p.changesetRevision();
                 } else {
                     p2 = Nodeid.NULL;
                 }
-                return new Pair<Nodeid, Nodeid>(p1, p2);
-            }
-
-            public boolean isJoin() {
-                return completeHistory[index].parent1 != null && completeHistory[index].parent2 != null;
-            }
-
-            public boolean isFork() {
-                HistoryNode n = completeHistory[index];
-                return n.children != null && n.children.size() > 1;
-            }
-
-            public Collection<Nodeid> childChangesets() {
-                HistoryNode n = completeHistory[index];
+                final Pair<Nodeid, Nodeid> parentChangesets = new Pair<Nodeid, Nodeid>(p1, p2);
+                final List<Nodeid> childChangesets;
                 if (n.children == null) {
-                    return Collections.emptyList();
+                    childChangesets = Collections.emptyList();
+                } else {
+                    Nodeid[] revisions = new Nodeid[n.children.size()];
+                    int j = 0;
+                    for (HistoryNode hn : n.children) {
+                        revisions[j++] = hn.changesetRevision();
+                    }
+                    childChangesets = Arrays.asList(revisions);
                 }
-                ArrayList<Nodeid> rv = new ArrayList<Nodeid>(n.children.size());
-                for (HistoryNode hn : n.children) {
-                    rv.add(hn.changesetRevision());
-                }
-                return rv;
+                inspector.next(n.changesetRevision(), parentChangesets, childChangesets);
+                cancelSupport.checkCancelled();
             }
-
-            public Nodeid changesetRevision() {
-                return completeHistory[index].changesetRevision();
-            }
-
-            public RawChangeset changeset() {
-                final int cs = completeHistory[index].changeset;
-                return getRepo().getChangelog().range(cs, cs).get(0);
-            }
-        };
+        } catch (CancelledException ex) {
+            return;
+        }
     }
 
     public void history(HgChangelog.Inspector inspector) {
diff -r ee6b467c1a5f -r 09628675bcee src/org/tmatesoft/hg/repo/Revlog.java
--- a/src/org/tmatesoft/hg/repo/Revlog.java Wed Sep 28 13:09:16 2011 +0200
+++ b/src/org/tmatesoft/hg/repo/Revlog.java Thu Sep 29 03:20:28 2011 +0200
@@ -21,6 +21,7 @@
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
@@ -87,6 +88,24 @@
         // XXX cache nodeids?
         return Nodeid.fromBinary(content.nodeid(revision), 0);
     }
+
+    public final List<Nodeid> getRevisions(int... revisions) {
+        ArrayList<Nodeid> rv = new ArrayList<Nodeid>(revisions.length);
+        Arrays.sort(revisions);
+        getRevisionsInternal(rv, revisions);
+        return rv;
+    }
+
+    /*package-local*/ void getRevisionsInternal(final List<Nodeid> retVal, int[] sortedRevs) {
+        // once getRevisionMap is there (and it's possible to find out whether it is available from cache),
+        // it may be used here, perhaps only for a small number of revisions
+        content.iterate(sortedRevs, false, new RevlogStream.Inspector() {
+
+            public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
+                retVal.add(Nodeid.fromBinary(nodeid, 0));
+            }
+        });
+    }
 
     /**
     * Get local revision number (index) of the specified revision.
@@ -195,7 +214,7 @@
             }
         }
     }
-    
+
     /*
     * XXX think over if it's better to do either:
     * pw = getChangelog().new ParentWalker(); pw.init() and pass pw instance around as needed
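
Usage sketch (not part of the changeset): with this rework, callers no longer poll a HistoryWalker cursor; they pass an HgChangelog.TreeInspector callback to HgDataFile#history() and receive, for every file revision, the corresponding changeset revision together with its parent and child changesets. The snippet below mirrors the Main.buildFileLog() change above; the class and helper names (FileHistorySketch, dumpFileHistory) are made up for illustration, and the HgDataFile is assumed to come from HgRepository#getFileNode() as in Main.java.

import java.util.Collection;

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgChangelog;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.util.Pair;

public class FileHistorySketch {

    // hypothetical helper: prints one line per changeset the given file was modified in
    static void dumpFileHistory(HgDataFile fn) {
        fn.history(new HgChangelog.TreeInspector() {

            public void next(Nodeid changesetRevision, Pair<Nodeid, Nodeid> parentChangesets, Collection<Nodeid> childChangesets) {
                // both parents present in the file revlog: file histories join here (merge)
                final boolean isJoin = !parentChangesets.first().isNull() && !parentChangesets.second().isNull();
                // more than one child: file history forks here
                final boolean isFork = childChangesets.size() > 1;
                System.out.printf("%s: %d child changeset(s)%s%s%n", changesetRevision.shortNotation(),
                        childChangesets.size(), isJoin ? " [join]" : "", isFork ? " [fork]" : "");
            }
        });
    }
}

Note that TreeInspector always reports changeset revisions rather than revisions of the revlog it is applied to, which is why the interface lives in HgChangelog.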