diff src/org/tmatesoft/hg/core/HgLogCommand.java @ 328:a674b8590362

Move file tree history to upper API level
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Wed, 05 Oct 2011 07:13:57 +0200
parents d68dcb3b5f49
children 5f9073eabf06
--- a/src/org/tmatesoft/hg/core/HgLogCommand.java	Tue Oct 04 07:24:44 2011 +0200
+++ b/src/org/tmatesoft/hg/core/HgLogCommand.java	Wed Oct 05 07:13:57 2011 +0200
@@ -18,7 +18,10 @@
 
 import static org.tmatesoft.hg.repo.HgRepository.TIP;
 
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Calendar;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.ConcurrentModificationException;
 import java.util.LinkedList;
@@ -26,11 +29,16 @@
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.tmatesoft.hg.internal.IntMap;
+import org.tmatesoft.hg.internal.IntVector;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgStatusCollector;
+import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
+import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.ProgressSupport;
 
@@ -205,10 +213,7 @@
 		final ProgressSupport progressHelper = getProgressSupport(handler);
 		try {
 			count = 0;
-			HgChangelog.ParentWalker pw = parentHelper; // leave it uninitialized unless we iterate whole repo
-			if (file == null) {
-				pw = getParentHelper();
-			}
+			HgChangelog.ParentWalker pw = getParentHelper(file == null); // leave it uninitialized unless we iterate whole repo
 			// ChangesetTransform creates a blank PathPool, and #file(String, boolean) above 
 			// may utilize it as well. CommandContext? How about StatusCollector there as well?
 			csetTransform = new ChangesetTransformer(repo, handler, pw, progressHelper, getCancelSupport(handler, true));
@@ -250,7 +255,77 @@
 			progressHelper.done();
 		}
 	}
+	
+	public void execute(HgChangesetTreeHandler handler) throws CancelledException {
+		if (handler == null) {
+			throw new IllegalArgumentException();
+		}
+		if (csetTransform != null) {
+			throw new ConcurrentModificationException();
+		}
+		if (file == null) {
+			throw new IllegalArgumentException("History tree is supported for files only (for now); please specify a file");
+		}
+		if (followHistory) {
+			throw new UnsupportedOperationException("Can't follow file history when building tree (yet?)");
+		}
+		class TreeBuildInspector implements HgChangelog.ParentInspector, HgChangelog.RevisionInspector {
+			HistoryNode[] completeHistory;
+			int[] commitRevisions;
 
+			public void next(int revisionNumber, Nodeid revision, int linkedRevision) {
+				commitRevisions[revisionNumber] = linkedRevision;
+			}
+
+			public void next(int revisionNumber, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2) {
+				HistoryNode p1 = null, p2 = null;
+				if (parent1 != -1) {
+					p1 = completeHistory[parent1];
+				}
+				if (parent2 != -1) {
+					p2 = completeHistory[parent2];
+				}
+				completeHistory[revisionNumber] = new HistoryNode(commitRevisions[revisionNumber], revision, p1, p2);
+			}
+			
+			HistoryNode[] go(HgDataFile fileNode) {
+				completeHistory = new HistoryNode[fileNode.getRevisionCount()];
+				commitRevisions = new int[completeHistory.length];
+				fileNode.walk(0, TIP, this);
+				return completeHistory;
+			}
+		}
+		final ProgressSupport progressHelper = getProgressSupport(handler);
+		progressHelper.start(4);
+		final CancelSupport cancelHelper = getCancelSupport(handler, true);
+		cancelHelper.checkCancelled();
+		HgDataFile fileNode = repo.getFileNode(file);
+		// build tree of nodes according to parents in file's revlog
+		final TreeBuildInspector treeBuildInspector = new TreeBuildInspector();
+		final HistoryNode[] completeHistory = treeBuildInspector.go(fileNode);
+		progressHelper.worked(1);
+		cancelHelper.checkCancelled();
+		ElementImpl ei = new ElementImpl(treeBuildInspector.commitRevisions.length);
+		final ProgressSupport ph2;
+		if (treeBuildInspector.commitRevisions.length < 100 /*XXX is it really worth it? */) {
+			ei.initTransform();
+			repo.getChangelog().range(ei, treeBuildInspector.commitRevisions);
+			progressHelper.worked(1);
+			ph2 = new ProgressSupport.Sub(progressHelper, 2);
+		} else {
+			ph2 = new ProgressSupport.Sub(progressHelper, 3);
+		}
+		ph2.start(completeHistory.length);
+		// XXX shall sort completeHistory according to changeset numbers?
+		for (int i = 0; i < completeHistory.length; i++) {
+			final HistoryNode n = completeHistory[i];
+			handler.next(ei.init(n));
+			ph2.worked(1);
+			cancelHelper.checkCancelled();
+		}
+		progressHelper.done();
+	}
+	
 	//
 	
 	public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
@@ -280,8 +355,8 @@
 		csetTransform.next(revisionNumber, nodeid, cset);
 	}
 	
-	private HgChangelog.ParentWalker getParentHelper() {
-		if (parentHelper == null) {
+	private HgChangelog.ParentWalker getParentHelper(boolean create) {
+		if (parentHelper == null && create) {
 			parentHelper = repo.getChangelog().new ParentWalker();
 			parentHelper.init();
 		}
@@ -317,4 +392,210 @@
 			result.add(changeset.clone());
 		}
 	}
+
+	private static class HistoryNode {
+		final int changeset;
+		final Nodeid fileRevision;
+		final HistoryNode parent1, parent2;
+		List<HistoryNode> children;
+
+		HistoryNode(int cs, Nodeid revision, HistoryNode p1, HistoryNode p2) {
+			changeset = cs;
+			fileRevision = revision;
+			parent1 = p1;
+			parent2 = p2;
+			if (p1 != null) {
+				p1.addChild(this);
+			}
+			if (p2 != null) {
+				p2.addChild(this);
+			}
+		}
+		
+		void addChild(HistoryNode child) {
+			if (children == null) {
+				children = new ArrayList<HistoryNode>(2);
+			}
+			children.add(child);
+		}
+	}
+
+	private class ElementImpl implements HgChangesetTreeHandler.TreeElement, HgChangelog.Inspector {
+		private HistoryNode historyNode;
+		private Pair<HgChangeset, HgChangeset> parents;
+		private List<HgChangeset> children;
+		private IntMap<HgChangeset> cachedChangesets;
+		private ChangesetTransformer.Transformation transform;
+		private Nodeid changesetRevision;
+		private Pair<Nodeid,Nodeid> parentRevisions;
+		private List<Nodeid> childRevisions;
+		
+		public ElementImpl(int total) {
+			cachedChangesets = new IntMap<HgChangeset>(total);
+		}
+
+		ElementImpl init(HistoryNode n) {
+			historyNode = n;
+			parents = null;
+			children = null;
+			changesetRevision = null;
+			parentRevisions = null;
+			childRevisions = null;
+			return this;
+		}
+
+		public Nodeid fileRevision() {
+			return historyNode.fileRevision;
+		}
+
+		public HgChangeset changeset() {
+			return get(historyNode.changeset)[0];
+		}
+
+		public Pair<HgChangeset, HgChangeset> parents() {
+			if (parents != null) {
+				return parents;
+			}
+			HistoryNode p;
+			final int p1, p2;
+			if ((p = historyNode.parent1) != null) {
+				p1 = p.changeset;
+			} else {
+				p1 = -1;
+			}
+			if ((p = historyNode.parent2) != null) {
+				p2 = p.changeset;
+			} else {
+				p2 = -1;
+			}
+			HgChangeset[] r = get(p1, p2);
+			return parents = new Pair<HgChangeset, HgChangeset>(r[0], r[1]);
+		}
+
+		public Collection<HgChangeset> children() {
+			if (children != null) {
+				return children;
+			}
+			if (historyNode.children == null) {
+				children = Collections.emptyList();
+			} else {
+				int[] childrenChangesetNumbers = new int[historyNode.children.size()];
+				int j = 0;
+				for (HistoryNode hn : historyNode.children) {
+					childrenChangesetNumbers[j++] = hn.changeset;
+				}
+				children = Arrays.asList(get(childrenChangesetNumbers));
+			}
+			return children;
+		}
+		
+		void populate(HgChangeset cs) {
+			cachedChangesets.put(cs.getRevision(), cs);
+		}
+		
+		private HgChangeset[] get(int... changelogRevisionNumber) {
+			HgChangeset[] rv = new HgChangeset[changelogRevisionNumber.length];
+			IntVector misses = new IntVector(changelogRevisionNumber.length, -1);
+			for (int i = 0; i < changelogRevisionNumber.length; i++) {
+				if (changelogRevisionNumber[i] == -1) {
+					rv[i] = null;
+					continue;
+				}
+				HgChangeset cached = cachedChangesets.get(changelogRevisionNumber[i]);
+				if (cached != null) {
+					rv[i] = cached;
+				} else {
+					misses.add(changelogRevisionNumber[i]);
+				}
+			}
+			if (misses.size() > 0) {
+				final int[] changesets2read = misses.toArray();
+				initTransform();
+				repo.getChangelog().range(this, changesets2read);
+				for (int changeset2read : changesets2read) {
+					HgChangeset cs = cachedChangesets.get(changeset2read);
+					if (cs == null) {
+						throw new HgBadStateException();
+					}
+					// HgChangelog.range may reorder changesets according to their order in the changelog,
+					// thus need to find the original index
+					boolean sanity = false;
+					for (int i = 0; i < changelogRevisionNumber.length; i++) {
+						if (changelogRevisionNumber[i] == cs.getRevision()) {
+							rv[i] = cs;
+							sanity = true;
+							break;
+						}
+					}
+					assert sanity;
+				}
+			}
+			return rv;
+		}
+
+		// init only when needed
+		void initTransform() {
+			if (transform == null) {
+				transform = new ChangesetTransformer.Transformation(new HgStatusCollector(repo)/*XXX try to reuse from context?*/, getParentHelper(false));
+			}
+		}
+		
+		public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
+			HgChangeset cs = transform.handle(revisionNumber, nodeid, cset);
+			populate(cs.clone());
+		}
+
+		public Nodeid changesetRevision() {
+			if (changesetRevision == null) {
+				changesetRevision = getRevision(historyNode.changeset);
+			}
+			return changesetRevision;
+		}
+
+		public Pair<Nodeid, Nodeid> parentRevisions() {
+			if (parentRevisions == null) {
+				HistoryNode p;
+				final Nodeid p1, p2;
+				if ((p = historyNode.parent1) != null) {
+					p1 = getRevision(p.changeset);
+				} else {
+					p1 = Nodeid.NULL;
+				}
+				if ((p = historyNode.parent2) != null) {
+					p2 = getRevision(p.changeset);
+				} else {
+					p2 = Nodeid.NULL;
+				}
+				parentRevisions = new Pair<Nodeid, Nodeid>(p1, p2);
+			}
+			return parentRevisions;
+		}
+
+		public Collection<Nodeid> childRevisions() {
+			if (childRevisions != null) {
+				return childRevisions;
+			}
+			if (historyNode.children == null) {
+				childRevisions = Collections.emptyList();
+			} else {
+				ArrayList<Nodeid> rv = new ArrayList<Nodeid>(historyNode.children.size());
+				for (HistoryNode hn : historyNode.children) {
+					rv.add(getRevision(hn.changeset));
+				}
+				childRevisions = Collections.unmodifiableList(rv);
+			}
+			return childRevisions;
+		}
+		
+		// reading a nodeid involves reading the index only; presumably we can afford not to optimize repeated reads
+		private Nodeid getRevision(int changelogRevisionNumber) {
+			// XXX pipe through pool
+			HgChangeset cs = cachedChangesets.get(changelogRevisionNumber);
+			if (cs != null) {
+				return cs.getNodeid();
+			} else {
+				return repo.getChangelog().getRevision(changelogRevisionNumber);
+			}
+		}
+	}
 }
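
Usage sketch (illustrative, not part of this changeset): the new execute(HgChangesetTreeHandler) entry point introduced above could be driven roughly as follows. The TreeElement accessors and the no-follow/file-only restrictions come straight from the code in this diff; the way the repository is located (HgLookup#detectFromWorkingDir), the file(String, boolean) overload, and the sample path are assumptions.

	import org.tmatesoft.hg.core.HgChangesetTreeHandler;
	import org.tmatesoft.hg.core.HgLogCommand;
	import org.tmatesoft.hg.core.HgLookup;
	import org.tmatesoft.hg.repo.HgRepository;

	public class FileTreeHistoryExample {
		public static void main(String[] args) throws Exception {
			// locating the repository via HgLookup is an assumption; any HgRepository instance would do
			HgRepository hgRepo = new HgLookup().detectFromWorkingDir();
			HgLogCommand cmd = new HgLogCommand(hgRepo);
			// tree history requires a file and must not follow history (see the guards in execute());
			// the boolean is presumed to be the follow flag, hence false here
			cmd.file("src/org/tmatesoft/hg/core/HgLogCommand.java", false);
			cmd.execute(new HgChangesetTreeHandler() {
				public void next(HgChangesetTreeHandler.TreeElement entry) {
					// each element pairs a file revision with its changeset and links to parent/child revisions
					System.out.printf("file rev %s in cset %s, %d child revision(s)%n",
							entry.fileRevision(), entry.changesetRevision(), entry.childRevisions().size());
				}
			});
		}
	}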