changeset 171:2c3e96674e2a

Towards outgoing changes: initial detection logic, connect to remote repo stub
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Fri, 25 Mar 2011 00:05:52 +0100
parents 71ddbf8603e8
children 87f40938c9b2
files cmdline/org/tmatesoft/hg/console/Clone.java cmdline/org/tmatesoft/hg/console/Incoming.java cmdline/org/tmatesoft/hg/console/Outgoing.java cmdline/org/tmatesoft/hg/console/Remote.java src/org/tmatesoft/hg/repo/HgLookup.java src/org/tmatesoft/hg/repo/HgRemoteRepository.java src/org/tmatesoft/hg/repo/Revlog.java
diffstat 7 files changed, 237 insertions(+), 64 deletions(-)
--- a/cmdline/org/tmatesoft/hg/console/Clone.java	Wed Mar 23 20:46:00 2011 +0100
+++ b/cmdline/org/tmatesoft/hg/console/Clone.java	Fri Mar 25 00:05:52 2011 +0100
@@ -92,7 +92,7 @@
 		// //////// 4. process manifest, using map from step 3, collect manifest nodeids
 		// //////// 5. process every file, using map from 3, and consult set from step 4 to ensure repo is correct
 		// access source
-		HgRemoteRepository remoteRepo = new HgRemoteRepository();// new HgLookup().detect(new URL("https://asd/hg/"));
+		HgRemoteRepository remoteRepo = new HgLookup().detect(new URL("https://asd/hg/"));
 		// discover changes
 		HgBundle completeChanges = remoteRepo.getChanges(Collections.singletonList(NULL));
 		WriteDownMate mate = new WriteDownMate(destDir);
--- a/cmdline/org/tmatesoft/hg/console/Incoming.java	Wed Mar 23 20:46:00 2011 +0100
+++ b/cmdline/org/tmatesoft/hg/console/Incoming.java	Fri Mar 25 00:05:52 2011 +0100
@@ -16,6 +16,8 @@
  */
 package org.tmatesoft.hg.console;
 
+import static org.tmatesoft.hg.core.Nodeid.NULL;
+
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
@@ -24,6 +26,7 @@
 
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgRemoteRepository.RemoteBranch;
 import org.tmatesoft.hg.repo.HgRepository;
 
 
@@ -141,34 +144,12 @@
 		
 	}
 
-	static final class RemoteBranch {
-		public Nodeid head, root, p1, p2;
-
-		@Override
-		public boolean equals(Object obj) {
-			if (this == obj) {
-				return true;
-			}
-			if (false == obj instanceof RemoteBranch) {
-				return false;
-			}
-			RemoteBranch o = (RemoteBranch) obj;
-			return head.equals(o.head) && root.equals(o.root) && (p1 == null && o.p1 == null || p1.equals(o.p1)) && (p2 == null && o.p2 == null || p2.equals(o.p2));
-		}
-	}
 
 	private static void remoteBranches(Collection<Nodeid> unknownRemoteHeads, List<RemoteBranch> remoteBranches) {
-		// discovery.findcommonincoming:
-		// unknown = remote.branches(remote.heads); 
-		// sent: cmd=branches&roots=d6d2a630f4a6d670c90a5ca909150f2b426ec88f+
-		// received: d6d2a630f4a6d670c90a5ca909150f2b426ec88f dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
-		// head, root, first parent, second parent
 		//
 		// TODO implement this with remote access
 		//
-		RemoteBranch rb = new RemoteBranch();
-		rb.head = unknownRemoteHeads.iterator().next();
-		rb.root = Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40);
+		RemoteBranch rb = new RemoteBranch(unknownRemoteHeads.iterator().next(), Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6"), NULL, NULL);
 		remoteBranches.add(rb);
 	}
 
--- a/cmdline/org/tmatesoft/hg/console/Outgoing.java	Wed Mar 23 20:46:00 2011 +0100
+++ b/cmdline/org/tmatesoft/hg/console/Outgoing.java	Fri Mar 25 00:05:52 2011 +0100
@@ -16,19 +16,25 @@
  */
 package org.tmatesoft.hg.console;
 
+import static org.tmatesoft.hg.core.Nodeid.NULL;
+
+import java.net.URL;
 import java.util.Collection;
-import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
 
+import org.tmatesoft.hg.core.HgException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgRemoteRepository;
+import org.tmatesoft.hg.repo.HgRemoteRepository.RemoteBranch;
 import org.tmatesoft.hg.repo.HgRepository;
 
 
 /**
  * WORK IN PROGRESS, DO NOT USE
- * hg out
+ * hg outgoing
  * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
@@ -42,42 +48,100 @@
 			System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation());
 			return;
 		}
-		// FIXME detection of 
-		List<Nodeid> base = new LinkedList<Nodeid>();
-		base.add(Nodeid.fromAscii("d6d2a630f4a6d670c90a5ca909150f2b426ec88f".getBytes(), 0, 40));
-		//
-		// fill with all known
+		HgRemoteRepository hgRemote = new HgLookup().detect(new URL("hg4j-gc"));
+
 		HgChangelog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker();
 		pw.init();
-		LinkedHashSet<Nodeid> sendToRemote = new LinkedHashSet<Nodeid>(pw.allNodes());
-		dump("initial state", sendToRemote);
-		// remove base and its parents
-		LinkedList<Nodeid> queueToClean = new LinkedList<Nodeid>(base);
-		while (!queueToClean.isEmpty()) {
-			Nodeid nid = queueToClean.removeFirst();
-			if (sendToRemote.remove(nid)) {
-				pw.appendParentsOf(nid, queueToClean);
+		
+		List<Nodeid> commonKnown = findCommonWithRemote(pw, hgRemote);
+		dump("Nodes known both locally and at the remote server", commonKnown);
+		// sanity check
+		for (Nodeid n : commonKnown) {
+			if (!pw.knownNode(n)) {
+				throw new HgException("Unknown node reported as common: " + n);
 			}
 		}
-		dump("Clean from known parents", sendToRemote);
-		// XXX I think sendToRemote is what we actually need here - everything local, missing from remote
-		// however, if we need to send only a subset of these, need to proceed.
-		LinkedList<Nodeid> result = new LinkedList<Nodeid>();
-		// find among left those without parents
-		for (Nodeid nid : sendToRemote) {
-			Nodeid p1 = pw.firstParent(nid);
-			// in fact, we may assume nulls are never part of sendToRemote
-			if (p1 != null && !sendToRemote.contains(p1)) {
-				Nodeid p2 = pw.secondParent(nid);
-				if (p2 == null || !sendToRemote.contains(p2)) {
-					result.add(nid);
+		// find all local children of commonKnown
+		List<Nodeid> result = pw.childrenOf(commonKnown);
+		dump("Result", result);
+	}
+	
+	private static List<Nodeid> findCommonWithRemote(HgChangelog.ParentWalker pwLocal, HgRemoteRepository hgRemote) {
+		List<Nodeid> remoteHeads = hgRemote.heads();
+		LinkedList<Nodeid> common = new LinkedList<Nodeid>(); // these remote heads are also known locally
+		LinkedList<Nodeid> toQuery = new LinkedList<Nodeid>(); // these need further queries to find the common nodes
+		for (Nodeid rh : remoteHeads) {
+			if (pwLocal.knownNode(rh)) {
+				common.add(rh);
+			} else {
+				toQuery.add(rh);
+			}
+		}
+		if (toQuery.isEmpty()) {
+			return common; 
+		}
+		LinkedList<RemoteBranch> checkUp2Head = new LinkedList<RemoteBranch>(); // only branch.root and branch.head are of interest.
+		// these are branches with an unknown head but a known root; the root might not be the last node known to both sides,
+		// i.e. there might be child changesets that are also available at the remote, [..?..common-head..remote-head] - need to
+		// scroll up to the common head.
+		while (!toQuery.isEmpty()) {
+			List<RemoteBranch> remoteBranches = hgRemote.branches(toQuery);	//head, root, first parent, second parent
+			toQuery.clear();
+			while(!remoteBranches.isEmpty()) {
+				RemoteBranch rb = remoteBranches.remove(0);
+				// I assume the remote branches call returns branches whose heads are exactly the nodes passed in, i.e.
+				// there is no need to check whether rb.head is unknown.
+				if (pwLocal.knownNode(rb.root)) {
+					// we know the branch start; the common head is somewhere along its line of descendants
+					checkUp2Head.add(rb);
+				} else {
+					// dig deeper in the history, if necessary
+					if (!NULL.equals(rb.p1) && !pwLocal.knownNode(rb.p1)) {
+						toQuery.add(rb.p1);
+					}
+					if (!NULL.equals(rb.p2) && !pwLocal.knownNode(rb.p2)) {
+						toQuery.add(rb.p2);
+					}
 				}
 			}
 		}
-		dump("Result", result);
-		// final outcome is the collection of nodes between(lastresult and revision/tip)
-		//
-		System.out.println("TODO: nodes between result and tip");
+		// can't simply check nodes between a checkUp2Head element and the local heads - the remote might have a different sequence of descendants
+		for (RemoteBranch rb : checkUp2Head) {
+			// rb.root is known locally
+			List<Nodeid> remoteRevisions = hgRemote.between(rb.root, rb.head);
+			if (remoteRevisions.isEmpty()) {
+				// head is an immediate child of root
+				common.add(rb.root);
+			} else {
+				Nodeid root = rb.root;
+				while(!remoteRevisions.isEmpty()) {
+					Nodeid n = remoteRevisions.remove(0);
+					if (pwLocal.knownNode(n)) {
+						if (remoteRevisions.isEmpty()) {
+							// this is the last known node before an unknown one
+							common.add(n);
+							break;
+						}
+						if (remoteRevisions.size() == 1) {
+							// there's only one node left between the known n and the unknown head,
+							// this check merely saves an extra 'between' query, not really essential
+							Nodeid last = remoteRevisions.remove(0);
+							common.add(pwLocal.knownNode(last) ? last : n);
+							break;
+						}
+						// might come in handy for the next 'between' query, to narrow the search down
+						root = n;
+					} else {
+						remoteRevisions = hgRemote.between(root, n);
+						if (remoteRevisions.isEmpty()) {
+							common.add(root);
+						}
+					}
+				}
+			}
+		}
+		// TODO ensure unique elements in the list
+		return common;
 	}
 
 	private static void dump(String s, Collection<Nodeid> c) {
--- a/cmdline/org/tmatesoft/hg/console/Remote.java	Wed Mar 23 20:46:00 2011 +0100
+++ b/cmdline/org/tmatesoft/hg/console/Remote.java	Fri Mar 25 00:05:52 2011 +0100
@@ -20,7 +20,6 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.net.HttpURLConnection;
 import java.net.URL;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
@@ -34,7 +33,6 @@
 import javax.net.ssl.TrustManager;
 import javax.net.ssl.X509TrustManager;
 
-import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.ConfigFile;
 import org.tmatesoft.hg.internal.Internals;
 
@@ -54,12 +52,49 @@
 	 cmd=heads gives space-separated list of nodeids (or just one)
 	 nodeids are in hex (printable) format, need to convert fromAscii()
 	 cmd=branchmap
+	 cmd=between needs argument pairs, with the first element of each pair being the head(!) and the second the root of the branch,
+	 	i.e. (newer-older), not (older-newer) as one might expect. The returned list of nodes comes in reverse order (from newer
+	 	to older) as well
+
+	cmd=branches&nodes=d6d2a630f4a6d670c90a5ca909150f2b426ec88f+
+	head, root, first parent, second parent
+	received: d6d2a630f4a6d670c90a5ca909150f2b426ec88f dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
+	
+	Sample sequence for an actual repository state with a merged/closed branch, where 157:d5268ca7715b8d96204fc62abc632e8f55761547 is the merge revision of 156 and 53
+	>branches, 170:71ddbf8603e8e09d54ac9c5fe4bb5ae824589f1d
+	 71ddbf8603e8e09d54ac9c5fe4bb5ae824589f1d d5268ca7715b8d96204fc62abc632e8f55761547 643ddec3be36246fc052cf22ece503fa60cafe22 a6f39e595b2b54f56304470269a936ead77f5725
+
+	>branches, 156:643ddec3be36246fc052cf22ece503fa60cafe22
+	 643ddec3be36246fc052cf22ece503fa60cafe22 ade65afe0906febafbf8a2e41002052e0e446471 08754fce5778a3409476ecdb3cec6b5172c34367 40d04c4f771ebbd599eb229145252732a596740a
+	>branches, 53:a6f39e595b2b54f56304470269a936ead77f5725
+	 a6f39e595b2b54f56304470269a936ead77f5725 a6f39e595b2b54f56304470269a936ead77f5725 9429c7bd1920fab164a9d2b621d38d57bcb49ae0 30bd389788464287cee22ccff54c330a4b715de5
+
+	>branches, 84:08754fce5778a3409476ecdb3cec6b5172c34367  (p1:82) 
+	 08754fce5778a3409476ecdb3cec6b5172c34367 dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
+	>branches, 83:40d04c4f771ebbd599eb229145252732a596740a (p1:80)
+	 40d04c4f771ebbd599eb229145252732a596740a dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
+
+	>branches, 51:9429c7bd1920fab164a9d2b621d38d57bcb49ae0 (wrap-data-access branch)
+	 9429c7bd1920fab164a9d2b621d38d57bcb49ae0 dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
+	>branches, 52:30bd389788464287cee22ccff54c330a4b715de5 (p1:50)
+	 30bd389788464287cee22ccff54c330a4b715de5 dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
+
+
+	cmd=between&pairs=71ddbf8603e8e09d54ac9c5fe4bb5ae824589f1d-d5268ca7715b8d96204fc62abc632e8f55761547+40d04c4f771ebbd599eb229145252732a596740a-dbd663faec1f0175619cf7668bddc6350548b8d6
+	 8c8e3f372fa1fbfcf92b004b6f2ada2dbaf60028 dd525ca65de8e78cb133919de57ea0a6e6454664 1d0654be1466d522994f8bead510e360fbeb8d79 c17a08095e4420202ac1b2d939ef6d5f8bebb569
+	 4222b04f34ee885bc1ad547c7ef330e18a51afc1 5f9635c016819b322ae05a91b3378621b538c933 c677e159391925a50b9a23f557426b2246bc9c5d 0d279bcc44427cb5ae2f3407c02f21187ccc8aea e21df6259f8374ac136767321e837c0c6dd21907 b01500fe2604c2c7eadf44349cce9f438484474b 865bf07f381ff7d1b742453568def92576af80b6
+
+	Between two adjacent revisions (i.e. the remote head is a direct child of a local root)
+	cmd=between&pairs=71ddbf8603e8e09d54ac9c5fe4bb5ae824589f1d-8c8e3f372fa1fbfcf92b004b6f2ada2dbaf60028
+	 empty result
 	 */
 	public static void main(String[] args) throws Exception {
 		ConfigFile cfg = new Internals().newConfigFile();
 		cfg.addLocation(new File(System.getProperty("user.home"), ".hgrc"));
 		String svnkitServer = cfg.getSection("paths").get("svnkit");
-		URL url = new URL(svnkitServer + "?cmd=changegroup&roots=" + Nodeid.NULL.toString());
+//		URL url = new URL(svnkitServer + "?cmd=branches&nodes=30bd389788464287cee22ccff54c330a4b715de5");
+		URL url = new URL(svnkitServer + "?cmd=between&pairs=71ddbf8603e8e09d54ac9c5fe4bb5ae824589f1d-8c8e3f372fa1fbfcf92b004b6f2ada2dbaf60028"); 
+//		URL url = new URL(svnkitServer + "?cmd=changegroup&roots=" + Nodeid.NULL.toString());
 //		URL url = new URL("http://localhost:8000/" + "?cmd=stream_out");
 //		URL url = new URL(svnkitServer + "?cmd=stream_out");
 	
@@ -88,6 +123,7 @@
 		urlConnection.addRequestProperty("Authorization", "Basic " + authInfo);
 		urlConnection.setSSLSocketFactory(sslContext.getSocketFactory());
 		urlConnection.connect();
+		System.out.println("Query: " + url.getQuery());
 		System.out.println("Response headers:");
 		final Map<String, List<String>> headerFields = urlConnection.getHeaderFields();
 		for (String s : headerFields.keySet()) {
@@ -96,8 +132,8 @@
 		System.out.printf("Content type is %s and its length is %d\n", urlConnection.getContentType(), urlConnection.getContentLength());
 		InputStream is = urlConnection.getInputStream();
 		//
-//		dump(is, -1); // simple dump, any cmd
-		writeBundle(is, false, "HG10GZ"); // cmd=changegroup
+		dump(is, -1); // simple dump, any cmd
+//		writeBundle(is, false, "HG10GZ"); // cmd=changegroup
 		//writeBundle(is, true, "" or "HG10UN");
 		//
 		urlConnection.disconnect();
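
The protocol notes in the Remote.java comment above spell out the cmd=branches response format: head, root, first parent and second parent as four space-separated 40-character hex nodeids. A minimal sketch of how such a line could be mapped onto the RemoteBranch value object introduced in this changeset; parseBranchesLine and the class around it are hypothetical and the HTTP round-trip is omitted, only Nodeid.fromAscii and the RemoteBranch constructor come from the code in this diff.

	import org.tmatesoft.hg.core.Nodeid;
	import org.tmatesoft.hg.repo.HgRemoteRepository.RemoteBranch;

	// Illustration only: parse one cmd=branches response line
	// (four space-separated 40-char hex nodeids: head root p1 p2) into a RemoteBranch.
	class BranchesResponseSketch {
		static RemoteBranch parseBranchesLine(String line) {
			String[] parts = line.trim().split(" ");
			Nodeid head = Nodeid.fromAscii(parts[0].getBytes(), 0, 40);
			Nodeid root = Nodeid.fromAscii(parts[1].getBytes(), 0, 40);
			Nodeid p1 = Nodeid.fromAscii(parts[2].getBytes(), 0, 40);
			Nodeid p2 = Nodeid.fromAscii(parts[3].getBytes(), 0, 40);
			return new RemoteBranch(head, root, p1, p2);
		}
	}
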
--- a/src/org/tmatesoft/hg/repo/HgLookup.java	Wed Mar 23 20:46:00 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/HgLookup.java	Fri Mar 25 00:05:52 2011 +0100
@@ -18,10 +18,13 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.net.MalformedURLException;
 import java.net.URL;
 
 import org.tmatesoft.hg.core.HgException;
+import org.tmatesoft.hg.internal.ConfigFile;
 import org.tmatesoft.hg.internal.DataAccessProvider;
+import org.tmatesoft.hg.internal.Internals;
 
 /**
  * Utility methods to find Mercurial repository at a given location
@@ -72,9 +75,27 @@
 	}
 
 	public HgRemoteRepository detect(URL url) throws HgException {
+		if (url == null) {
+			throw new IllegalArgumentException();
+		}
 		if (Boolean.FALSE.booleanValue()) {
 			throw HgRepository.notImplemented();
 		}
-		return null;
+		if (url.getProtocol() == null) {
+			// try configuration keys
+			String key = url.getHost();
+			ConfigFile cfg = new Internals().newConfigFile();
+			cfg.addLocation(new File(System.getProperty("user.home"), ".hgrc"));
+			String server = cfg.getSection("paths").get(key);
+			if (server == null) {
+				throw new HgException(String.format("Can't find specification for server %s in the config", key));
+			}
+			try {
+				url = new URL(server);
+			} catch (MalformedURLException ex) {
+				throw new HgException(ex);
+			}
+		}
+		return new HgRemoteRepository(url);
 	}
 }
--- a/src/org/tmatesoft/hg/repo/HgRemoteRepository.java	Wed Mar 23 20:46:00 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/HgRemoteRepository.java	Fri Mar 25 00:05:52 2011 +0100
@@ -17,6 +17,8 @@
 package org.tmatesoft.hg.repo;
 
 import java.io.File;
+import java.net.URL;
+import java.util.Collections;
 import java.util.List;
 
 import org.tmatesoft.hg.core.HgException;
@@ -31,9 +33,47 @@
  * @author TMate Software Ltd.
  */
 public class HgRemoteRepository {
+	
+	HgRemoteRepository(URL url) {
+	}
+	
+	public List<Nodeid> heads() {
+		return Collections.emptyList();
+	}
+	
+	public List<Nodeid> between(Nodeid base, Nodeid tip) {
+		return Collections.emptyList();
+	}
+
+	public List<RemoteBranch> branches(List<Nodeid> nodes) {
+		return Collections.emptyList();
+	}
 
 	// WireProtocol wiki: roots = a list of the latest nodes on every service side changeset branch that both the client and server know about. 
 	public HgBundle getChanges(List<Nodeid> roots) throws HgException {
 		return new HgLookup().loadBundle(new File("/temp/hg/hg-bundle-000000000000-gz.tmp"));
 	}
+
+	public static final class RemoteBranch {
+		public final Nodeid head, root, p1, p2;
+		
+		public RemoteBranch(Nodeid h, Nodeid r, Nodeid parent1, Nodeid parent2) {
+			head = h;
+			root = r;
+			p1 = parent1;
+			p2 = parent2;
+		}
+
+		@Override
+		public boolean equals(Object obj) {
+			if (this == obj) {
+				return true;
+			}
+			if (false == obj instanceof RemoteBranch) {
+				return false;
+			}
+			RemoteBranch o = (RemoteBranch) obj;
+			return head.equals(o.head) && root.equals(o.root) && (p1 == null ? o.p1 == null : p1.equals(o.p1)) && (p2 == null ? o.p2 == null : p2.equals(o.p2));
+		}
+	}
 }
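
heads(), between() and branches() are stubs for now and return empty lists. Going by the cmd=heads description in Remote.java (a space-separated list of hex nodeids), the parsing half of a future heads() implementation might look roughly like the sketch below; parseHeads and its enclosing class are assumptions, only Nodeid.fromAscii is taken from the existing API and the HTTP request itself is out of scope.

	import java.util.LinkedList;
	import java.util.List;

	import org.tmatesoft.hg.core.Nodeid;

	// Sketch of the parsing step a real heads() could perform on a cmd=heads response,
	// which is a space-separated list of 40-character hex nodeids.
	class HeadsResponseSketch {
		static List<Nodeid> parseHeads(String response) {
			List<Nodeid> result = new LinkedList<Nodeid>();
			for (String hex : response.trim().split(" ")) {
				result.add(Nodeid.fromAscii(hex.getBytes(), 0, 40));
			}
			return result;
		}
	}
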
--- a/src/org/tmatesoft/hg/repo/Revlog.java	Wed Mar 23 20:46:00 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/Revlog.java	Fri Mar 25 00:05:52 2011 +0100
@@ -25,7 +25,10 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
@@ -199,11 +202,10 @@
 	public final class ParentWalker {
 		private Map<Nodeid, Nodeid> firstParent;
 		private Map<Nodeid, Nodeid> secondParent;
-		private Set<Nodeid> allNodes;
+		private final LinkedHashSet<Nodeid> allNodes = new LinkedHashSet<Nodeid>(); // childrenOf relies on the ordering
 
 		public ParentWalker() {
 			firstParent = secondParent = Collections.emptyMap();
-			allNodes = Collections.emptySet();
 		}
 		
 		public void init() {
@@ -211,7 +213,6 @@
 			final int revisionCount = stream.revisionCount();
 			firstParent = new HashMap<Nodeid, Nodeid>(revisionCount);
 			secondParent = new HashMap<Nodeid, Nodeid>(firstParent.size() >> 1); // assume branches/merges are less frequent
-			allNodes = new LinkedHashSet<Nodeid>();
 			
 			RevlogStream.Inspector insp = new RevlogStream.Inspector() {
 				final Nodeid[] sequentialRevisionNodeids = new Nodeid[revisionCount];
@@ -279,6 +280,36 @@
 			}
 			return modified;
 		}
+
+		// XXX an alternative (and perhaps more reliable) approach would be to make a copy of allNodes and remove
+		// the nodes, their parents and so on.
+		
+		// @return ordered collection of all children rooted at supplied nodes. Nodes shall not be descendants of each other! 
+		public List<Nodeid> childrenOf(List<Nodeid> nodes) {
+			HashSet<Nodeid> parents = new HashSet<Nodeid>();
+			LinkedHashSet<Nodeid> result = new LinkedHashSet<Nodeid>();
+			LinkedList<Nodeid> orderedResult = new LinkedList<Nodeid>();
+			for(Nodeid next : allNodes) {
+				// I assume allNodes is sorted, hence do not check any parents until we hit a common known node first
+				if (nodes.contains(next)) {
+					parents.add(next);
+				} else {
+					if (parents.isEmpty()) {
+						// haven't reached any known node yet
+						continue;
+					}
+					// record each and every node encountered after the first common known node is hit
+					orderedResult.addLast(next);
+					if (parents.contains(firstParent(next)) || parents.contains(secondParent(next))) {
+						result.add(next);
+						parents.add(next); // to find next's children
+					}
+				}
+			}
+			// leave only those of interest in ordered sequence 
+			orderedResult.retainAll(result);
+			return orderedResult;
+		}
 	}
 
 	protected static class ContentPipe implements RevlogStream.Inspector, CancelSupport {
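
The new ParentWalker.childrenOf() above is what Outgoing.java relies on to turn the set of common nodes into outgoing candidates. A small usage sketch, mirroring the calls already made in Outgoing.java; the wrapper class and method name are hypothetical, the repository is assumed to be obtained elsewhere.

	import java.util.Collections;
	import java.util.List;

	import org.tmatesoft.hg.core.Nodeid;
	import org.tmatesoft.hg.repo.HgChangelog;
	import org.tmatesoft.hg.repo.HgRepository;

	// Usage sketch for ParentWalker.childrenOf(): starting from a revision known to be common
	// with the remote side, collect every local descendant - the candidates for "hg outgoing".
	class ChildrenOfSketch {
		static List<Nodeid> outgoingCandidates(HgRepository repo, Nodeid commonHead) {
			HgChangelog.ParentWalker pw = repo.getChangelog().new ParentWalker();
			pw.init();
			// childrenOf expects roots that are not descendants of one another
			return pw.childrenOf(Collections.singletonList(commonHead));
		}
	}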