changeset 471:7bcfbc255f48

Merge changes from smartgit3 branch into 1.1 stream
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Wed, 11 Jul 2012 20:40:47 +0200
parents 31bd09da0dcf (current diff) 2078692eeb58 (diff)
children 2a0b09eec376
files cmdline/org/tmatesoft/hg/console/ChangesetDumpHandler.java cmdline/org/tmatesoft/hg/console/Main.java src/org/tmatesoft/hg/core/HgChangeset.java src/org/tmatesoft/hg/internal/IntMap.java src/org/tmatesoft/hg/internal/PhasesHelper.java src/org/tmatesoft/hg/internal/RevisionDescendants.java src/org/tmatesoft/hg/repo/HgRepository.java src/org/tmatesoft/hg/repo/HgStatusCollector.java src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java src/org/tmatesoft/hg/repo/Revlog.java src/org/tmatesoft/hg/repo/ext/MqManager.java test/org/tmatesoft/hg/test/TestAuxUtilities.java
diffstat 13 files changed, 955 insertions(+), 56 deletions(-) [+]
--- a/cmdline/org/tmatesoft/hg/console/ChangesetDumpHandler.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/cmdline/org/tmatesoft/hg/console/ChangesetDumpHandler.java	Wed Jul 11 20:40:47 2012 +0200
@@ -106,6 +106,7 @@
 			sb.append('\n');
 		}
 		if (complete) {
+			f.format("phase:       %s\n", cset.getPhase().name());
 			Nodeid p1 = cset.getFirstParentRevision();
 			Nodeid p2 = cset.getSecondParentRevision();
 			int p1x = p1.isNull() ? -1 : repo.getChangelog().getRevisionIndex(p1);
--- a/cmdline/org/tmatesoft/hg/console/Main.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/cmdline/org/tmatesoft/hg/console/Main.java	Wed Jul 11 20:40:47 2012 +0200
@@ -45,7 +45,9 @@
 import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.internal.DigestHelper;
 import org.tmatesoft.hg.internal.PathGlobMatcher;
+import org.tmatesoft.hg.internal.PhasesHelper;
 import org.tmatesoft.hg.internal.RelativePathRewrite;
+import org.tmatesoft.hg.internal.RevisionDescendants;
 import org.tmatesoft.hg.internal.StreamLogFacility;
 import org.tmatesoft.hg.repo.HgBranches;
 import org.tmatesoft.hg.repo.HgChangelog;
@@ -59,12 +61,16 @@
 import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgMergeState;
+import org.tmatesoft.hg.repo.HgParentChildMap;
+import org.tmatesoft.hg.repo.HgPhase;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.repo.HgStatusCollector;
 import org.tmatesoft.hg.repo.HgStatusInspector;
 import org.tmatesoft.hg.repo.HgSubrepoLocation;
 import org.tmatesoft.hg.repo.HgSubrepoLocation.Kind;
+import org.tmatesoft.hg.repo.ext.MqManager;
+import org.tmatesoft.hg.repo.ext.MqManager.PatchRecord;
 import org.tmatesoft.hg.repo.HgWorkingCopyStatusCollector;
 import org.tmatesoft.hg.repo.HgRevisionMap;
 import org.tmatesoft.hg.util.FileWalker;
@@ -102,6 +108,9 @@
 //		m.checkWalkFileRevisions();
 //		m.checkSubProgress();
 //		m.checkFileFlags();
+		m.testMqManager();
+//		m.testRevisionDescendants();
+//		m.dumpPhases();
 //		m.buildFileLog();
 //		m.testConsoleLog();
 //		m.testTreeTraversal();
@@ -124,6 +133,82 @@
 //		m.bunchOfTests();
 	}
 	
+	
+	// TODO as junit tests in 'default'
+	// -R ${system_property:user.home}/hg/test-mq
+	private void testMqManager() throws Exception {
+		MqManager mqManager = new MqManager(hgRepo);
+		mqManager.refresh();
+		int i = 1;
+		System.out.println("Complete patch queue:");
+		for (PatchRecord pr : mqManager.getAllKnownPatches()) {
+			System.out.printf("#%-3d %s from %s\n", i++, pr.getName(), pr.getPatchLocation());
+		}
+		i = 1;
+		System.out.println("Patches from the queue already applied to the repo:");
+		for (PatchRecord pr : mqManager.getAppliedPatches()) {
+			System.out.printf("#%-3d %s, known as cset:%s\n", i++, pr.getName(), pr.getRevision().shortNotation());
+		}
+		boolean allAppliedAreKnown = mqManager.getAllKnownPatches().containsAll(mqManager.getAppliedPatches());
+		System.out.printf("[sanity] allAppliedAreKnown:%b, not yet applied:%d\n", allAppliedAreKnown, mqManager.getQueueSize());
+		Assert.assertTrue(allAppliedAreKnown);
+
+		System.out.printf("Queues: %s, active:%s\n", mqManager.getQueueNames(), mqManager.getActiveQueueName());
+		Assert.assertTrue(mqManager.getQueueNames().size() > 1);
+		Assert.assertTrue(mqManager.getActiveQueueName().length() > 0);
+	}
+	
+	
+	// -R {junit-test-repos}/branches-1
+	private void testRevisionDescendants() throws Exception {
+		int[] roots = new int[] {0, 1, 2, 3, 4, 5};
+		RevisionDescendants[] result = new RevisionDescendants[roots.length];
+		for (int i = 0; i < roots.length; i++) {
+			result[i] = new RevisionDescendants(hgRepo, roots[i]);
+			result[i].build();
+		}
+		for (int i = 0; i < roots.length; i++) {
+			System.out.printf("For root %d descendants are:", roots[i]);
+			for (int j = roots[i], x = hgRepo.getChangelog().getLastRevision(); j <= x; j++) {
+				if (result[i].isDescendant(j)) {
+					System.out.printf("%3d ", j);
+				}
+			}
+			System.out.printf(", isEmpty:%b\n", !result[i].hasDescendants());
+		}
+	}
+	
+	// -R ${system_property:user.home}/hg/test-phases/
+	// TODO as junit test
+	private void dumpPhases() throws Exception {
+		HgPhase[] result1 = new HgPhase[hgRepo.getChangelog().getRevisionCount()];
+		HgPhase[] result2 = new HgPhase[hgRepo.getChangelog().getRevisionCount()];
+		final long start1 = System.nanoTime();
+		HgParentChildMap<HgChangelog> pw = new HgParentChildMap<HgChangelog>(hgRepo.getChangelog());
+		pw.init();
+		final long start1bis = System.nanoTime();
+		PhasesHelper ph = new PhasesHelper(hgRepo, pw);
+		for (int i = 0, l = hgRepo.getChangelog().getLastRevision(); i <= l; i++) {
+			result1[i] = ph.getPhase(i, null);
+		}
+		final long start2 = System.nanoTime();
+		ph = new PhasesHelper(hgRepo);
+		for (int i = 0, l = hgRepo.getChangelog().getLastRevision(); i <= l; i++) {
+			result2[i] = ph.getPhase(i, null);
+		}
+		final long end = System.nanoTime();
+		System.out.printf("With ParentWalker (simulates log command for whole repo): %d ms (pw init: %,d ns)\n", (start2 - start1)/1000000, start1bis - start1);
+		printPhases(result1);
+		System.out.printf("Without ParentWalker (simulates log command for single file): %d ms\n", (end - start2)/1000000);
+		printPhases(result2);
+	}
+	
+	private static void printPhases(HgPhase[] phase) {
+		for (int i = 0; i < phase.length; i++) {
+			System.out.printf("rev:%3d, phase:%s\n", i, phase[i]);
+		}
+	}
+
 	// hg4j repo
 	public void checkWalkFileRevisions() throws Exception {
 		//  hg --debug manifest --rev 150 | grep cmdline/org/tmatesoft/hg/console/Main.java
@@ -131,6 +216,7 @@
 	}
 	
 	// no repo
+	// FIXME as test, perhaps in TestAuxUtilities
 	private void checkSubProgress() {
 		ProgressSupport ps = new ProgressSupport() {
 			private int units;
@@ -176,7 +262,9 @@
 		System.out.println("File: " + file.getFlags(TIP));
 	}
 	
+
 	private void buildFileLog() throws Exception {
+		final long start = System.nanoTime();
 		HgLogCommand cmd = new HgLogCommand(hgRepo);
 		cmd.file("file1", false);
 		cmd.execute(new HgChangesetTreeHandler() {
@@ -193,7 +281,7 @@
 				final boolean isJoin = !parents.first().isNull() && !parents.second().isNull();
 				final boolean isFork = entry.children().size() > 1;
 				final HgChangeset cset = entry.changeset();
-				System.out.printf("%d:%s - %s\n", cset.getRevisionIndex(), cset.getNodeid().shortNotation(), cset.getComment());
+				System.out.printf("%d:%s - %s (%s)\n", cset.getRevisionIndex(), cset.getNodeid().shortNotation(), cset.getComment(), cset.getPhase());
 				if (!isJoin && !isFork && !entry.children().isEmpty()) {
 					System.out.printf("\t=> %s\n", sb);
 				}
@@ -216,6 +304,8 @@
 				}
 			}
 		});
+		final long end = System.nanoTime();
+		System.out.printf("buildFileLog: %,d ms\n", (end-start)/1000000);
 	}
 
 	private void buildFileLogOld() throws Exception {
--- a/src/org/tmatesoft/hg/core/HgChangeset.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/src/org/tmatesoft/hg/core/HgChangeset.java	Wed Jul 11 20:40:47 2012 +0200
@@ -21,8 +21,10 @@
 import java.util.List;
 import java.util.Map;
 
+import org.tmatesoft.hg.internal.PhasesHelper;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
+import org.tmatesoft.hg.repo.HgPhase;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgRuntimeException;
@@ -41,26 +43,37 @@
  * @author TMate Software Ltd.
  */
 public class HgChangeset implements Cloneable {
-	private final HgStatusCollector statusHelper;
-	private final Path.Source pathHelper;
 
-	private HgParentChildMap<HgChangelog> parentHelper;
-
-	//
+	// these get initialized
 	private RawChangeset changeset;
+	private int revNumber;
 	private Nodeid nodeid;
 
-	//
+	class ShareDataStruct {
+		ShareDataStruct(HgStatusCollector statusCollector, Path.Source pathFactory) {
+			statusHelper = statusCollector;
+			pathHelper = pathFactory;
+		}
+		public final HgStatusCollector statusHelper;
+		public final Path.Source pathHelper;
+
+		public HgParentChildMap<HgChangelog> parentHelper;
+		public PhasesHelper phaseHelper;
+	};
+
+	// Helpers/utilities shared among a few instances of HgChangeset
+	private final ShareDataStruct shared;
+
+	// these are built on demand
 	private List<HgFileRevision> modifiedFiles, addedFiles;
 	private List<Path> deletedFiles;
-	private int revNumber;
 	private byte[] parent1, parent2;
+	
 
 	// XXX consider CommandContext with StatusCollector, PathPool etc. Commands optionally get CC through a cons or create new
 	// and pass it around
 	/*package-local*/HgChangeset(HgStatusCollector statusCollector, Path.Source pathFactory) {
-		statusHelper = statusCollector;
-		pathHelper = pathFactory;
+		shared = new ShareDataStruct(statusCollector, pathFactory);
 	}
 
 	/*package-local*/ void init(int localRevNumber, Nodeid nid, RawChangeset rawChangeset) {
@@ -70,16 +83,16 @@
 		modifiedFiles = addedFiles = null;
 		deletedFiles = null;
 		parent1 = parent2 = null;
-		// keep references to parentHelper, statusHelper and pathHelper
+		// keep references to shared (and everything in there: parentHelper, statusHelper, phaseHelper and pathHelper)
 	}
 
 	/*package-local*/ void setParentHelper(HgParentChildMap<HgChangelog> pw) {
-		parentHelper = pw;
-		if (parentHelper != null) {
-			if (parentHelper.getRepo() != statusHelper.getRepo()) {
+		if (pw != null) {
+			if (pw.getRepo() != shared.statusHelper.getRepo()) {
 				throw new IllegalArgumentException();
 			}
 		}
+		shared.parentHelper = pw;
 	}
 
 	/**
@@ -157,7 +170,7 @@
 		// what #files() gives).
 		ArrayList<Path> rv = new ArrayList<Path>(changeset.files().size());
 		for (String name : changeset.files()) {
-			rv.add(pathHelper.path(name));
+			rv.add(shared.pathHelper.path(name));
 		}
 		return rv;
 	}
@@ -211,14 +224,14 @@
 	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
 	public Nodeid getFirstParentRevision() throws HgRuntimeException {
-		if (parentHelper != null) {
-			return parentHelper.safeFirstParent(nodeid);
+		if (shared.parentHelper != null) {
+			return shared.parentHelper.safeFirstParent(nodeid);
 		}
 		// read once for both p1 and p2
 		if (parent1 == null) {
 			parent1 = new byte[20];
 			parent2 = new byte[20];
-			statusHelper.getRepo().getChangelog().parents(revNumber, new int[2], parent1, parent2);
+			getRepo().getChangelog().parents(revNumber, new int[2], parent1, parent2);
 		}
 		return Nodeid.fromBinary(parent1, 0);
 	}
@@ -228,17 +241,36 @@
 	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
 	public Nodeid getSecondParentRevision() throws HgRuntimeException {
-		if (parentHelper != null) {
-			return parentHelper.safeSecondParent(nodeid);
+		if (shared.parentHelper != null) {
+			return shared.parentHelper.safeSecondParent(nodeid);
 		}
 		if (parent2 == null) {
 			parent1 = new byte[20];
 			parent2 = new byte[20];
-			statusHelper.getRepo().getChangelog().parents(revNumber, new int[2], parent1, parent2);
+			getRepo().getChangelog().parents(revNumber, new int[2], parent1, parent2);
 		}
 		return Nodeid.fromBinary(parent2, 0);
 	}
 
+	/**	
+	 * Tells the phase this changeset belongs to.
+	 * @return one of {@link HgPhase} values
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 */
+	public HgPhase getPhase() throws HgRuntimeException {
+		if (shared.phaseHelper == null) {
+			// XXX would be handy to obtain ProgressSupport (perhaps, from statusHelper?)
+			// and pass it to #init(), so that there could be an indication of the file being read and the cache being built
+			synchronized (shared) {
+				// ensure field is initialized only once 
+				if (shared.phaseHelper == null) {
+					shared.phaseHelper = new PhasesHelper(getRepo(), shared.parentHelper);
+				}
+			}
+		}
+		return shared.phaseHelper.getPhase(this);
+	}
+
 	/**
 	 * Create a copy of this changeset 
 	 */
@@ -252,6 +284,10 @@
 			throw new InternalError(ex.toString());
 		}
 	}
+	
+	private HgRepository getRepo() {
+		return shared.statusHelper.getRepo();
+	}
 
 	private /*synchronized*/ void initFileChanges() throws HgRuntimeException {
 		ArrayList<Path> deleted = new ArrayList<Path>();
@@ -259,12 +295,12 @@
 		ArrayList<HgFileRevision> added = new ArrayList<HgFileRevision>();
 		HgStatusCollector.Record r = new HgStatusCollector.Record();
 		try {
-			statusHelper.change(revNumber, r);
+			shared.statusHelper.change(revNumber, r);
 		} catch (CancelledException ex) {
 			// Record can't cancel
 			throw new HgInvalidStateException("Internal error");
 		}
-		final HgRepository repo = statusHelper.getRepo();
+		final HgRepository repo = getRepo();
 		for (Path s : r.getModified()) {
 			Nodeid nid = r.nodeidAfterChange(s);
 			if (nid == null) {
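The new HgChangeset#getPhase() needs no extra setup from API clients: the PhasesHelper is created lazily, once, and then shared among all HgChangeset instances of a single command run. A minimal usage sketch, assuming cset is an HgChangeset handed to a log handler (as in ChangesetDumpHandler above):

    // inside a log handler callback: 'cset' is the HgChangeset passed in by the library
    HgPhase phase = cset.getPhase();                   // builds the shared PhasesHelper on first call
    System.out.printf("%d:%s  phase:%s%n",
            cset.getRevisionIndex(),
            cset.getNodeid().shortNotation(),
            phase.name());                             // Public, Draft, Secret or Undefined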
--- a/src/org/tmatesoft/hg/internal/IntMap.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/src/org/tmatesoft/hg/internal/IntMap.java	Wed Jul 11 20:40:47 2012 +0200
@@ -24,6 +24,7 @@
 
 /**
  * Map implementation that uses plain int keys and performs with log n effectiveness.
+ * May contain null values
  * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/PhasesHelper.java	Wed Jul 11 20:40:47 2012 +0200
@@ -0,0 +1,200 @@
+/*
+ * Copyright (c) 2012 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.repo.HgPhase.Draft;
+import static org.tmatesoft.hg.repo.HgPhase.Secret;
+import static org.tmatesoft.hg.util.LogFacility.Severity.Info;
+import static org.tmatesoft.hg.util.LogFacility.Severity.Warn;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.tmatesoft.hg.core.HgChangeset;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgParentChildMap;
+import org.tmatesoft.hg.repo.HgPhase;
+import org.tmatesoft.hg.repo.HgRepository;
+
+/**
+ * Support to deal with the phases feature of Mercurial (as of Mercurial version 2.1)
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class PhasesHelper {
+
+	private final HgRepository repo;
+	private final HgParentChildMap<HgChangelog> parentHelper;
+	private Boolean repoSupportsPhases;
+	private List<Nodeid> draftPhaseRoots;
+	private List<Nodeid> secretPhaseRoots;
+	private RevisionDescendants[][] phaseDescendants = new RevisionDescendants[HgPhase.values().length][];
+
+	public PhasesHelper(HgRepository hgRepo) {
+		this(hgRepo, null);
+	}
+
+	public PhasesHelper(HgRepository hgRepo, HgParentChildMap<HgChangelog> pw) {
+		repo = hgRepo;
+		parentHelper = pw;
+	}
+
+	public boolean isCapableOfPhases() throws HgInvalidControlFileException {
+		if (null == repoSupportsPhases) {
+			repoSupportsPhases = readRoots();
+		}
+		return repoSupportsPhases.booleanValue();
+	}
+
+
+	public HgPhase getPhase(HgChangeset cset) throws HgInvalidControlFileException {
+		final Nodeid csetRev = cset.getNodeid();
+		final int csetRevIndex = cset.getRevisionIndex();
+		return getPhase(csetRevIndex, csetRev);
+	}
+
+	public HgPhase getPhase(final int csetRevIndex, Nodeid csetRev) throws HgInvalidControlFileException {
+		if (!isCapableOfPhases()) {
+			return HgPhase.Undefined;
+		}
+		// csetRev is only used when parentHelper is available
+		if (parentHelper != null && (csetRev == null || csetRev.isNull())) {
+			csetRev = repo.getChangelog().getRevision(csetRevIndex);
+		}
+					
+		for (HgPhase phase : new HgPhase[] {HgPhase.Secret, HgPhase.Draft }) {
+			List<Nodeid> roots = getPhaseRoots(phase);
+			if (roots.isEmpty()) {
+				continue;
+			}
+			if (parentHelper != null) {
+				if (roots.contains(csetRev)) {
+					return phase;
+				}
+				if (parentHelper.childrenOf(roots).contains(csetRev)) {
+					return phase;
+				}
+			} else {
+				// no parent helper
+			// search all descendants. RevisionDescendants includes the root as well.
+				for (RevisionDescendants rd : getPhaseDescendants(phase)) {
+				// isCandidate is to go straight to another root if the changeset was added later than the current root
+					if (rd.isCandidate(csetRevIndex) && rd.isDescendant(csetRevIndex)) {
+						return phase;
+					}
+				}
+			}
+		}
+		return HgPhase.Public;
+
+	}
+
+	private Boolean readRoots() throws HgInvalidControlFileException {
+		// FIXME shall access phaseroots through HgRepository#repoPathHelper
+		File phaseroots = new File(HgInternals.getRepositoryDir(repo), "store/phaseroots");
+		BufferedReader br = null;
+		try {
+			if (!phaseroots.exists()) {
+				return Boolean.FALSE;
+			}
+			HashMap<HgPhase, List<Nodeid>> phase2roots = new HashMap<HgPhase, List<Nodeid>>();
+			br = new BufferedReader(new FileReader(phaseroots));
+			String line;
+			while ((line = br.readLine()) != null) {
+				String[] lc = line.trim().split("\\s+");
+				if (lc.length == 0) {
+					continue;
+				}
+				if (lc.length != 2) {
+					HgInternals.getContext(repo).getLog().dump(getClass(), Warn, "Bad line in phaseroots:%s", line);
+					continue;
+				}
+				int phaseIndex = Integer.parseInt(lc[0]);
+				Nodeid rootRev = Nodeid.fromAscii(lc[1]);
+				if (!repo.getChangelog().isKnown(rootRev)) {
+					HgInternals.getContext(repo).getLog().dump(getClass(), Warn, "Phase(%d) root node %s doesn't exist in the repository, ignored.", phaseIndex, rootRev);
+					continue;
+				}
+				HgPhase phase = HgPhase.parse(phaseIndex);
+				List<Nodeid> roots = phase2roots.get(phase);
+				if (roots == null) {
+					phase2roots.put(phase, roots = new LinkedList<Nodeid>());
+				}
+				roots.add(rootRev);
+			}
+			draftPhaseRoots = phase2roots.containsKey(Draft) ? phase2roots.get(Draft) : Collections.<Nodeid>emptyList();
+			secretPhaseRoots = phase2roots.containsKey(Secret) ? phase2roots.get(Secret) : Collections.<Nodeid>emptyList();
+		} catch (IOException ex) {
+			throw new HgInvalidControlFileException(ex.toString(), ex, phaseroots);
+		} finally {
+			if (br != null) {
+				try {
+					br.close();
+				} catch (IOException ex) {
+					HgInternals.getContext(repo).getLog().dump(getClass(), Info, ex, null);
+					// ignore the exception otherwise 
+				}
+			}
+		}
+		return Boolean.TRUE;
+	}
+
+	private List<Nodeid> getPhaseRoots(HgPhase phase) {
+		switch (phase) {
+		case Draft : return draftPhaseRoots;
+		case Secret : return secretPhaseRoots;
+		}
+		return Collections.emptyList();
+	}
+
+
+	private RevisionDescendants[] getPhaseDescendants(HgPhase phase) throws HgInvalidControlFileException {
+		int ordinal = phase.ordinal();
+		if (phaseDescendants[ordinal] == null) {
+			phaseDescendants[ordinal] = buildPhaseDescendants(phase);
+		}
+		return phaseDescendants[ordinal];
+	}
+
+	private RevisionDescendants[] buildPhaseDescendants(HgPhase phase) throws HgInvalidControlFileException {
+		int[] roots = toIndexes(getPhaseRoots(phase));
+		RevisionDescendants[] rv = new RevisionDescendants[roots.length];
+		for (int i = 0; i < roots.length; i++) {
+			rv[i] = new RevisionDescendants(repo, roots[i]);
+			rv[i].build();
+		}
+		return rv;
+	}
+	
+	private int[] toIndexes(List<Nodeid> roots) throws HgInvalidControlFileException {
+		int[] rv = new int[roots.size()];
+		for (int i = 0; i < rv.length; i++) {
+			rv[i] = repo.getChangelog().getRevisionIndex(roots.get(i));
+		}
+		return rv;
+	}
+}
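PhasesHelper may also be used directly, with or without an HgParentChildMap (the map pays off when the whole changelog is walked anyway, cf. dumpPhases() in Main.java above). A sketch under the assumption that hgRepo is an open HgRepository:

    // without a parent map the helper falls back to RevisionDescendants
    static void dumpTipPhase(HgRepository hgRepo) throws HgInvalidControlFileException {
        PhasesHelper phases = new PhasesHelper(hgRepo);
        if (phases.isCapableOfPhases()) {              // false when .hg/store/phaseroots is absent
            // the nodeid argument may be null when only the revision index is at hand
            HgPhase p = phases.getPhase(hgRepo.getChangelog().getLastRevision(), null);
            System.out.println("tip is " + p);
        }
    }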
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/RevisionDescendants.java	Wed Jul 11 20:40:47 2012 +0200
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2012 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.util.BitSet;
+
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.repo.HgRepository;
+
+/**
+ * Represents which revisions are descendants of the supplied root revision.
+ * A sort of lightweight alternative to ParentWalker#childrenOf.
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class RevisionDescendants {
+
+	private final HgRepository repo;
+	private final int rootRevIndex;
+	private final int tipRevIndex; // this is the last revision we cache to
+	private final BitSet descendants;
+
+	// in fact, may be refactored to deal not only with changelog, but any revlog (not sure what would be the usecase, though)
+	public RevisionDescendants(HgRepository hgRepo, int revisionIndex) {
+		repo = hgRepo;
+		rootRevIndex = revisionIndex;
+		// even if the tip moves, we still answer correctly for revisions that pass isCandidate()
+		tipRevIndex = repo.getChangelog().getLastRevision(); 
+		if (revisionIndex < 0 || revisionIndex > tipRevIndex) {
+			String m = "Revision to build descendants for shall be in range [%d,%d], not %d";
+			throw new IllegalArgumentException(String.format(m, 0, tipRevIndex, revisionIndex));
+		}
+		descendants = new BitSet(tipRevIndex - rootRevIndex + 1);
+	}
+	
+	public void build() throws HgInvalidControlFileException {
+		final BitSet result = descendants;
+		result.set(0);
+		if (rootRevIndex == tipRevIndex) {
+			return;
+		}
+		repo.getChangelog().indexWalk(rootRevIndex+1, tipRevIndex, new HgChangelog.ParentInspector() {
+			// TODO ParentRevisionInspector, with no parent nodeids, just indexes?
+
+			private int i = 1; // above we start with revision next to rootRevIndex, which is at offset 0
+			public void next(int revisionIndex, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2) {
+				int p1x = parent1 - rootRevIndex;
+				int p2x = parent2 - rootRevIndex;
+				boolean p1IsDescendant = false, p2IsDescendant = false;
+				if (p1x >= 0) { // parent1 is among descendants candidates
+					assert p1x < result.size();
+					p1IsDescendant = result.get(p1x);
+				}
+				if (p2x >= 0) {
+					assert p2x < result.size();
+					p2IsDescendant = result.get(p2x);
+				}
+				//
+				int rx = revisionIndex - rootRevIndex;
+				if (rx != i) {
+					throw new HgInvalidStateException(String.format("Sanity check failed. Revision %d. Expected:%d, was:%d", revisionIndex, rx, i));
+				}
+				// current revision is descendant if any of its parents is descendant
+				result.set(rx, p1IsDescendant || p2IsDescendant);
+				i++;
+			}
+		});
+	}
+
+	// deliberately doesn't allow TIP
+	public boolean isCandidate(int revIndex) {
+		return (revIndex >= rootRevIndex && revIndex <= tipRevIndex) ;
+	}
+
+	public boolean hasDescendants() { // isEmpty is better name?
+		// bit at rootRevIndex is always set
+		return descendants.nextSetBit(rootRevIndex+1) != -1;
+	}
+
+	public boolean isDescendant(int revisionIndex) {
+		assert isCandidate(revisionIndex);
+		int ix = revisionIndex - rootRevIndex;
+		assert ix < descendants.size();
+		return descendants.get(ix);
+	}
+}
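testRevisionDescendants() in Main.java above exercises this class; the essential pattern, assuming hgRepo is an open HgRepository and rootIndex a valid changelog index, boils down to:

    static void printDescendants(HgRepository hgRepo, int rootIndex) throws HgInvalidControlFileException {
        RevisionDescendants rd = new RevisionDescendants(hgRepo, rootIndex);
        rd.build();                                    // single pass over changelog entries root..tip
        for (int i = rootIndex, tip = hgRepo.getChangelog().getLastRevision(); i <= tip; i++) {
            if (rd.isCandidate(i) && rd.isDescendant(i)) {  // bit (i - root) of the BitSet
                System.out.print(i + " ");
            }
        }
        System.out.println();
    }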
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/repo/HgPhase.java	Wed Jul 11 20:40:47 2012 +0200
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2012 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.repo;
+
+/**
+ * Changeset phases are new functionality introduced in Mercurial 2.1
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public enum HgPhase {
+	
+	Public("public"), Draft("draft"), Secret("secret"), Undefined("");
+
+	@SuppressWarnings("unused")
+	private final String hgString;
+
+	private HgPhase(String stringRepresentation) {
+		hgString = stringRepresentation;
+	}
+
+//	public String toMercurialString() {
+//		return hgString;
+//	}
+
+	public static HgPhase parse(int value) {
+		switch (value) {
+		case 0 : return Public;
+		case 1 : return Draft;
+		case 2 : return Secret;
+		}
+		throw new IllegalArgumentException(String.format("Bad phase index: %d", value));
+	}
+}
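HgPhase#parse maps the integer phase index recorded in .hg/store/phaseroots onto the enum constants; a quick sketch of the contract:

    assert HgPhase.parse(0) == HgPhase.Public;
    assert HgPhase.parse(1) == HgPhase.Draft;
    assert HgPhase.parse(2) == HgPhase.Secret;
    // any other value results in IllegalArgumentException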
--- a/src/org/tmatesoft/hg/repo/HgRepository.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRepository.java	Wed Jul 11 20:40:47 2012 +0200
@@ -97,9 +97,9 @@
 	private final File workingDir; // .hg/../
 	private final String repoLocation;
 	private final DataAccessProvider dataAccess;
-	private final PathRewrite normalizePath;
-	private final PathRewrite dataPathHelper;
-	private final PathRewrite repoPathHelper;
+	private final PathRewrite normalizePath; // normalized slashes but otherwise regular file names
+	private final PathRewrite dataPathHelper; // access to file storage area (usually under .hg/store/data/), with filenames mangled  
+	private final PathRewrite repoPathHelper; // access to system files
 	private final SessionContext sessionContext;
 
 	private HgChangelog changelog;
--- a/src/org/tmatesoft/hg/repo/HgStatusCollector.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/src/org/tmatesoft/hg/repo/HgStatusCollector.java	Wed Jul 11 20:40:47 2012 +0200
@@ -56,6 +56,8 @@
 	private final Pool<Path> cacheFilenames;
 	private final ManifestRevision emptyFakeState;
 	private Path.Matcher scope = new Path.Matcher.Any();
+	// @see #detectCopies()
+	private boolean detectCopies = true;
 	
 
 	public HgStatusCollector(HgRepository hgRepo) {
@@ -182,6 +184,30 @@
 		// do not assign null, ever
 		scope = scopeMatcher == null ? new Path.Matcher.Any() : scopeMatcher;
 	}
+
+	/**
+	 * Select whether Collector shall tell "added-new" from "added-by-copy/rename" files.
+	 * This is analogous to '-C' switch of 'hg status' command.
+	 * 
+	 * <p>With copy detection turned off, files continue to be reported as plain 'added' files.
+	 * 
+	 * <p>By default, copy detection is <em>on</em>, as it's reasonably cheap. However,
+	 * in certain scenarios it may be reasonable to turn it off, for example when it's a merge
+	 * of two very different branches and there are a lot of files added/moved.
+	 *  
+	 * Another legitimate reason to turn detection off is if you'd rather not 
+	 * implement {@link HgStatusInspector#copied(Path, Path)} ;)
+	 * 
+	 * @param detect <code>true</code> if copy detection is desirable
+	 */
+	public void detectCopies(boolean detect) {
+		// cpython, revision:72161, p1:72159, p2:72160
+		// p2 comes from another branch with 321 files added (they look copied/moved, however, the isCopy
+		// record is present for only a couple of them). With 2.5 ms per isCopy() operation, almost a second
+		// is spent detecting origins (according to Marc, of little use in this scenario, as it's the second parent 
+		// in the merge) - in fact, most of the time of the status operation
+		detectCopies = detect;
+	}
 	
 	/**
 	 * 'hg status --change REV' command counterpart.
@@ -290,7 +316,7 @@
 			} else {
 				try {
 					Path copyTarget = r2fname;
-					Path copyOrigin = getOriginIfCopy(repo, copyTarget, r1Files, rev1);
+					Path copyOrigin = detectCopies ? getOriginIfCopy(repo, copyTarget, r1Files, rev1) : null;
 					if (copyOrigin != null) {
 						inspector.copied(getPathPool().mangle(copyOrigin) /*pipe through pool, just in case*/, copyTarget);
 					} else {
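A sketch of the new switch from client code (checked exceptions elided); hgRepo is assumed to be an open HgRepository and REV a valid changeset index:

    HgStatusCollector sc = new HgStatusCollector(hgRepo);
    sc.detectCopies(false);                            // copies/renames are reported as plain additions
    HgStatusCollector.Record r = new HgStatusCollector.Record();
    sc.change(REV, r);                                 // 'hg status --change REV' counterpart
    // with detection off, such files arrive via added() rather than copied()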
--- a/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	Wed Jul 11 20:40:47 2012 +0200
@@ -385,29 +385,31 @@
 		Nodeid nid1 = collect.nodeid(fname);
 		HgManifest.Flags flags = collect.flags(fname);
 		HgDirstate.Record r;
+		final HgDirstate ds = getDirstateImpl();
 		if (nid1 == null) {
-			// normal: added?
-			// added: not known at the time of baseRevision, shall report
-			// merged: was not known, report as added?
-			if ((r = getDirstateImpl().checkNormal(fname)) != null) {
+			// not known at the time of baseRevision:
+			// normal, added, merged: either added or copied since base revision.
+			// removed: nothing to report.
+			if (ds.checkNormal(fname) != null || ds.checkMerged(fname) != null) {
 				try {
 					Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision);
 					if (origin != null) {
 						inspector.copied(getPathPool().mangle(origin), fname);
 						return;
 					}
+					// fall-through, report as added
 				} catch (HgInvalidFileException ex) {
 					// report failure and continue status collection
 					inspector.invalid(fname, ex);
 				}
-			} else if ((r = getDirstateImpl().checkAdded(fname)) != null) {
+			} else if ((r = ds.checkAdded(fname)) != null) {
 				if (r.copySource() != null && baseRevNames.contains(r.copySource())) {
-					baseRevNames.remove(r.copySource()); // XXX surely I shall not report rename source as Removed?
+					baseRevNames.remove(r.copySource()); // FIXME likely I shall report rename source as Removed, same as above for Normal?
 					inspector.copied(r.copySource(), fname);
 					return;
 				}
 				// fall-through, report as added
-			} else if (getDirstateImpl().checkRemoved(fname) != null) {
+			} else if (ds.checkRemoved(fname) != null) {
 				// removed: removed file was not known at the time of baseRevision, and we should not report it as removed
 				return;
 			}
@@ -415,7 +417,7 @@
 		} else {
 			// was known; check whether clean or modified
 			Nodeid nidFromDirstate = getDirstateParentManifest().nodeid(fname);
-			if ((r = getDirstateImpl().checkNormal(fname)) != null && nid1.equals(nidFromDirstate)) {
+			if ((r = ds.checkNormal(fname)) != null && nid1.equals(nidFromDirstate)) {
 				// regular file, was the same up to WC initialization. Check if was modified since, and, if not, report right away
 				// same code as in #checkLocalStatusAgainstFile
 				final boolean timestampEqual = f.lastModified() == r.modificationTime(), sizeEqual = r.size() == f.length();
@@ -439,7 +441,7 @@
 				// or nodeid in dirstate is different, but local change might have brought it back to baseRevision state)
 				// FALL THROUGH
 			}
-			if (r != null || (r = getDirstateImpl().checkMerged(fname)) != null || (r = getDirstateImpl().checkAdded(fname)) != null) {
+			if (r != null || (r = ds.checkMerged(fname)) != null || (r = ds.checkAdded(fname)) != null) {
 				try {
 					// check actual content to see actual changes
 					// when added - seems to be the case of a file added once again, hence need to check if content is different
--- a/src/org/tmatesoft/hg/repo/Revlog.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/src/org/tmatesoft/hg/repo/Revlog.java	Wed Jul 11 20:40:47 2012 +0200
@@ -28,6 +28,7 @@
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.DataAccess;
 import org.tmatesoft.hg.internal.Experimental;
+import org.tmatesoft.hg.internal.IntMap;
 import org.tmatesoft.hg.internal.Preview;
 import org.tmatesoft.hg.internal.RevlogStream;
 import org.tmatesoft.hg.util.Adaptable;
@@ -287,33 +288,82 @@
 	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
 	@Experimental
-	public void indexWalk(int start, int end, final Revlog.Inspector inspector) throws HgRuntimeException {
+	public final void indexWalk(int start, int end, final Revlog.Inspector inspector) throws HgRuntimeException { 
 		int lastRev = getLastRevision();
-		if (start == TIP) {
-			start = lastRev;
-		}
+		final int _start = start == TIP ? lastRev : start;
 		if (end == TIP) {
 			end = lastRev;
 		}
 		final RevisionInspector revisionInsp = Adaptable.Factory.getAdapter(inspector, RevisionInspector.class, null);
 		final ParentInspector parentInsp = Adaptable.Factory.getAdapter(inspector, ParentInspector.class, null);
-		final Nodeid[] allRevisions = parentInsp == null ? null : new Nodeid[end - start + 1]; 
+		final Nodeid[] allRevisions = parentInsp == null ? null : new Nodeid[end - _start + 1];
+		// next we build the set of parent indexes that fall outside the iterated range,
+		// i.e. those parents we need to read separately. See Issue 31 for details.
+		final int[]      firstParentIndexes = parentInsp == null || _start == 0 ? null : new int[allRevisions.length];
+		final int[]     secondParentIndexes = parentInsp == null || _start == 0 ? null : new int[allRevisions.length];
+		final IntMap<Nodeid> missingParents = parentInsp == null || _start == 0 ? null : new IntMap<Nodeid>(16); 
 
-		content.iterate(start, end, false, new RevlogStream.Inspector() {
+		content.iterate(_start, end, false, new RevlogStream.Inspector() {
+			private int i = 0;
 			
-			public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
+			public void next(int revisionIndex, int actualLen, int baseRevIndex, int linkRevIndex, int parent1RevIndex, int parent2RevIndex, byte[] nodeid, DataAccess data) {
 				Nodeid nid = Nodeid.fromBinary(nodeid, 0);
 				if (revisionInsp != null) {
-					revisionInsp.next(revisionNumber, nid, linkRevision);
+					revisionInsp.next(revisionIndex, nid, linkRevIndex);
 				}
 				if (parentInsp != null) {
-					Nodeid p1 = parent1Revision == -1 ? Nodeid.NULL : allRevisions[parent1Revision];
-					Nodeid p2 = parent2Revision == -1 ? Nodeid.NULL : allRevisions[parent2Revision];
-					allRevisions[revisionNumber] = nid;
-					parentInsp.next(revisionNumber, nid, parent1Revision, parent2Revision, p1, p2);
+					allRevisions[i] = nid;
+					if (_start > 0) {
+						firstParentIndexes[i] = parent1RevIndex;
+						secondParentIndexes[i] = parent2RevIndex;
+						if (parent1RevIndex < _start && parent1RevIndex >= 0) {
+							missingParents.put(parent1RevIndex, null);
+						}
+						if (parent2RevIndex < _start && parent2RevIndex >= 0) {
+							missingParents.put(parent2RevIndex, null);
+						}
+					} else {
+						Nodeid p1 = parent1RevIndex == -1 ? Nodeid.NULL : allRevisions[parent1RevIndex];
+						Nodeid p2 = parent2RevIndex == -1 ? Nodeid.NULL : allRevisions[parent2RevIndex];
+						parentInsp.next(revisionIndex, allRevisions[i], parent1RevIndex, parent2RevIndex, p1, p2);
+					}
+					i++;
 				}
 			}
 		});
+		if (parentInsp != null && _start > 0) {
+			assert missingParents.size() > 0; // in fact, more relaxed than assert. rather 'assume'
+			// TODO int[] IntMap#keys() or even sort of iterator that can modify values
+			for (int k = missingParents.firstKey(), l = missingParents.lastKey(); k <= l; k++) {
+				if (missingParents.containsKey(k)) {
+					Nodeid nid = getRepo().getChangelog().getRevision(k);
+					missingParents.put(k, nid);
+				}
+			}
+
+			for (int i = 0, revNum = _start; i < allRevisions.length; i++, revNum++) {
+				int riP1 = firstParentIndexes[i];
+				int riP2 = secondParentIndexes[i];
+				Nodeid p1, p2;
+				p1 = p2 = Nodeid.NULL;
+				if (riP1 >= _start) {
+					// p1 of revNum's revision falls within the iterated range
+					// (don't check for riP1<end as I assume parents come prior to children in the changelog)
+					p1 = allRevisions[riP1 - _start];
+				} else if (riP1 != -1) {
+					assert riP1 >=0 && riP1 < _start;
+					p1 = missingParents.get(riP1);
+				}
+				// same for p2
+				if (riP2 >= _start) {
+					p2 = allRevisions[riP2 - _start];
+				} else if (riP2 != -1) {
+					assert riP2 >= 0 && riP2 < _start;
+					p2 = missingParents.get(riP2);
+				}
+				parentInsp.next(revNum, allRevisions[i], riP1, riP2, p1, p2);
+			}
+		}
 	}
 
 	/**
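With the reworked indexWalk, a ParentInspector now receives real parent nodeids even when the walk does not start at revision 0: parents that fall before the iterated range are read separately (Issue 31). A sketch against the changelog, assuming hgRepo is an open HgRepository with at least four revisions:

    hgRepo.getChangelog().indexWalk(1, 3, new HgChangelog.ParentInspector() {
        public void next(int revisionIndex, Nodeid revision, int parent1, int parent2,
                Nodeid nidParent1, Nodeid nidParent2) {
            // nidParent1/nidParent2 are resolved even for parents with index < 1
            System.out.printf("%d: p1=%d p2=%d%n", revisionIndex, parent1, parent2);
        }
    });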
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/repo/ext/MqManager.java	Wed Jul 11 20:40:47 2012 +0200
@@ -0,0 +1,325 @@
+/*
+ * Copyright (c) 2012 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.repo.ext;
+
+import static org.tmatesoft.hg.util.LogFacility.Severity.Warn;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgInvalidFileException;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.LogFacility;
+import org.tmatesoft.hg.util.Path;
+
+/**
+ * Mercurial Queues Support. 
+ * Access to MqExtension functionality.
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class MqManager {
+	
+	private static final String PATCHES_DIR = "patches";
+
+	private final HgRepository repo;
+	private List<PatchRecord> applied = Collections.emptyList();
+	private List<PatchRecord> allKnown = Collections.emptyList();
+	private List<String> queueNames = Collections.emptyList();
+	private String activeQueue = PATCHES_DIR;
+
+	public MqManager(HgRepository hgRepo) {
+		repo = hgRepo;
+	}
+	
+	/**
+	 * Updates the manager with the up-to-date state of the Mercurial queues.
+	 */
+	public void refresh() throws HgInvalidControlFileException {
+		applied = allKnown = Collections.emptyList();
+		queueNames = Collections.emptyList();
+		File repoDir = HgInternals.getRepositoryDir(repo);
+		final LogFacility log = HgInternals.getContext(repo).getLog();
+		try {
+			File queues = new File(repoDir, "patches.queues");
+			if (queues.isFile()) {
+				LineReader lr = new LineReader(queues, log).trimLines(true).skipEmpty(true);
+				lr.read(new SimpleLineCollector(), queueNames = new LinkedList<String>());
+			}
+			final String queueLocation; // path under .hg to patch queue information (status, series and diff files)
+			File activeQueueFile = new File(repoDir, "patches.queue");
+			// file is there only if it's not default queue ('patches') that is active
+			if (activeQueueFile.isFile()) {
+				ArrayList<String> contents = new ArrayList<String>();
+				new LineReader(activeQueueFile, log).read(new SimpleLineCollector(), contents);
+				if (contents.isEmpty()) {
+					log.dump(getClass(), Warn, "File %s with active queue name is empty", activeQueueFile.getName());
+					activeQueue = PATCHES_DIR;
+					queueLocation = PATCHES_DIR + '/';
+				} else {
+					activeQueue = contents.get(0);
+					queueLocation = PATCHES_DIR + '-' + activeQueue +  '/';
+				}
+			} else {
+				activeQueue = PATCHES_DIR;
+				queueLocation = PATCHES_DIR + '/';
+			}
+			final Path.Source patchLocation = new Path.Source() {
+				
+				public Path path(CharSequence p) {
+					StringBuilder sb = new StringBuilder(64);
+					sb.append(".hg/");
+					sb.append(queueLocation);
+					sb.append(p);
+					return Path.create(sb);
+				}
+			};
+			final File fileStatus = new File(repoDir, queueLocation + "status");
+			final File fileSeries = new File(repoDir, queueLocation + "series");
+			if (fileStatus.isFile()) {
+				new LineReader(fileStatus, log).read(new LineConsumer<List<PatchRecord>>() {
+	
+					public boolean consume(String line, List<PatchRecord> result) throws IOException {
+						int sep = line.indexOf(':');
+						if (sep == -1) {
+							log.dump(MqManager.class, Warn, "Bad line in %s:%s", fileStatus.getPath(), line);
+							return true;
+						}
+						Nodeid nid = Nodeid.fromAscii(line.substring(0, sep));
+						String name = new String(line.substring(sep+1));
+						result.add(new PatchRecord(nid, name, patchLocation.path(name)));
+						return true;
+					}
+				}, applied = new LinkedList<PatchRecord>());
+			}
+			if (fileSeries.isFile()) {
+				final Map<String,PatchRecord> name2patch = new HashMap<String, PatchRecord>();
+				for (PatchRecord pr : applied) {
+					name2patch.put(pr.getName(), pr);
+				}
+				LinkedList<String> knownPatchNames = new LinkedList<String>();
+				new LineReader(fileSeries, log).read(new SimpleLineCollector(), knownPatchNames);
+				// XXX read other queues?
+				allKnown = new ArrayList<PatchRecord>(knownPatchNames.size());
+				for (String name : knownPatchNames) {
+					PatchRecord pr = name2patch.get(name);
+					if (pr == null) {
+						pr = new PatchRecord(null, name, patchLocation.path(name));
+					}
+					allKnown.add(pr);
+				}
+			}
+		} catch (HgInvalidFileException ex) {
+			HgInvalidControlFileException th = new HgInvalidControlFileException(ex.getMessage(), ex.getCause(), ex.getFile());
+			th.setStackTrace(ex.getStackTrace());
+			throw th;
+		}
+	}
+	
+	static class SimpleLineCollector implements LineConsumer<Collection<String>> {
+
+		public boolean consume(String line, Collection<String> result) throws IOException {
+			result.add(line);
+			return true;
+		}
+	}
+	
+	/**
+	 * Number of patches not yet applied
+	 * @return positive value when there are patches not yet applied, zero otherwise
+	 */
+	public int getQueueSize() {
+		return getAllKnownPatches().size() - getAppliedPatches().size();
+	}
+
+	/**
+	 * Subset of the patches from the queue that were already applied to the repository
+	 * <p>Analog of 'hg qapplied'
+	 * 
+	 * <p>Clients shall call {@link #refresh()} prior to first use
+	 * @return collection of records in no particular order, may be empty if none applied
+	 */
+	public List<PatchRecord> getAppliedPatches() {
+		return Collections.unmodifiableList(applied);
+	}
+	
+	/**
+	 * All of the patches in the active queue that MQ knows about for this repository
+	 * 
+	 * <p>Clients shall call {@link #refresh()} prior to first use
+	 * @return collection of records in no particular order, may be empty if there are no patches in the queue
+	 */
+	public List<PatchRecord> getAllKnownPatches() {
+		return Collections.unmodifiableList(allKnown);
+	}
+	
+	/**
+	 * Name of the patch queue <code>hg qqueue --active</code> which is active now.
+	 * @return patch queue name
+	 */
+	public String getActiveQueueName() {
+		return activeQueue;
+	}
+
+	/**
+	 * Patch queues known in the repository, <code>hg qqueue -l</code> analog.
+	 * There's at least one patch queue (the default one named 'patches'). Only one patch queue at a time is active.
+	 * 
+	 * @return names of patch queues
+	 */
+	public List<String> getQueueNames() {
+		return Collections.unmodifiableList(queueNames);
+	}
+	
+	public class PatchRecord {
+		private final Nodeid nodeid;
+		private final String name;
+		private final Path location;
+		
+		// hashCode/equals might be useful if cons becomes public
+
+		PatchRecord(Nodeid revision, String name, Path diffLocation) {
+			nodeid = revision;
+			this.name = name;
+			this.location = diffLocation;
+		}
+
+		/**
+		 * Identifies changeset of the patch that has been applied to the repository
+		 * 
+		 * @return changeset revision or <code>null</code> if this patch is not yet applied
+		 */
+		public Nodeid getRevision() {
+			return nodeid;
+		}
+
+		/**
+		 * Identifies patch, either based on a user-supplied name (<code>hg qnew <i>patch-name</i></code>) or 
+		 * an automatically generated name (like <code><i>revisionIndex</i>.diff</code> for imported changesets).
+		 * Clients shall not rely on this naming scheme, though.
+		 * 
+		 * @return never <code>null</code>
+		 */
+		public String getName() {
+			return name;
+		}
+		
+		/**
+		 * Location of diff file with the patch, relative to repository root
+		 * @return path to the patch, never <code>null</code>
+		 */
+		public Path getPatchLocation() {
+			return location;
+		}
+	}
+
+	// TODO refine API and extract into separate classes
+
+	interface LineConsumer<T> {
+//		boolean begin(File f, T paramObj) throws IOException;
+		boolean consume(String line, T paramObj) throws IOException;
+//		boolean end(File f, T paramObj) throws IOException;
+	}
+	
+	class LineReader {
+		
+		private final File file;
+		private final LogFacility log;
+		private boolean trimLines = true;
+		private boolean skipEmpty = true;
+		private String ignoreThatStarts = null;
+
+		LineReader(File f, LogFacility logFacility) {
+			file = f;
+			log = logFacility;
+		}
+		
+		/**
+		 * default: <code>true</code>
+		 * <code>false</code> to return line as is
+		 */
+		LineReader trimLines(boolean trim) {
+			trimLines = trim;
+			return this;
+		}
+		
+		/**
+		 * default: <code>true</code>
+		 * <code>false</code> to pass empty lines to consumer
+		 */
+		LineReader skipEmpty(boolean skip) {
+			skipEmpty = skip;
+			return this;
+		}
+		
+		/**
+		 * default: doesn't skip any line.
+		 * set e.g. to "#" or "//" to skip lines that start with such prefix
+		 */
+		LineReader ignoreLineComments(String lineStart) {
+			ignoreThatStarts = lineStart;
+			return this;
+		}
+
+		<T> void read(LineConsumer<T> consumer, T paramObj) throws HgInvalidFileException {
+			BufferedReader statusFileReader = null;
+			try {
+//				consumer.begin(file, paramObj);
+				statusFileReader = new BufferedReader(new FileReader(file));
+				String line;
+				boolean ok = true;
+				while (ok && (line = statusFileReader.readLine()) != null) {
+					if (trimLines) {
+						line = line.trim();
+					}
+					if (ignoreThatStarts != null && line.startsWith(ignoreThatStarts)) {
+						continue;
+					}
+					if (!skipEmpty || line.length() > 0) {
+						ok = consumer.consume(line, paramObj);
+					}
+				}
+			} catch (IOException ex) {
+				throw new HgInvalidFileException(ex.getMessage(), ex, file);
+			} finally {
+				try {
+					statusFileReader.close();
+				} catch (IOException ex) {
+					log.dump(MqManager.class, Warn, ex, null);
+				}
+//				try {
+//					consumer.end(file, paramObj);
+//				} catch (IOException ex) {
+//					log.warn(MqManager.class, ex, null);
+//				}
+			}
+		}
+	}
+}
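testMqManager() in Main.java above shows the intended use; the core sequence, assuming hgRepo is an HgRepository with MQ data present, is:

    static void dumpQueue(HgRepository hgRepo) throws HgInvalidControlFileException {
        MqManager mq = new MqManager(hgRepo);
        mq.refresh();                                  // reads patches.queues, patches.queue, series and status
        System.out.println("active queue: " + mq.getActiveQueueName());
        for (MqManager.PatchRecord pr : mq.getAppliedPatches()) {
            System.out.println(pr.getName() + " -> " + pr.getRevision().shortNotation());
        }
        System.out.println("not yet applied: " + mq.getQueueSize());
    }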
--- a/test/org/tmatesoft/hg/test/TestAuxUtilities.java	Wed Jul 11 19:06:30 2012 +0200
+++ b/test/org/tmatesoft/hg/test/TestAuxUtilities.java	Wed Jul 11 20:40:47 2012 +0200
@@ -241,27 +241,43 @@
 				Assert.assertEquals(fileNode.getRevision(localRevision), revision);
 			}
 		});
-		fileNode.indexWalk(0, TIP, new HgDataFile.ParentInspector() {
-			int i = 0;
-			Nodeid[] all = new Nodeid[fileNode.getRevisionCount()];
+		class ParentInspectorCheck implements HgDataFile.ParentInspector {
+			private int i, c;
+			private Nodeid[] all;
+			private final int start;
+			
+			public ParentInspectorCheck(int start, int total) {
+				this.start = start;
+				i = start; // revision index being iterated
+				c = 0; // index/counter of visited revisions
+				all = new Nodeid[total];
+			}
 
 			public void next(int localRevision, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2) {
 				Assert.assertEquals(i++, localRevision);
-				all[localRevision] = revision;
+				all[c++] = revision;
 				Assert.assertNotNull(revision);
 				Assert.assertFalse(localRevision == 0 && (parent1 != -1 || parent2 != -1));
 				Assert.assertFalse(localRevision > 0 && parent1 == -1 && parent2 == -1);
 				if (parent1 != -1) {
 					Assert.assertNotNull(nidParent1);
-					// deliberately ==, not asserEquals to ensure same instance
-					Assert.assertTrue(nidParent1 == all[parent1]);  
+					if (parent1 >= start) {
+						// deliberately ==, not assertEquals, to ensure same instance
+						Assert.assertTrue(nidParent1 == all[parent1-start]);  
+					}
 				}
 				if (parent2 != -1) {
 					Assert.assertNotNull(nidParent2);
-					Assert.assertTrue(nidParent2 == all[parent2]);  
+					if (parent2 >= start) {
+						Assert.assertTrue(nidParent2 == all[parent2-start]);
+					}
 				}
 			}
-		});
+		}; 
+		fileNode.indexWalk(0, TIP, new ParentInspectorCheck(0, fileNode.getRevisionCount()));
+		assert fileNode.getRevisionCount() > 2 : "prereq"; // need at least a few revisions
+		// there used to be a defect in #walk impl, which assumed all parents come prior to a revision
+		fileNode.indexWalk(1, 3, new ParentInspectorCheck(1, 3));
 	}
 
 	@Test