changeset 661:5d8798772cca

Merge branch smartgit-4.5 (no actual changes, merely to denote branch is inactive
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Wed, 10 Jul 2013 11:48:55 +0200
parents 4fd317a2fecf (diff) 49f0749307a0 (current diff)
children af5223b86dd3
files src/org/tmatesoft/hg/repo/HgManifest.java src/org/tmatesoft/hg/repo/HgRepositoryLock.java src/org/tmatesoft/hg/util/Path.java
diffstat 153 files changed, 8756 insertions(+), 2864 deletions(-) [+]
line wrap: on
line diff
--- a/.hgtags	Thu Jun 06 14:21:11 2013 +0200
+++ b/.hgtags	Wed Jul 10 11:48:55 2013 +0200
@@ -6,3 +6,7 @@
 3ca4ae7bdd3890b8ed89bfea1b42af593e04b373 v1.0.0
 2103388d4010bff6dcf8d2e4c42a67b9d95aa646 v1.1m2
 32453f30de07efe9d7b386c084ebd607dbeaba2b v1.1m3
+f41dd9a3b8af1a5f74b533cd9f00b7d77423cc04 v1.1m4
+5afc7eedb3dd109f75e5f5a02dd88c9c4e7b7f3b v1.1rc1
+54e16ab771ec03d69cb05e38622ebdf9c3302c8c v1.1rc2
+2f33f102a8fa59274a27ebbe1c2903cecac6c5d5 v1.1.0
--- a/COPYING	Thu Jun 06 14:21:11 2013 +0200
+++ b/COPYING	Wed Jul 10 11:48:55 2013 +0200
@@ -1,4 +1,4 @@
-Copyright (C) 2010-2012 TMate Software Ltd
+Copyright (C) 2010-2013 TMate Software Ltd
 
 This program is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
--- a/build.gradle	Thu Jun 06 14:21:11 2013 +0200
+++ b/build.gradle	Wed Jul 10 11:48:55 2013 +0200
@@ -1,9 +1,22 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
  */
 def isRelease = false
 
-  version = '1.1.0-SNAPSHOT'
+  version = '1.2.0-SNAPSHOT'
   description = 'Pure Java API and Toolkit for Mercurial DVCS'
   group = 'org.tmatesoft.hg4j'
   
--- a/build.xml	Thu Jun 06 14:21:11 2013 +0200
+++ b/build.xml	Wed Jul 10 11:48:55 2013 +0200
@@ -27,7 +27,7 @@
 
 	<property name="junit.jar" value="lib/junit-4.8.2.jar" />
 	<property name="ver.qualifier" value="" />
-	<property name="version.lib" value="1.1.0" />
+	<property name="version.lib" value="1.2" />
 	<property name="version.jar" value="${version.lib}${ver.qualifier}" />
 	<property name="compile-with-debug" value="yes"/>
 
@@ -84,12 +84,14 @@
 			<test name="org.tmatesoft.hg.test.TestIntMap" />
 			<test name="org.tmatesoft.hg.test.TestAuxUtilities" />
 			<test name="org.tmatesoft.hg.test.TestConfigFileParser" />
+			<test name="org.tmatesoft.hg.test.TestInflaterDataAccess" />
 			<test name="org.tmatesoft.hg.test.TestHistory" />
 			<test name="org.tmatesoft.hg.test.TestManifest" />
 			<test name="org.tmatesoft.hg.test.TestStatus" />
 			<test name="org.tmatesoft.hg.test.TestStorePath" />
 			<test name="org.tmatesoft.hg.test.TestNewlineFilter" />
 			<test name="org.tmatesoft.hg.test.TestIgnore" />
+			<test name="org.tmatesoft.hg.test.TestConfigFiles" />
 			<test name="org.tmatesoft.hg.test.TestDirstate" />
 			<test name="org.tmatesoft.hg.test.TestBranches" />
 			<test name="org.tmatesoft.hg.test.TestByteChannel" />
@@ -107,6 +109,11 @@
 			<test name="org.tmatesoft.hg.test.TestCommit" />
 			<test name="org.tmatesoft.hg.test.TestBlame" />
 			<test name="org.tmatesoft.hg.test.TestDiffHelper" />
+			<test name="org.tmatesoft.hg.test.TestRepositoryLock" />
+			<test name="org.tmatesoft.hg.test.TestRevisionSet" />
+			<test name="org.tmatesoft.hg.test.TestRevisionMaps" />
+			<test name="org.tmatesoft.hg.test.TestPush" />
+			<test name="org.tmatesoft.hg.test.ComplexTest" />
 		</junit>
 	</target>
 
@@ -134,7 +141,7 @@
 
 	<target name="build-lib">
 		<mkdir dir="bin" />
-		<javac srcdir="src" destdir="bin" debug="${compile-with-debug}" includeantruntime="no"/>
+		<javac srcdir="src" destdir="bin" debug="${compile-with-debug}" includeantruntime="no" source="1.5" encoding="UTF-8"/>
 		<jar destfile="${hg4j.jar}">
 			<fileset dir="bin/">
 				<include name="org/tmatesoft/hg/core/**" />
@@ -148,7 +155,7 @@
 
 	<target name="build-tests" depends="build-lib">
 		<mkdir dir="bin" />
-		<javac srcdir="test" destdir="bin" debug="${compile-with-debug}" >
+		<javac srcdir="test" destdir="bin" debug="${compile-with-debug}" includeantruntime="no" source="1.5" encoding="UTF-8">
 			<classpath>
 				<pathelement location="${hg4j.jar}"/>
 				<pathelement location="${junit.jar}"/>
@@ -164,7 +171,7 @@
 
 	<target name="build-cmdline" depends="build-lib">
 		<mkdir dir="bin" />
-		<javac srcdir="cmdline" destdir="bin" debug="${compile-with-debug}">
+		<javac srcdir="cmdline" destdir="bin" debug="${compile-with-debug}" includeantruntime="no" source="1.5" encoding="UTF-8">
 			<classpath>
 				<pathelement location="${hg4j.jar}"/>
 				<pathelement location="${junit.jar}"/>
--- a/cmdline/org/tmatesoft/hg/console/Bundle.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/cmdline/org/tmatesoft/hg/console/Bundle.java	Wed Jul 10 11:48:55 2013 +0200
@@ -29,6 +29,7 @@
 import org.tmatesoft.hg.repo.HgBundle.GroupElement;
 import org.tmatesoft.hg.repo.HgBundle.Inspector;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 
 
 /**
@@ -60,7 +61,7 @@
 		hgBundle.changes(hgRepo, new HgChangelog.Inspector() {
 			private final HgChangelog changelog = hgRepo.getChangelog();
 			
-			public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
+			public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
 				if (changelog.isKnown(nodeid)) {
 					System.out.print("+");
 				} else {
@@ -99,7 +100,7 @@
 
  */
 
-	public static void dump(HgBundle hgBundle) throws HgException {
+	public static void dump(HgBundle hgBundle) throws HgException, HgRuntimeException {
 		Dump dump = new Dump();
 		hgBundle.inspectAll(dump);
 		System.out.println("Total files:" + dump.names.size());
--- a/cmdline/org/tmatesoft/hg/console/ChangesetDumpHandler.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/cmdline/org/tmatesoft/hg/console/ChangesetDumpHandler.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -26,6 +26,7 @@
 import org.tmatesoft.hg.core.HgFileRevision;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Path;
 
 /**
@@ -43,7 +44,7 @@
 	private final HgRepository repo;
 	private final int tip;
 
-	public ChangesetDumpHandler(HgRepository hgRepo) {
+	public ChangesetDumpHandler(HgRepository hgRepo) throws HgRuntimeException {
 		repo = hgRepo;
 		tip = hgRepo.getChangelog().getLastRevision();
 	}
@@ -63,7 +64,7 @@
 		return this;
 	}
 
-	public void cset(HgChangeset changeset) {
+	public void cset(HgChangeset changeset) throws HgRuntimeException {
 		try {
 			final String s = print(changeset);
 			if (reverseOrder) {
@@ -89,7 +90,7 @@
 		l.clear();
 	}
 
-	private String print(HgChangeset cset) throws HgException {
+	private String print(HgChangeset cset) throws HgException, HgRuntimeException {
 		StringBuilder sb = new StringBuilder();
 		Formatter f = new Formatter(sb);
 		final Nodeid csetNodeid = cset.getNodeid();
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cmdline/org/tmatesoft/hg/console/Commit.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.console;
+
+import java.util.Collections;
+
+import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgRepoFacade;
+import org.tmatesoft.hg.util.Outcome;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class Commit {
+
+	public static void main(String[] args) throws Exception {
+		Options cmdLineOpts = Options.parse(args, Collections.<String>emptySet());
+		HgRepoFacade repo = new HgRepoFacade();
+		if (!repo.init(cmdLineOpts.findRepository())) {
+			System.err.printf("Can't find repository in: %s\n", repo.getRepository().getLocation());
+			return;
+		}
+		String message = cmdLineOpts.getSingle("-m", "--message");
+		if (message == null) {
+			System.err.println("Need a commit message");
+			return;
+		}
+		HgCommitCommand cmd = repo.createCommitCommand();
+		cmd.message(message);
+		Outcome o = cmd.execute();
+		if (!o.isOk()) {
+			System.err.println(o.getMessage());
+			return;
+		}
+		System.out.printf("New changeset: %s\n", cmd.getCommittedRevision().shortNotation());
+	}
+}
--- a/cmdline/org/tmatesoft/hg/console/Log.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/cmdline/org/tmatesoft/hg/console/Log.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2010-2012 TMate Software Ltd
+ * Copyright (c) 2010-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -26,6 +26,7 @@
 import org.tmatesoft.hg.core.HgLogCommand;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.ProgressSupport;
 
@@ -124,7 +125,7 @@
 	private static final class Dump extends ChangesetDumpHandler implements HgChangesetHandler.WithCopyHistory {
 		private final RenameDumpHandler renameHandlerDelegate;
 
-		public Dump(HgRepository hgRepo) {
+		public Dump(HgRepository hgRepo) throws HgRuntimeException {
 			super(hgRepo);
 			renameHandlerDelegate = new RenameDumpHandler();
 		}
--- a/cmdline/org/tmatesoft/hg/console/Main.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/cmdline/org/tmatesoft/hg/console/Main.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -57,6 +57,7 @@
 import org.tmatesoft.hg.repo.HgDirstate.Record;
 import org.tmatesoft.hg.repo.HgIgnore;
 import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgMergeState;
@@ -102,12 +103,11 @@
 
 	public static void main(String[] args) throws Exception {
 		Main m = new Main(args);
-		m.checkFileSneakerPerformance();
+//		m.checkFileSneakerPerformance();
 //		m.testRevert();
 //		m.testCheckout();
 //		m.tryExtensions();
 //		m.dumpBookmarks();
-//		m.readConfigFile();
 //		m.dumpCommitLastMessage();
 //		m.buildFileLog();
 //		m.testConsoleLog();
@@ -119,7 +119,7 @@
 //		m.testEffectiveFileLog();
 //		m.testMergeState();
 //		m.testFileStatus();
-//		m.dumpBranches();
+		m.dumpBranches();
 //		m.inflaterLengthException();
 //		m.dumpIgnored();
 //		m.dumpDirstate();
@@ -210,19 +210,6 @@
 		}
 	}
 
-	// TODO as test
-	private void readConfigFile() throws Exception {
-		ConfigFile configFile = new ConfigFile(hgRepo.getSessionContext());
-		configFile.addLocation(new File(System.getProperty("user.home"), "test-cfg/aaa/config1"));
-		for (String s : configFile.getSectionNames()) {
-			System.out.printf("[%s]\n", s);
-			for (Map.Entry<String, String> e : configFile.getSection(s).entrySet()) {
-				System.out.printf("%s = %s\n", e.getKey(), e.getValue());
-			}
-		}
-		
-	}
-
 	private void dumpCommitLastMessage() throws Exception {
 		System.out.println(hgRepo.getCommitLastMessage());
 	}
@@ -233,7 +220,7 @@
 		cmd.file("a2.txt", true, false);
 		final int[] count = new int[] { 0 };
 		class MyHandler implements HgChangesetTreeHandler, Adaptable {
-			public void treeElement(HgChangesetTreeHandler.TreeElement entry) {
+			public void treeElement(HgChangesetTreeHandler.TreeElement entry) throws HgRuntimeException {
 				StringBuilder sb = new StringBuilder();
 				HashSet<Nodeid> test = new HashSet<Nodeid>(entry.childRevisions());
 				for (HgChangeset cc : entry.children()) {
@@ -533,7 +520,7 @@
 		System.out.println(bac.toArray().length);
 	}
 	
-	private void dumpIgnored() {
+	private void dumpIgnored() throws HgInvalidControlFileException {
 		String[] toCheck = new String[] {"design.txt", "src/com/tmate/hgkit/ll/Changelog.java", "src/Extras.java", "bin/com/tmate/hgkit/ll/Changelog.class"};
 		HgIgnore ignore = hgRepo.getIgnore();
 		for (int i = 0; i < toCheck.length; i++) {
@@ -623,7 +610,7 @@
 			public void dir(Path p) {
 				System.out.println(p);
 			}
-			public void file(HgFileRevision fileRevision) {
+			public void file(HgFileRevision fileRevision) throws HgRuntimeException {
 				System.out.print(fileRevision.getRevision());;
 				System.out.print("   ");
 				System.out.printf("%s %s", fileRevision.getParents().first().shortNotation(), fileRevision.getParents().second().shortNotation());
@@ -686,7 +673,7 @@
 	}
 
 
-	private void testStatusInternals() throws HgException {
+	private void testStatusInternals() throws HgException, HgRuntimeException {
 		HgDataFile n = hgRepo.getFileNode(Path.create("design.txt"));
 		for (String s : new String[] {"011dfd44417c72bd9e54cf89b82828f661b700ed", "e5529faa06d53e06a816e56d218115b42782f1ba", "c18e7111f1fc89a80a00f6a39d51288289a382fc"}) {
 			// expected: 359, 2123, 3079
--- a/cmdline/org/tmatesoft/hg/console/Manifest.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/cmdline/org/tmatesoft/hg/console/Manifest.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2010-2012 TMate Software Ltd
+ * Copyright (c) 2010-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -27,6 +27,7 @@
 import org.tmatesoft.hg.repo.HgInvalidRevisionException;
 import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Path;
 
 
@@ -52,7 +53,7 @@
 			}
 			public void dir(Path p) {
 			}
-			public void file(HgFileRevision fileRevision) {
+			public void file(HgFileRevision fileRevision) throws HgRuntimeException {
 				try {
 					if (debug) {
 						System.out.print(fileRevision.getRevision());;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cmdline/org/tmatesoft/hg/console/Push.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.console;
+
+import java.util.Collections;
+
+import org.tmatesoft.hg.core.HgPushCommand;
+import org.tmatesoft.hg.core.HgRepoFacade;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgRemoteRepository;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class Push {
+
+	public static void main(String[] args) throws Exception {
+		Options cmdLineOpts = Options.parse(args, Collections.<String>emptySet());
+		HgRepoFacade hgRepo = new HgRepoFacade();
+		if (!hgRepo.init(cmdLineOpts.findRepository())) {
+			System.err.printf("Can't find repository in: %s\n", hgRepo.getRepository().getLocation());
+			return;
+		}
+		// XXX perhaps, HgRepoFacade shall get detectRemote() analog (to get remote server with respect of facade's repo)
+		HgRemoteRepository hgRemote = new HgLookup().detectRemote(cmdLineOpts.getSingle(""), hgRepo.getRepository());
+		if (hgRemote.isInvalid()) {
+			System.err.printf("Remote repository %s is not valid", hgRemote.getLocation());
+			return;
+		}
+		HgPushCommand cmd = hgRepo.createPushCommand();
+		cmd.destination(hgRemote);
+		cmd.execute();
+		System.out.printf("Added %d changesets\n", cmd.getPushedRevisions().size());
+	}
+}
--- a/src/org/tmatesoft/hg/core/ChangesetTransformer.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/ChangesetTransformer.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -24,6 +24,7 @@
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.repo.HgStatusCollector;
 import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.util.Adaptable;
@@ -63,7 +64,7 @@
 		lifecycleBridge = new LifecycleBridge(ps, cs);
 	}
 	
-	public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
+	public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
 		if (branches != null && !branches.contains(cset.branch())) {
 			return;
 		}
--- a/src/org/tmatesoft/hg/core/HgAddRemoveCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgAddRemoveCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -18,12 +18,14 @@
 
 import java.util.LinkedHashSet;
 
+import org.tmatesoft.hg.internal.COWTransaction;
 import org.tmatesoft.hg.internal.DirstateBuilder;
 import org.tmatesoft.hg.internal.DirstateReader;
-import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.Transaction;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRepositoryLock;
 import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
@@ -31,8 +33,6 @@
 import org.tmatesoft.hg.util.ProgressSupport;
 
 /**
- * WORK IN PROGRESS
- * 
  * Schedule files for addition and removal 
  * XXX and, perhaps, forget() functionality shall be here as well?
  * 
@@ -40,7 +40,6 @@
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-@Experimental(reason="Work in progress")
 public class HgAddRemoveCommand extends HgAbstractCommand<HgAddRemoveCommand> {
 	
 	private final HgRepository repo;
@@ -98,9 +97,12 @@
 	 * Perform scheduled addition/removal
 	 * 
 	 * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
+	 * @throws HgRepositoryLockException if failed to lock the repo for modifications
 	 * @throws CancelledException if execution of the command was cancelled
 	 */
-	public void execute() throws HgException, CancelledException {
+	public void execute() throws HgException, HgRepositoryLockException, CancelledException {
+		final HgRepositoryLock wdLock = repo.getWorkingDirLock();
+		wdLock.acquire();
 		try {
 			final ProgressSupport progress = getProgressSupport(null);
 			final CancelSupport cancellation = getCancelSupport(null, true);
@@ -121,11 +123,24 @@
 				progress.worked(1);
 				cancellation.checkCancelled();
 			}
-			dirstateBuilder.serialize();
+			Transaction.Factory trFactory = new COWTransaction.Factory();
+			Transaction tr = trFactory.create(repo);
+			try {
+				dirstateBuilder.serialize(tr);
+				tr.commit();
+			} catch (RuntimeException ex) {
+				tr.rollback();
+				throw ex;
+			} catch (HgException ex) {
+				tr.rollback();
+				throw ex;
+			}
 			progress.worked(1);
 			progress.done();
 		} catch (HgRuntimeException ex) {
 			throw new HgLibraryFailureException(ex);
+		} finally {
+			wdLock.release();
 		}
 	}
 }
--- a/src/org/tmatesoft/hg/core/HgAnnotateCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgAnnotateCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -20,30 +20,27 @@
 
 import java.util.Arrays;
 
+import org.tmatesoft.hg.core.HgBlameInspector.BlockData;
 import org.tmatesoft.hg.internal.Callback;
 import org.tmatesoft.hg.internal.CsetParamKeeper;
-import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.FileAnnotation;
 import org.tmatesoft.hg.internal.FileAnnotation.LineDescriptor;
 import org.tmatesoft.hg.internal.FileAnnotation.LineInspector;
-import org.tmatesoft.hg.repo.HgBlameFacility.BlockData;
-import org.tmatesoft.hg.repo.HgBlameFacility;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.ProgressSupport;
 
 /**
- * WORK IN PROGRESS. UNSTABLE API
- * 
  * 'hg annotate' counterpart, report origin revision and file line-by-line 
  * 
+ * @since 1.1
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-@Experimental(reason="Work in progress. Unstable API")
 public class HgAnnotateCommand extends HgAbstractCommand<HgAnnotateCommand> {
 	
 	private final HgRepository repo;
@@ -90,7 +87,7 @@
 		return this;
 	}
 	
-	// TODO [1.1] set encoding and provide String line content from LineInfo
+	// TODO [post-1.1] set encoding and provide String line content from LineInfo
 
 	/**
 	 * Annotate selected file
@@ -111,28 +108,34 @@
 		final CancelSupport cancellation = getCancelSupport(inspector, true);
 		cancellation.checkCancelled();
 		progress.start(2);
-		HgDataFile df = repo.getFileNode(file);
-		if (!df.exists()) {
-			return;
+		try {
+			HgDataFile df = repo.getFileNode(file);
+			if (!df.exists()) {
+				return;
+			}
+			final int changesetStart = followRename ? 0 : df.getChangesetRevisionIndex(0);
+			Collector c = new Collector(cancellation);
+			FileAnnotation fa = new FileAnnotation(c);
+			HgDiffCommand cmd = new HgDiffCommand(repo);
+			cmd.file(df).order(HgIterateDirection.NewToOld);
+			cmd.range(changesetStart, annotateRevision.get());
+			cmd.executeAnnotate(fa);
+			progress.worked(1);
+			c.throwIfCancelled();
+			cancellation.checkCancelled();
+			ProgressSupport.Sub subProgress = new ProgressSupport.Sub(progress, 1);
+			subProgress.start(c.lineRevisions.length);
+			LineImpl li = new LineImpl();
+			for (int i = 0; i < c.lineRevisions.length; i++) {
+				li.init(i+1, c.lineRevisions[i], c.line(i));
+				inspector.next(li);
+				subProgress.worked(1);
+				cancellation.checkCancelled();
+			}
+			subProgress.done();
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
 		}
-		final int changesetStart = followRename ? 0 : df.getChangesetRevisionIndex(0);
-		Collector c = new Collector(cancellation);
-		FileAnnotation fa = new FileAnnotation(c);
-		HgBlameFacility af = new HgBlameFacility(df);
-		af.annotate(changesetStart, annotateRevision.get(), fa, HgIterateDirection.NewToOld);
-		progress.worked(1);
-		c.throwIfCancelled();
-		cancellation.checkCancelled();
-		ProgressSupport.Sub subProgress = new ProgressSupport.Sub(progress, 1);
-		subProgress.start(c.lineRevisions.length);
-		LineImpl li = new LineImpl();
-		for (int i = 0; i < c.lineRevisions.length; i++) {
-			li.init(i+1, c.lineRevisions[i], c.line(i));
-			inspector.next(li);
-			subProgress.worked(1);
-			cancellation.checkCancelled();
-		}
-		subProgress.done();
 		progress.done();
 	}
 	
@@ -157,7 +160,8 @@
 		byte[] getContent();
 	}
 
-	// FIXME there's no need in FileAnnotation.LineInspector, merge it here
+	// TODO [post-1.1] there's no need in FileAnnotation.LineInspector, merge it here
+	// ok for 1.1 as this LineInspector is internal class
 	private static class Collector implements LineInspector {
 		private int[] lineRevisions;
 		private byte[][] lines;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/core/HgBlameInspector.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,189 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.core;
+
+import org.tmatesoft.hg.core.HgCallbackTargetException;
+import org.tmatesoft.hg.internal.Callback;
+import org.tmatesoft.hg.internal.Experimental;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.util.Adaptable;
+
+/**
+ * Client's sink for revision differences, diff/annotate functionality.
+ * 
+ * When implemented, clients shall not expect new {@link Block blocks} instances in each call.
+ * 
+ * In case more information about annotated revision is needed, inspector instances may supply 
+ * {@link RevisionDescriptor.Recipient} through {@link Adaptable}.  
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ * @since 1.1
+ */
+@Callback
+@Experimental(reason="Unstable API")
+public interface HgBlameInspector {
+
+	void same(EqualBlock block) throws HgCallbackTargetException;
+	void added(AddBlock block) throws HgCallbackTargetException;
+	void changed(ChangeBlock block) throws HgCallbackTargetException;
+	void deleted(DeleteBlock block) throws HgCallbackTargetException;
+	
+	/**
+	 * Represents content of a block, either as a sequence of bytes or a 
+	 * sequence of smaller blocks (lines), if appropriate (according to usage context).
+	 * 
+	 * This approach allows line-by-line access to content data along with complete byte sequence for the whole block, i.e.
+	 * <pre>
+	 *    BlockData bd = addBlock.addedLines()
+	 *    // bd describes data from the addition completely.
+	 *    // elements of the BlockData are lines
+	 *    bd.elementCount() == addBlock.totalAddedLines();
+	 *    // one cat obtain complete addition with
+	 *    byte[] everythingAdded = bd.asArray();
+	 *    // or iterate line by line
+	 *    for (int i = 0; i < bd.elementCount(); i++) {
+	 *    	 byte[] lineContent = bd.elementAt(i);
+	 *       String line = new String(lineContent, fileEncodingCharset);
+	 *    }
+	 *    where bd.elementAt(0) is the line at index addBlock.firstAddedLine() 
+	 * </pre> 
+	 * 
+	 * LineData or ChunkData? 
+	 */
+	public interface BlockData {
+		BlockData elementAt(int index);
+		int elementCount();
+		byte[] asArray();
+	}
+	
+	/**
+	 * {@link HgBlameInspector} may optionally request extra information about revisions
+	 * being inspected, denoting itself as a {@link RevisionDescriptor.Recipient}. This class 
+	 * provides complete information about file revision under annotation now. 
+	 */
+	public interface RevisionDescriptor {
+		/**
+		 * @return complete source of the diff origin, never <code>null</code>
+		 */
+		BlockData origin();
+		/**
+		 * @return complete source of the diff target, never <code>null</code>
+		 */
+		BlockData target();
+		/**
+		 * @return changeset revision index of original file, or {@link HgRepository#NO_REVISION} if it's the very first revision
+		 */
+		int originChangesetIndex();
+		/**
+		 * @return changeset revision index of the target file
+		 */
+		int targetChangesetIndex();
+		/**
+		 * @return <code>true</code> if this revision is merge
+		 */
+		boolean isMerge();
+		/**
+		 * @return changeset revision index of the second, merged parent
+		 */
+		int mergeChangesetIndex();
+		/**
+		 * @return revision index of the change in target file's revlog
+		 */
+		int fileRevisionIndex();
+
+		/**
+		 * @return file object under blame (target file)
+		 */
+		HgDataFile file();
+
+		/**
+		 * Implement to indicate interest in {@link RevisionDescriptor}.
+		 * 
+		 * Note, instance of {@link RevisionDescriptor} is the same for 
+		 * {@link #start(RevisionDescriptor)} and {@link #done(RevisionDescriptor)} 
+		 * methods, and not necessarily a new one (i.e. <code>==</code>) for the next
+		 * revision announced.
+		 */
+		@Callback
+		public interface Recipient {
+			/**
+			 * Comes prior to any change {@link Block blocks}
+			 */
+			void start(RevisionDescriptor revisionDescription) throws HgCallbackTargetException;
+			/**
+			 * Comes after all change {@link Block blocks} were dispatched
+			 */
+			void done(RevisionDescriptor revisionDescription) throws HgCallbackTargetException;
+		}
+	}
+	
+	/**
+	 * Each change block comes from a single origin, blocks that are result of a merge
+	 * have {@link #originChangesetIndex()} equal to {@link RevisionDescriptor#mergeChangesetIndex()}.
+	 */
+	public interface Block {
+		int originChangesetIndex();
+		int targetChangesetIndex();
+	}
+	
+	public interface EqualBlock extends Block {
+		int originStart();
+		int targetStart();
+		int length();
+		BlockData content();
+	}
+	
+	public interface AddBlock extends Block {
+		/**
+		 * @return line index in the origin where this block is inserted
+		 */
+		int insertedAt();  
+		/**
+		 * @return line index of the first added line in the target revision
+		 */
+		int firstAddedLine();
+		/**
+		 * @return number of added lines in this block
+		 */
+		int totalAddedLines();
+		/**
+		 * @return content of added lines
+		 */
+		BlockData addedLines();
+	}
+	public interface DeleteBlock extends Block {
+		/**
+		 * @return line index in the target revision were this deleted block would be
+		 */
+		int removedAt();
+		/**
+		 * @return line index of the first removed line in the original revision
+		 */
+		int firstRemovedLine();
+		/**
+		 * @return number of deleted lines in this block
+		 */
+		int totalRemovedLines();
+		/**
+		 * @return content of deleted lines
+		 */
+		BlockData removedLines();
+	}
+	public interface ChangeBlock extends AddBlock, DeleteBlock {
+	}
+}
--- a/src/org/tmatesoft/hg/core/HgChangesetHandler.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgChangesetHandler.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -17,6 +17,7 @@
 package org.tmatesoft.hg.core;
 
 import org.tmatesoft.hg.internal.Callback;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Adaptable;
 import org.tmatesoft.hg.util.Path;
 
@@ -32,8 +33,9 @@
 	/**
 	 * @param changeset descriptor of a change, not necessarily a distinct instance each time, {@link HgChangeset#clone() clone()} if need a copy.
 	 * @throws HgCallbackTargetException wrapper for any exception user code may produce 
+	 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 	 */
-	void cset(HgChangeset changeset) throws HgCallbackTargetException;
+	void cset(HgChangeset changeset) throws HgCallbackTargetException, HgRuntimeException;
 
 
 	/**
--- a/src/org/tmatesoft/hg/core/HgChangesetTreeHandler.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgChangesetTreeHandler.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -20,6 +20,7 @@
 
 import org.tmatesoft.hg.internal.Callback;
 import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Pair;
 
 /**
@@ -36,16 +37,18 @@
 	 * @param entry access to various pieces of information about current tree node. Instances might be 
 	 * reused across calls and shall not be kept by client's code
 	 * @throws HgCallbackTargetException wrapper for any exception user code may produce 
+	 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 	 */
-	public void treeElement(HgChangesetTreeHandler.TreeElement entry) throws HgCallbackTargetException;
+	public void treeElement(HgChangesetTreeHandler.TreeElement entry) throws HgCallbackTargetException, HgRuntimeException;
 
 	interface TreeElement {
 		/**
 		 * Revision of the revlog being iterated. For example, when walking file history, return value represents file revisions.
 		 * 
 		 * @return revision of the revlog being iterated.
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public Nodeid fileRevision();
+		public Nodeid fileRevision() throws HgRuntimeException;
 		
 		/**
 		 * File node, provided revlog being iterated is a {@link HgDataFile}; {@link #fileRevision()} 
@@ -55,19 +58,22 @@
 		 * file name for particular revision in the history.
 		 * 
 		 * @return instance of the file being walked, or <code>null</code> if it's not a file but other revlog.
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public HgDataFile file();
+		public HgDataFile file() throws HgRuntimeException;
 
 		/**
 		 * @return changeset associated with the current file revision
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public HgChangeset changeset();
+		public HgChangeset changeset() throws HgRuntimeException;
 
 		/**
 		 * Lightweight alternative to {@link #changeset()}, identifies changeset in which current file node has been modified 
 		 * @return changeset {@link Nodeid revision} 
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public Nodeid changesetRevision();
+		public Nodeid changesetRevision() throws HgRuntimeException;
 
 		/**
 		 * Identifies parent changes, changesets where file/revlog in question was modified prior to change being visited.
@@ -91,25 +97,29 @@
 		 * then this {@link #parents()} call would return pair with single element only, pointing to <code>D</code>
 		 * 
 		 * @return changesets that correspond to parents of the current file node, either pair element may be <code>null</code>.
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public Pair<HgChangeset, HgChangeset> parents();
+		public Pair<HgChangeset, HgChangeset> parents() throws HgRuntimeException;
 		
 		/**
 		 * Lightweight alternative to {@link #parents()}, give {@link Nodeid nodeids} only
 		 * @return two values, neither is <code>null</code>, use {@link Nodeid#isNull()} to identify parent not set
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public Pair<Nodeid, Nodeid> parentRevisions();
+		public Pair<Nodeid, Nodeid> parentRevisions() throws HgRuntimeException;
 
 		/**
 		 * Changes that originate from the given change and bear it as their parent. 
 		 * @return collection (possibly empty) of immediate children of the change
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public Collection<HgChangeset> children();
+		public Collection<HgChangeset> children() throws HgRuntimeException;
 
 		/**
 		 * Lightweight alternative to {@link #children()}.
 		 * @return never <code>null</code>
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public Collection<Nodeid> childRevisions();
+		public Collection<Nodeid> childRevisions() throws HgRuntimeException;
 	}
 }
\ No newline at end of file
--- a/src/org/tmatesoft/hg/core/HgCheckoutCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgCheckoutCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -28,7 +28,6 @@
 import org.tmatesoft.hg.internal.CsetParamKeeper;
 import org.tmatesoft.hg.internal.DirstateBuilder;
 import org.tmatesoft.hg.internal.EncodingHelper;
-import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.WorkingDirFileWriter;
 import org.tmatesoft.hg.repo.HgDataFile;
@@ -46,8 +45,6 @@
 import org.tmatesoft.hg.util.ProgressSupport;
 
 /**
- * WORK IN PROGRESS.
- * 
  * Update working directory to specific state, 'hg checkout' counterpart.
  * For the time being, only 'clean' checkout is supported ('hg co --clean')
  * 
@@ -55,7 +52,6 @@
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-@Experimental(reason="Work in progress")
 public class HgCheckoutCommand extends HgAbstractCommand<HgCheckoutCommand>{
 
 	private final HgRepository repo;
--- a/src/org/tmatesoft/hg/core/HgCloneCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgCloneCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -30,7 +30,6 @@
 
 import org.tmatesoft.hg.internal.ByteArrayDataAccess;
 import org.tmatesoft.hg.internal.DataAccess;
-import org.tmatesoft.hg.internal.DataAccessProvider;
 import org.tmatesoft.hg.internal.DataSerializer;
 import org.tmatesoft.hg.internal.DigestHelper;
 import org.tmatesoft.hg.internal.FNCacheFile;
@@ -91,7 +90,6 @@
 	 * @throws HgRepositoryNotFoundException
 	 * @throws HgException
 	 * @throws CancelledException
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
 	public HgRepository execute() throws HgException, CancelledException {
 		if (destination == null) {
@@ -115,23 +113,27 @@
 		// if cloning remote repo, which can stream and no revision is specified -
 		// can use 'stream_out' wireproto
 		//
-		// pull all changes from the very beginning
-		// XXX consult getContext() if by any chance has a bundle ready, if not, then read and register
-		HgBundle completeChanges = srcRepo.getChanges(Collections.singletonList(NULL));
-		cancel.checkCancelled();
-		WriteDownMate mate = new WriteDownMate(srcRepo.getSessionContext(), destination, progress, cancel);
 		try {
-			// instantiate new repo in the destdir
-			mate.initEmptyRepository();
-			// pull changes
-			completeChanges.inspectAll(mate);
-			mate.checkFailure();
-			mate.complete();
-		} catch (IOException ex) {
-			throw new HgInvalidFileException(getClass().getName(), ex);
-		} finally {
-			completeChanges.unlink();
-			progress.done();
+			// pull all changes from the very beginning
+			// XXX consult getContext() if by any chance has a bundle ready, if not, then read and register
+			HgBundle completeChanges = srcRepo.getChanges(Collections.singletonList(NULL));
+			cancel.checkCancelled();
+			WriteDownMate mate = new WriteDownMate(srcRepo.getSessionContext(), destination, progress, cancel);
+			try {
+				// instantiate new repo in the destdir
+				mate.initEmptyRepository();
+				// pull changes
+				completeChanges.inspectAll(mate);
+				mate.checkFailure();
+				mate.complete();
+			} catch (IOException ex) {
+				throw new HgInvalidFileException(getClass().getName(), ex);
+			} finally {
+				completeChanges.unlink();
+				progress.done();
+			}
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
 		}
 		return new HgLookup().detect(destination);
 	}
@@ -148,6 +150,7 @@
 		private final SessionContext ctx;
 		private final Path.Source pathFactory;
 		private FileOutputStream indexFile;
+		private File currentFile;
 		private String filename; // human-readable name of the file being written, for log/exception purposes 
 
 		private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>();
@@ -170,7 +173,7 @@
 			ctx = sessionCtx;
 			hgDir = new File(destDir, ".hg");
 			repoInit = new RepoInitializer();
-			repoInit.setRequires(STORE | FNCACHE | DOTENCODE);
+			repoInit.setRequires(REVLOGV1 | STORE | FNCACHE | DOTENCODE);
 			storagePathHelper = repoInit.buildDataFilesHelper(sessionCtx);
 			progressSupport = progress;
 			cancelSupport = cancel;
@@ -178,28 +181,23 @@
 			pathFactory = ctx.getPathFactory();
 		}
 
-		public void initEmptyRepository() throws IOException {
+		public void initEmptyRepository() throws HgIOException, HgRepositoryNotFoundException {
 			repoInit.initEmptyRepository(hgDir);
-			try {
-				assert (repoInit.getRequires() & FNCACHE) != 0;
-				fncacheFile = new FNCacheFile(Internals.getInstance(new HgLookup(ctx).detect(hgDir)));
-			} catch (HgRepositoryNotFoundException ex) {
-				// SHALL NOT HAPPEN provided we initialized empty repository successfully
-				// TODO perhaps, with WriteDownMate moving to a more appropriate location,
-				// we could instantiate HgRepository (or Internals) by other means, without exception?
-				throw new IOException("Can't access fncache for newly created repository", ex);
-			}
+			assert (repoInit.getRequires() & FNCACHE) != 0;
+			// XXX perhaps, with WriteDownMate moving to a more appropriate location,
+			// we could instantiate HgRepository (or Internals) by other means, without exception?
+			fncacheFile = new FNCacheFile(Internals.getInstance(new HgLookup(ctx).detect(hgDir)));
 		}
 
 		public void complete() throws IOException {
 			fncacheFile.write();
 		}
 
-		public void changelogStart() {
+		public void changelogStart() throws HgInvalidControlFileException {
 			try {
 				revlogHeader.offset(0).baseRevision(-1);
 				revisionSequence.clear();
-				indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i"));
+				indexFile = new FileOutputStream(currentFile = new File(hgDir, filename = "store/00changelog.i"));
 				collectChangelogIndexes = true;
 			} catch (IOException ex) {
 				throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(hgDir, filename));
@@ -207,7 +205,7 @@
 			stopIfCancelled();
 		}
 
-		public void changelogEnd() {
+		public void changelogEnd() throws HgInvalidControlFileException {
 			try {
 				clearPreviousContent();
 				collectChangelogIndexes = false;
@@ -219,18 +217,18 @@
 			stopIfCancelled();
 		}
 
-		public void manifestStart() {
+		public void manifestStart() throws HgInvalidControlFileException {
 			try {
 				revlogHeader.offset(0).baseRevision(-1);
 				revisionSequence.clear();
-				indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i"));
+				indexFile = new FileOutputStream(currentFile = new File(hgDir, filename = "store/00manifest.i"));
 			} catch (IOException ex) {
 				throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(hgDir, filename));
 			}
 			stopIfCancelled();
 		}
 
-		public void manifestEnd() {
+		public void manifestEnd() throws HgInvalidControlFileException {
 			try {
 				clearPreviousContent();
 				closeIndexFile();
@@ -241,14 +239,13 @@
 			stopIfCancelled();
 		}
 		
-		public void fileStart(String name) {
+		public void fileStart(String name) throws HgInvalidControlFileException {
 			try {
 				revlogHeader.offset(0).baseRevision(-1);
 				revisionSequence.clear();
-				fncacheFile.add(pathFactory.path(name)); 
 				File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString());
 				file.getParentFile().mkdirs();
-				indexFile = new FileOutputStream(file);
+				indexFile = new FileOutputStream(currentFile = file);
 			} catch (IOException ex) {
 				String m = String.format("Failed to write file %s", filename);
 				throw new HgInvalidControlFileException(m, ex, new File(filename));
@@ -256,8 +253,9 @@
 			stopIfCancelled();
 		}
 
-		public void fileEnd(String name) {
+		public void fileEnd(String name) throws HgInvalidControlFileException {
 			try {
+				fncacheFile.addIndex(pathFactory.path(name)); 
 				clearPreviousContent();
 				closeIndexFile();
 			} catch (IOException ex) {
@@ -279,9 +277,10 @@
 			indexFile.close();
 			indexFile = null;
 			filename = null;
+			currentFile = null;
 		}
 
-		private int knownRevision(Nodeid p) {
+		private int knownRevision(Nodeid p) throws HgInvalidControlFileException {
 			if (p.isNull()) {
 				return -1;
 			} else {
@@ -295,7 +294,7 @@
 			throw new HgInvalidControlFileException(m, null, new File(hgDir, filename)).setRevision(p);
 		}
 		
-		public boolean element(GroupElement ge) {
+		public boolean element(GroupElement ge) throws HgRuntimeException {
 			try {
 				assert indexFile != null;
 				boolean writeComplete = false;
@@ -367,11 +366,15 @@
 				revlogHeader.length(content.length, compressedLen);
 				
 				// XXX may be wise not to create DataSerializer for each revision, but for a file
-				DataAccessProvider.StreamDataSerializer sds = new DataAccessProvider.StreamDataSerializer(ctx.getLog(), indexFile) {
+				DataSerializer sds = new DataSerializer() {
 					@Override
-					public void done() {
-						// override parent behavior not to close stream in use
-					}
+						public void write(byte[] data, int offset, int length) throws HgIOException {
+							try {
+								indexFile.write(data, offset, length);
+							} catch (IOException ex) {
+								throw new HgIOException("Write failure", ex, currentFile);
+							}
+						}
 				};
 				revlogHeader.serialize(sds);
 
@@ -389,9 +392,12 @@
 				revisionSequence.add(node);
 				prevRevContent.done();
 				prevRevContent = new ByteArrayDataAccess(content);
+			} catch (HgIOException ex) {
+				String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename);
+				throw new HgInvalidControlFileException(m, ex, currentFile);
 			} catch (IOException ex) {
 				String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename);
-				throw new HgInvalidControlFileException(m, ex, new File(hgDir, filename));
+				throw new HgInvalidControlFileException(m, ex, currentFile);
 			}
 			return cancelException == null;
 		}
--- a/src/org/tmatesoft/hg/core/HgCommitCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgCommitCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -19,16 +19,18 @@
 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
 
-import org.tmatesoft.hg.internal.ByteArrayChannel;
-import org.tmatesoft.hg.internal.Experimental;
+import org.tmatesoft.hg.internal.COWTransaction;
+import org.tmatesoft.hg.internal.CommitFacility;
+import org.tmatesoft.hg.internal.CompleteRepoLock;
 import org.tmatesoft.hg.internal.FileContentSupplier;
-import org.tmatesoft.hg.repo.CommitFacility;
+import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.Transaction;
+import org.tmatesoft.hg.internal.WorkingCopyContent;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.repo.HgStatusCollector.Record;
@@ -40,14 +42,12 @@
 import org.tmatesoft.hg.util.Path;
 
 /**
- * WORK IN PROGRESS. UNSTABLE API
- * 
  * 'hg commit' counterpart, commit changes
  * 
+ * @since 1.1
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-@Experimental(reason="Work in progress. Unstable API")
 public class HgCommitCommand extends HgAbstractCommand<HgCommitCommand> {
 
 	private final HgRepository repo;
@@ -75,16 +75,22 @@
 	 * Tell if changes in the working directory constitute merge commit. May be invoked prior to (and independently from) {@link #execute()}
 	 * 
 	 * @return <code>true</code> if working directory changes are result of a merge
-	 * @throws HgException subclass thereof to indicate specific issue with the repository
+	 * @throws HgLibraryFailureException to indicate unexpected issue with the repository
+	 * @throws HgException subclass thereof to indicate other specific issue with repository state
 	 */
 	public boolean isMergeCommit() throws HgException {
-		int[] parents = new int[2];
-		detectParentFromDirstate(parents);
-		return parents[0] != NO_REVISION && parents[1] != NO_REVISION; 
+		try {
+			int[] parents = new int[2];
+			detectParentFromDirstate(parents);
+			return parents[0] != NO_REVISION && parents[1] != NO_REVISION;
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
+		}
 	}
 
 	/**
 	 * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
+	 * @throws HgRepositoryLockException if failed to lock the repo for modifications
 	 * @throws IOException propagated IO errors from status walker over working directory
 	 * @throws CancelledException if execution of the command was cancelled
 	 */
@@ -92,6 +98,8 @@
 		if (message == null) {
 			throw new HgBadArgumentException("Shall supply commit message", null);
 		}
+		final CompleteRepoLock repoLock = new CompleteRepoLock(repo);
+		repoLock.acquire();
 		try {
 			int[] parentRevs = new int[2];
 			detectParentFromDirstate(parentRevs);
@@ -104,21 +112,18 @@
 				newRevision = Nodeid.NULL;
 				return new Outcome(Kind.Failure, "nothing to add");
 			}
-			CommitFacility cf = new CommitFacility(repo, parentRevs[0], parentRevs[1]);
+			CommitFacility cf = new CommitFacility(Internals.getInstance(repo), parentRevs[0], parentRevs[1]);
 			for (Path m : status.getModified()) {
 				HgDataFile df = repo.getFileNode(m);
 				cf.add(df, new WorkingCopyContent(df));
 			}
-			ArrayList<FileContentSupplier> toClear = new ArrayList<FileContentSupplier>();
 			for (Path a : status.getAdded()) {
 				HgDataFile df = repo.getFileNode(a); // TODO need smth explicit, like repo.createNewFileNode(Path) here
 				// XXX might be an interesting exercise not to demand a content supplier, but instead return a "DataRequester"
 				// object, that would indicate interest in data, and this code would "push" it to requester, so that any exception
 				// is handled here, right away, and won't need to travel supplier and CommitFacility. (although try/catch inside
 				// supplier.read (with empty throws declaration)
-				FileContentSupplier fcs = new FileContentSupplier(repo, a);
-				cf.add(df, fcs);
-				toClear.add(fcs);
+				cf.add(df, new FileContentSupplier(repo, a));
 			}
 			for (Path r : status.getRemoved()) {
 				HgDataFile df = repo.getFileNode(r); 
@@ -126,14 +131,23 @@
 			}
 			cf.branch(detectBranch());
 			cf.user(detectUser());
-			newRevision = cf.commit(message);
-			// TODO toClear list is awful
-			for (FileContentSupplier fcs : toClear) {
-				fcs.done();
+			Transaction.Factory trFactory = new COWTransaction.Factory();
+			Transaction tr = trFactory.create(repo);
+			try {
+				newRevision = cf.commit(message, tr);
+				tr.commit();
+			} catch (RuntimeException ex) {
+				tr.rollback();
+				throw ex;
+			} catch (HgException ex) {
+				tr.rollback();
+				throw ex;
 			}
 			return new Outcome(Kind.Success, "Commit ok");
 		} catch (HgRuntimeException ex) {
 			throw new HgLibraryFailureException(ex);
+		} finally {
+			repoLock.release();
 		}
 	}
 
@@ -144,7 +158,7 @@
 		return newRevision;
 	}
 
-	private String detectBranch() {
+	private String detectBranch() throws HgInvalidControlFileException {
 		return repo.getWorkingCopyBranchName();
 	}
 	
@@ -156,50 +170,10 @@
 		return new HgInternals(repo).getNextCommitUsername();
 	}
 
-	private void detectParentFromDirstate(int[] parents) {
+	private void detectParentFromDirstate(int[] parents) throws HgRuntimeException {
 		Pair<Nodeid, Nodeid> pn = repo.getWorkingCopyParents();
 		HgChangelog clog = repo.getChangelog();
 		parents[0] = pn.first().isNull() ? NO_REVISION : clog.getRevisionIndex(pn.first());
 		parents[1] = pn.second().isNull() ? NO_REVISION : clog.getRevisionIndex(pn.second());
 	}
-
-	private static class WorkingCopyContent implements CommitFacility.ByteDataSupplier {
-		private final HgDataFile file;
-		private ByteBuffer fileContent; 
-
-		public WorkingCopyContent(HgDataFile dataFile) {
-			file = dataFile;
-			if (!dataFile.exists()) {
-				throw new IllegalArgumentException();
-			}
-		}
-
-		public int read(ByteBuffer dst) {
-			if (fileContent == null) {
-				try {
-					ByteArrayChannel sink = new ByteArrayChannel();
-					// TODO desperately need partial read here
-					file.workingCopy(sink);
-					fileContent = ByteBuffer.wrap(sink.toArray());
-				} catch (CancelledException ex) {
-					// ByteArrayChannel doesn't cancel, never happens
-					assert false;
-				}
-			}
-			if (fileContent.remaining() == 0) {
-				return -1;
-			}
-			int dstCap = dst.remaining();
-			if (fileContent.remaining() > dstCap) {
-				// save actual limit, and pretend we've got exactly desired amount of bytes
-				final int lim = fileContent.limit();
-				fileContent.limit(dstCap);
-				dst.put(fileContent);
-				fileContent.limit(lim);
-			} else {
-				dst.put(fileContent);
-			}
-			return dstCap - dst.remaining();
-		}
-	}
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/core/HgDiffCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,261 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.core;
+
+import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
+
+import org.tmatesoft.hg.internal.BlameHelper;
+import org.tmatesoft.hg.internal.CsetParamKeeper;
+import org.tmatesoft.hg.internal.FileHistory;
+import org.tmatesoft.hg.internal.FileRevisionHistoryChunk;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.CancelSupport;
+import org.tmatesoft.hg.util.CancelledException;
+import org.tmatesoft.hg.util.Path;
+import org.tmatesoft.hg.util.ProgressSupport;
+
+/**
+ * 'hg diff' counterpart, with similar, although not identical, functionality.
+ * Despite both 'hg diff' and this command are diff-based, implementation
+ * peculiarities may lead to slightly different diff results. Either is valid
+ * as there's no strict diff specification. 
+ * 
+ * <p>
+ * <strong>Note</strong>, at the moment this command annotates single file only. Diff over
+ * complete repository (all the file changed in a given changeset) might
+ * be added later.
+ * 
+ * @since 1.1
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class HgDiffCommand extends HgAbstractCommand<HgDiffCommand> {
+
+	private final HgRepository repo;
+	private HgDataFile df;
+	private final CsetParamKeeper clogRevIndexStart, clogRevIndexEnd;
+	private HgIterateDirection iterateDirection = HgIterateDirection.NewToOld;
+
+	public HgDiffCommand(HgRepository hgRepo) {
+		repo = hgRepo;
+		clogRevIndexStart = new CsetParamKeeper(hgRepo);
+		clogRevIndexEnd = new CsetParamKeeper(hgRepo);
+	}
+	
+	public HgDiffCommand file(Path file) {
+		df = repo.getFileNode(file);
+		return this;
+	}
+
+	/**
+	 * Selects the file which history to blame, mandatory.
+	 * 
+	 * @param file repository file
+	 * @return <code>this</code> for convenience
+	 */
+	public HgDiffCommand file(HgDataFile file) {
+		df = file;
+		return this;
+	}
+
+	/**
+	 * Select range of file's history for {@link #executeDiff(HgBlameInspector)}
+	 * and {@link #executeAnnotate(HgBlameInspector)}.
+	 * <p>
+	 * {@link #executeDiff(HgBlameInspector) diff} uses these as revisions to diff against each other, while 
+	 * {@link #executeAnnotate(HgBlameInspector) annotate} walks the range. 
+	 * 
+	 * @param changelogRevIndexStart index of changelog revision, left range boundary
+	 * @param changelogRevIndexEnd index of changelog revision, right range boundary
+	 * @return <code>this</code> for convenience
+	 * @throws HgBadArgumentException if failed to find any of supplied changeset 
+	 */
+	public HgDiffCommand range(int changelogRevIndexStart, int changelogRevIndexEnd) throws HgBadArgumentException {
+		clogRevIndexStart.set(changelogRevIndexStart);
+		clogRevIndexEnd.set(changelogRevIndexEnd);
+		return this;
+	}
+	
+	/**
+	 * Selects revision for {@link #executeParentsAnnotate(HgBlameInspector)}, the one 
+	 * to diff against its parents. 
+	 * 
+	 * Besides, it is handy when range of interest spans up to the very beginning of the file history 
+	 * (and thus is equivalent to <code>range(0, changelogRevIndex)</code>)
+	 * 
+	 * @param changelogRevIndex index of changelog revision
+	 * @return <code>this</code> for convenience
+	 * @throws HgBadArgumentException if failed to find supplied changeset 
+	 */
+	public HgDiffCommand changeset(int changelogRevIndex) throws HgBadArgumentException {
+		clogRevIndexStart.set(0);
+		clogRevIndexEnd.set(changelogRevIndex);
+		return this;
+	}
+
+	/**
+	 * Revision differences are reported in selected order when 
+	 * annotating {@link #range(int, int) range} of changesets with
+	 * {@link #executeAnnotate(HgBlameInspector)}.
+	 * <p>
+	 * This method doesn't affect {@link #executeParentsAnnotate(HgBlameInspector)} and
+	 * {@link #executeDiff(HgBlameInspector)}
+	 * 
+	 * @param order desired iteration order 
+	 * @return <code>this</code> for convenience
+	 */
+	public HgDiffCommand order(HgIterateDirection order) {
+		iterateDirection = order;
+		return this;
+	}
+	
+	/**
+	 * Diff two revisions selected with {@link #range(int, int)} against each other.
+	 * <p>mimics 'hg diff -r clogRevIndex1 -r clogRevIndex2'
+	 * 
+ 	 * @throws HgCallbackTargetException propagated exception from the handler
+	 * @throws CancelledException if execution of the command was cancelled
+	 * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
+	 */
+	public void executeDiff(HgBlameInspector insp) throws HgCallbackTargetException, CancelledException, HgException {
+		checkFile();
+		final ProgressSupport progress = getProgressSupport(insp);
+		progress.start(2);
+		try {
+			final CancelSupport cancel = getCancelSupport(insp, true);
+			int fileRevIndex1 = fileRevIndex(df, clogRevIndexStart.get());
+			int fileRevIndex2 = fileRevIndex(df, clogRevIndexEnd.get());
+			BlameHelper bh = new BlameHelper(insp);
+			bh.prepare(df, clogRevIndexStart.get(), clogRevIndexEnd.get());
+			progress.worked(1);
+			cancel.checkCancelled();
+			bh.diff(fileRevIndex1, clogRevIndexStart.get(), fileRevIndex2, clogRevIndexEnd.get());
+			progress.worked(1);
+			cancel.checkCancelled();
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
+		} finally {
+			progress.done();
+		}
+	}
+
+	/**
+	 * Walk file history {@link #range(int, int) range} and report changes (diff) for each revision
+	 * 
+ 	 * @throws HgCallbackTargetException propagated exception from the handler
+	 * @throws CancelledException if execution of the command was cancelled
+	 * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
+	 */
+	public void executeAnnotate(HgBlameInspector insp) throws HgCallbackTargetException, CancelledException, HgException {
+		checkFile();
+		ProgressSupport progress = null;
+		try {
+			if (!df.exists()) {
+				return;
+			}
+			final CancelSupport cancel = getCancelSupport(insp, true);
+			BlameHelper bh = new BlameHelper(insp);
+			FileHistory fileHistory = bh.prepare(df, clogRevIndexStart.get(), clogRevIndexEnd.get());
+			//
+			cancel.checkCancelled();
+			int totalWork = 0;
+			for (FileRevisionHistoryChunk fhc : fileHistory.iterate(iterateDirection)) {
+				totalWork += fhc.revisionCount();
+			}
+			progress = getProgressSupport(insp);
+			progress.start(totalWork + 1);
+			progress.worked(1); // BlameHelper.prepare
+			//
+			int[] fileClogParentRevs = new int[2];
+			int[] fileParentRevs = new int[2];
+			for (FileRevisionHistoryChunk fhc : fileHistory.iterate(iterateDirection)) {
+				for (int fri : fhc.fileRevisions(iterateDirection)) {
+					int clogRevIndex = fhc.changeset(fri);
+					// the way we built fileHistory ensures we won't walk past [changelogRevIndexStart..changelogRevIndexEnd]
+					assert clogRevIndex >= clogRevIndexStart.get();
+					assert clogRevIndex <= clogRevIndexEnd.get();
+					fhc.fillFileParents(fri, fileParentRevs);
+					fhc.fillCsetParents(fri, fileClogParentRevs);
+					bh.annotateChange(fri, clogRevIndex, fileParentRevs, fileClogParentRevs);
+					progress.worked(1);
+					cancel.checkCancelled();
+				}
+			}
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
+		} finally {
+			if (progress != null) {
+				progress.done();
+			}
+		}
+	}
+
+	/**
+	 * Annotates changes of the file against its parent(s). 
+	 * Unlike {@link #annotate(HgDataFile, int, Inspector, HgIterateDirection)}, doesn't
+	 * walk file history, looks at the specified revision only. Handles both parents (if merge revision).
+	 * 
+ 	 * @throws HgCallbackTargetException propagated exception from the handler
+	 * @throws CancelledException if execution of the command was cancelled
+	 * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
+	 */
+	public void executeParentsAnnotate(HgBlameInspector insp) throws HgCallbackTargetException, CancelledException, HgException {
+		checkFile();
+		final ProgressSupport progress = getProgressSupport(insp);
+		progress.start(2);
+		try {
+			final CancelSupport cancel = getCancelSupport(insp, true);
+			int changelogRevisionIndex = clogRevIndexEnd.get();
+			// TODO detect if file is text/binary (e.g. looking for chars < ' ' and not \t\r\n\f
+			int fileRevIndex = fileRevIndex(df, changelogRevisionIndex);
+			int[] fileRevParents = new int[2];
+			df.parents(fileRevIndex, fileRevParents, null, null);
+			if (changelogRevisionIndex == TIP) {
+				changelogRevisionIndex = df.getChangesetRevisionIndex(fileRevIndex);
+			}
+			int[] fileClogParentRevs = new int[2];
+			fileClogParentRevs[0] = fileRevParents[0] == NO_REVISION ? NO_REVISION : df.getChangesetRevisionIndex(fileRevParents[0]);
+			fileClogParentRevs[1] = fileRevParents[1] == NO_REVISION ? NO_REVISION : df.getChangesetRevisionIndex(fileRevParents[1]);
+			BlameHelper bh = new BlameHelper(insp);
+			int clogIndexStart = fileClogParentRevs[0] == NO_REVISION ? (fileClogParentRevs[1] == NO_REVISION ? 0 : fileClogParentRevs[1]) : fileClogParentRevs[0];
+			bh.prepare(df, clogIndexStart, changelogRevisionIndex);
+			progress.worked(1);
+			cancel.checkCancelled();
+			bh.annotateChange(fileRevIndex, changelogRevisionIndex, fileRevParents, fileClogParentRevs);
+			progress.worked(1);
+			cancel.checkCancelled();
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
+		} finally {
+			progress.done();
+		}
+	}
+
+	private void checkFile() {
+		if (df == null) {
+			throw new IllegalArgumentException("File is not set");
+		}
+	}
+
+	private static int fileRevIndex(HgDataFile df, int csetRevIndex) throws HgRuntimeException {
+		Nodeid fileRev = df.getRepo().getManifest().getFileRevision(csetRevIndex, df.getPath());
+		return df.getRevisionIndex(fileRev);
+	}
+}
--- a/src/org/tmatesoft/hg/core/HgFileRenameHandlerMixin.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgFileRenameHandlerMixin.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -16,6 +16,7 @@
  */
 package org.tmatesoft.hg.core;
 
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Adaptable;
 
 /**
@@ -34,6 +35,7 @@
 
 	/**
 	 * @throws HgCallbackTargetException wrapper object for any exception user code may produce 
+	 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 	 */
-	void copy(HgFileRevision from, HgFileRevision to) throws HgCallbackTargetException;
+	void copy(HgFileRevision from, HgFileRevision to) throws HgCallbackTargetException, HgRuntimeException;
 }
--- a/src/org/tmatesoft/hg/core/HgFileRevision.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgFileRevision.java	Wed Jul 10 11:48:55 2013 +0200
@@ -107,7 +107,11 @@
 		return flags;
 	}
 
-	public boolean wasCopied() throws HgException {
+	/**
+	 * @return <code>true</code> if this file revision was created as a result of a copy/rename
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 */
+	public boolean wasCopied() throws HgRuntimeException {
 		if (isCopy == null) {
 			checkCopy();
 		}
@@ -115,8 +119,9 @@
 	}
 	/**
 	 * @return <code>null</code> if {@link #wasCopied()} is <code>false</code>, name of the copy source otherwise.
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
-	public Path getOriginIfCopy() throws HgException {
+	public Path getOriginIfCopy() throws HgRuntimeException {
 		if (wasCopied()) {
 			return origin;
 		}
@@ -145,7 +150,13 @@
 		return parents;
 	}
 
-	public void putContentTo(ByteChannel sink) throws HgException, CancelledException {
+	/**
+	 * Pipe content of this file revision into the sink
+	 * @param sink accepts file revision content
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws CancelledException if execution of the operation was cancelled
+	 */
+	public void putContentTo(ByteChannel sink) throws HgRuntimeException, CancelledException {
 		HgDataFile fn = repo.getFileNode(path);
 		int revisionIndex = fn.getRevisionIndex(revision);
 		fn.contentWithFilters(revisionIndex, sink);
@@ -156,7 +167,7 @@
 		return String.format("HgFileRevision(%s, %s)", getPath().toString(), revision.shortNotation());
 	}
 
-	private void checkCopy() throws HgException {
+	private void checkCopy() throws HgRuntimeException {
 		HgDataFile fn = repo.getFileNode(path);
 		if (fn.isCopy()) {
 			if (fn.getRevision(0).equals(revision)) {
--- a/src/org/tmatesoft/hg/core/HgIOException.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgIOException.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -40,11 +40,14 @@
 	 * @param cause root cause for the error, likely {@link IOException} or its subclass, but not necessarily, and may be omitted. 
 	 * @param troubleFile file we tried to deal with, never <code>null</code>
 	 */
-	public HgIOException(String message, Exception cause, File troubleFile) {
+	public HgIOException(String message, Throwable cause, File troubleFile) {
 		super(message, cause);
 		file = troubleFile;
 	}
 
+	/**
+	 * @return file that causes trouble, may be <code>null</code>
+	 */
 	public File getFile() {
 		return file;
 	}
--- a/src/org/tmatesoft/hg/core/HgIncomingCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgIncomingCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -31,12 +31,11 @@
 import org.tmatesoft.hg.repo.HgBundle;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
-import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.repo.HgRemoteRepository;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgRuntimeException;
-import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.ProgressSupport;
 
@@ -138,10 +137,10 @@
 		if (handler == null) {
 			throw new IllegalArgumentException("Delegate can't be null");
 		}
-		final List<Nodeid> common = getCommon();
-		HgBundle changegroup = remoteRepo.getChanges(common);
 		final ProgressSupport ps = getProgressSupport(handler);
 		try {
+			final List<Nodeid> common = getCommon();
+			HgBundle changegroup = remoteRepo.getChanges(common);
 			final ChangesetTransformer transformer = new ChangesetTransformer(localRepo, handler, getParentHelper(), ps, getCancelSupport(handler, true));
 			transformer.limitBranches(branches);
 			changegroup.changes(localRepo, new HgChangelog.Inspector() {
@@ -154,7 +153,7 @@
 					localIndex = localRepo.getChangelog().getRevisionCount();
 				}
 				
-				public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
+				public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
 					if (parentHelper.knownNode(nodeid)) {
 						if (!common.contains(nodeid)) {
 							throw new HgInvalidStateException("Bundle shall not report known nodes other than roots we've supplied");
@@ -172,7 +171,7 @@
 		}
 	}
 
-	private RepositoryComparator getComparator() throws HgInvalidControlFileException, CancelledException {
+	private RepositoryComparator getComparator() throws CancelledException, HgRuntimeException {
 		if (remoteRepo == null) {
 			throw new IllegalArgumentException("Shall specify remote repository to compare against", null);
 		}
@@ -183,7 +182,7 @@
 		return comparator;
 	}
 	
-	private HgParentChildMap<HgChangelog> getParentHelper() throws HgInvalidControlFileException {
+	private HgParentChildMap<HgChangelog> getParentHelper() throws HgRuntimeException {
 		if (parentHelper == null) {
 			parentHelper = new HgParentChildMap<HgChangelog>(localRepo.getChangelog());
 			parentHelper.init();
@@ -191,14 +190,14 @@
 		return parentHelper;
 	}
 	
-	private List<BranchChain> getMissingBranches() throws HgRemoteConnectionException, HgInvalidControlFileException, CancelledException {
+	private List<BranchChain> getMissingBranches() throws HgRemoteConnectionException, CancelledException, HgRuntimeException {
 		if (missingBranches == null) {
 			missingBranches = getComparator().calculateMissingBranches();
 		}
 		return missingBranches;
 	}
 
-	private List<Nodeid> getCommon() throws HgRemoteConnectionException, HgInvalidControlFileException, CancelledException {
+	private List<Nodeid> getCommon() throws HgRemoteConnectionException, CancelledException, HgRuntimeException {
 //		return getComparator(context).getCommon();
 		final LinkedHashSet<Nodeid> common = new LinkedHashSet<Nodeid>();
 		// XXX common can be obtained from repoCompare, but at the moment it would almost duplicate work of calculateMissingBranches
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/core/HgInitCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.core;
+
+import static org.tmatesoft.hg.internal.RequiresFile.*;
+
+import java.io.File;
+
+import org.tmatesoft.hg.internal.RepoInitializer;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.CancelledException;
+
+/**
+ * Initialize empty local repository. 
+ * <p>
+ * Two predefined alternatives are available, {@link #revlogV0() old} and {@link #revlogV1() new} mercurial format respectively.
+ * <p>
+ * Specific requirements may be turned off/on as needed if you know what you're doing.
+ * 
+ * @see <a href="http://mercurial.selenic.com/wiki/RequiresFile">RequiresFile</a>
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class HgInitCommand extends HgAbstractCommand<HgInitCommand> {
+	private static final int V1_DEFAULT = REVLOGV1 | STORE | FNCACHE | DOTENCODE;
+	
+	private final HgLookup hgLookup;
+	private File location;
+	private int requiresFlags;
+	
+	public HgInitCommand() {
+		this(null);
+	}
+
+	public HgInitCommand(HgLookup lookupEnv) {
+		hgLookup = lookupEnv;
+		requiresFlags = V1_DEFAULT;
+	}
+	
+	public HgInitCommand location(File repoLoc) {
+		location = repoLoc;
+		return this;
+	}
+	
+	public HgInitCommand revlogV0() {
+		requiresFlags = REVLOGV0;
+		return this;
+	}
+	
+	public HgInitCommand revlogV1() {
+		requiresFlags = V1_DEFAULT;
+		return this;
+	}
+	
+	public HgInitCommand store(boolean enable) {
+		return switchFlag(STORE, enable);
+	}
+	
+	public HgInitCommand fncache(boolean enable) {
+		return switchFlag(FNCACHE, enable);
+	}
+	
+	public HgInitCommand dotencode(boolean enable) {
+		return switchFlag(DOTENCODE, enable);
+	}
+
+	public HgRepository execute() throws HgRepositoryNotFoundException, HgException, CancelledException {
+		if (location == null) {
+			throw new IllegalArgumentException();
+		}
+		File repoDir;
+		if (".hg".equals(location.getName())) {
+			repoDir = location;
+		} else {
+			repoDir = new File(location, ".hg");
+		}
+		new RepoInitializer().setRequires(requiresFlags).initEmptyRepository(repoDir);
+		return getNewRepository();
+	}
+	
+	public HgRepository getNewRepository() throws HgRepositoryNotFoundException {
+		HgLookup l = hgLookup == null ? new HgLookup() : hgLookup;
+		return l.detect(location);
+	}
+	
+	private HgInitCommand switchFlag(int flag, boolean enable) {
+		if (enable) {
+			requiresFlags |= flag;
+		} else {
+			requiresFlags &= ~flag;
+		}
+		return this;
+	}
+}
--- a/src/org/tmatesoft/hg/core/HgLogCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgLogCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -30,7 +30,6 @@
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.ListIterator;
 import java.util.Set;
 import java.util.TreeSet;
 
@@ -42,10 +41,10 @@
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.Lifecycle;
 import org.tmatesoft.hg.internal.LifecycleProxy;
+import org.tmatesoft.hg.internal.ReverseIterator;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.repo.HgDataFile;
-import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
 import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.repo.HgRepository;
@@ -297,20 +296,20 @@
 		if (csetTransform != null) {
 			throw new ConcurrentModificationException();
 		}
-		if (repo.getChangelog().getRevisionCount() == 0) {
-			return;
-		}
-		final int lastCset = endRev == TIP ? repo.getChangelog().getLastRevision() : endRev;
-		// XXX pretty much like HgInternals.checkRevlogRange
-		if (lastCset < 0 || lastCset > repo.getChangelog().getLastRevision()) {
-			throw new HgBadArgumentException(String.format("Bad value %d for end revision", lastCset), null);
-		}
-		if (startRev < 0 || startRev > lastCset) {
-			throw new HgBadArgumentException(String.format("Bad value %d for start revision for range [%1$d..%d]", startRev, lastCset), null);
-		}
 		final ProgressSupport progressHelper = getProgressSupport(handler);
-		final int BATCH_SIZE = 100;
 		try {
+			if (repo.getChangelog().getRevisionCount() == 0) {
+				return;
+			}
+			final int lastCset = endRev == TIP ? repo.getChangelog().getLastRevision() : endRev;
+			// XXX pretty much like HgInternals.checkRevlogRange
+			if (lastCset < 0 || lastCset > repo.getChangelog().getLastRevision()) {
+				throw new HgBadArgumentException(String.format("Bad value %d for end revision", lastCset), null);
+			}
+			if (startRev < 0 || startRev > lastCset) {
+				throw new HgBadArgumentException(String.format("Bad value %d for start revision for range [%1$d..%d]", startRev, lastCset), null);
+			}
+			final int BATCH_SIZE = 100;
 			count = 0;
 			HgParentChildMap<HgChangelog> pw = getParentHelper(file == null); // leave it uninitialized unless we iterate whole repo
 			// ChangesetTransfrom creates a blank PathPool, and #file(String, boolean) above 
@@ -446,12 +445,7 @@
 		}
 		
 		public Iterable<BatchRecord> iterate(final boolean reverse) {
-			return new Iterable<BatchRecord>() {
-				
-				public Iterator<BatchRecord> iterator() {
-					return reverse ? new ReverseIterator<BatchRecord>(batch) : batch.iterator();
-				}
-			};
+			return reverse ? ReverseIterator.reversed(batch) : batch;
 		}
 		
 		// alternative would be dispatch(HgChangelog.Inspector) and dispatchReverse()
@@ -522,65 +516,51 @@
 		final CancelSupport cancelHelper = getCancelSupport(handler, true);
 		final HgFileRenameHandlerMixin renameHandler = Adaptable.Factory.getAdapter(handler, HgFileRenameHandlerMixin.class, null);
 
-		
-		// XXX rename. dispatcher is not a proper name (most of the job done - managing history chunk interconnection)
-		final HandlerDispatcher dispatcher = new HandlerDispatcher() {
-
-			@Override
-			protected void once(HistoryNode n) throws HgCallbackTargetException, CancelledException {
-				handler.treeElement(ei.init(n, currentFileNode));
-				cancelHelper.checkCancelled();
-			}
-		};
+		try {
 
-		// renamed files in the queue are placed with respect to #iterateDirection
-		// i.e. if we iterate from new to old, recent filenames come first
-		FileRenameQueueBuilder frqBuilder = new FileRenameQueueBuilder();
-		List<Pair<HgDataFile, Nodeid>> fileRenamesQueue = frqBuilder.buildFileRenamesQueue();
-		// XXX perhaps, makes sense to look at selected file's revision when followAncestry is true
-		// to ensure file we attempt to trace is in the WC's parent. Native hg aborts if not.
-		progressHelper.start(4 * fileRenamesQueue.size());
-		for (int namesIndex = 0, renamesQueueSize = fileRenamesQueue.size(); namesIndex < renamesQueueSize; namesIndex++) {
- 
-			final Pair<HgDataFile, Nodeid> renameInfo = fileRenamesQueue.get(namesIndex);
-			dispatcher.prepare(progressHelper, renameInfo);
-			cancelHelper.checkCancelled();
-			if (namesIndex > 0) {
-				dispatcher.connectWithLastJunctionPoint(renameInfo, fileRenamesQueue.get(namesIndex - 1));
-			}
-			if (namesIndex + 1 < renamesQueueSize) {
-				// there's at least one more name we are going to look at
-				dispatcher.updateJunctionPoint(renameInfo, fileRenamesQueue.get(namesIndex+1), renameHandler != null);
-			} else {
-				dispatcher.clearJunctionPoint();
-			}
-			dispatcher.dispatchAllChanges();
-			if (renameHandler != null && namesIndex + 1 < renamesQueueSize) {
-				dispatcher.reportRenames(renameHandler);
-			}
-		} // for fileRenamesQueue;
-		frqBuilder.reportRenameIfNotInQueue(fileRenamesQueue, renameHandler);
+			// XXX rename. dispatcher is not a proper name (most of the job done - managing history chunk interconnection)
+			final HandlerDispatcher dispatcher = new HandlerDispatcher() {
+	
+				@Override
+				protected void once(HistoryNode n) throws HgCallbackTargetException, CancelledException, HgRuntimeException {
+					handler.treeElement(ei.init(n, currentFileNode));
+					cancelHelper.checkCancelled();
+				}
+			};
+	
+			// renamed files in the queue are placed with respect to #iterateDirection
+			// i.e. if we iterate from new to old, recent filenames come first
+			FileRenameQueueBuilder frqBuilder = new FileRenameQueueBuilder();
+			List<Pair<HgDataFile, Nodeid>> fileRenamesQueue = frqBuilder.buildFileRenamesQueue();
+			// XXX perhaps, makes sense to look at selected file's revision when followAncestry is true
+			// to ensure file we attempt to trace is in the WC's parent. Native hg aborts if not.
+			progressHelper.start(4 * fileRenamesQueue.size());
+			for (int namesIndex = 0, renamesQueueSize = fileRenamesQueue.size(); namesIndex < renamesQueueSize; namesIndex++) {
+	 
+				final Pair<HgDataFile, Nodeid> renameInfo = fileRenamesQueue.get(namesIndex);
+				dispatcher.prepare(progressHelper, renameInfo);
+				cancelHelper.checkCancelled();
+				if (namesIndex > 0) {
+					dispatcher.connectWithLastJunctionPoint(renameInfo, fileRenamesQueue.get(namesIndex - 1));
+				}
+				if (namesIndex + 1 < renamesQueueSize) {
+					// there's at least one more name we are going to look at
+					dispatcher.updateJunctionPoint(renameInfo, fileRenamesQueue.get(namesIndex+1), renameHandler != null);
+				} else {
+					dispatcher.clearJunctionPoint();
+				}
+				dispatcher.dispatchAllChanges();
+				if (renameHandler != null && namesIndex + 1 < renamesQueueSize) {
+					dispatcher.reportRenames(renameHandler);
+				}
+			} // for fileRenamesQueue;
+			frqBuilder.reportRenameIfNotInQueue(fileRenamesQueue, renameHandler);
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
+		}
 		progressHelper.done();
 	}
 	
-	private static class ReverseIterator<E> implements Iterator<E> {
-		private final ListIterator<E> listIterator;
-		
-		public ReverseIterator(List<E> list) {
-			listIterator = list.listIterator(list.size());
-		}
-
-		public boolean hasNext() {
-			return listIterator.hasPrevious();
-		}
-		public E next() {
-			return listIterator.previous();
-		}
-		public void remove() {
-			listIterator.remove();
-		}
-	}
-
 	/**
 	 * Utility to build sequence of file renames
 	 */
@@ -601,8 +581,9 @@
 		 * and possibly reuse this functionality
 		 * 
 		 * @return list of file renames, ordered with respect to {@link #iterateDirection}
+		 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 		 */
-		public List<Pair<HgDataFile, Nodeid>> buildFileRenamesQueue() throws HgPathNotFoundException {
+		public List<Pair<HgDataFile, Nodeid>> buildFileRenamesQueue() throws HgPathNotFoundException, HgRuntimeException {
 			LinkedList<Pair<HgDataFile, Nodeid>> rv = new LinkedList<Pair<HgDataFile, Nodeid>>();
 			Nodeid startRev = null;
 			HgDataFile fileNode = repo.getFileNode(file);
@@ -636,11 +617,11 @@
 			return rv;
 		}
 		
-		public boolean hasOrigin(Pair<HgDataFile, Nodeid> p) {
+		public boolean hasOrigin(Pair<HgDataFile, Nodeid> p) throws HgRuntimeException {
 			return p.first().isCopy();
 		}
 
-		public Pair<HgDataFile, Nodeid> origin(Pair<HgDataFile, Nodeid> p) {
+		public Pair<HgDataFile, Nodeid> origin(Pair<HgDataFile, Nodeid> p) throws HgRuntimeException {
 			HgDataFile fileNode = p.first();
 			assert fileNode.isCopy();
 			Path fp = fileNode.getCopySourceName();
@@ -656,7 +637,7 @@
 		 * @param queue value from {@link #buildFileRenamesQueue()}
 		 * @param renameHandler may be <code>null</code>
 		 */
-		public void reportRenameIfNotInQueue(List<Pair<HgDataFile, Nodeid>> queue, HgFileRenameHandlerMixin renameHandler) throws HgCallbackTargetException {
+		public void reportRenameIfNotInQueue(List<Pair<HgDataFile, Nodeid>> queue, HgFileRenameHandlerMixin renameHandler) throws HgCallbackTargetException, HgRuntimeException {
 			if (renameHandler != null && !followRenames) {
 				// If followRenames is true, all the historical names were in the queue and are processed already.
 				// Hence, shall process origin explicitly only when renameHandler is present but followRenames is not requested.
@@ -700,12 +681,12 @@
 			completeHistory[revisionNumber] = new HistoryNode(commitRevisions[revisionNumber], revision, p1, p2);
 		}
 		
-		HistoryNode one(HgDataFile fileNode, Nodeid fileRevision) throws HgInvalidControlFileException {
+		HistoryNode one(HgDataFile fileNode, Nodeid fileRevision) throws HgRuntimeException {
 			int fileRevIndexToVisit = fileNode.getRevisionIndex(fileRevision);
 			return one(fileNode, fileRevIndexToVisit);
 		}
 
-		HistoryNode one(HgDataFile fileNode, int fileRevIndexToVisit) throws HgInvalidControlFileException {
+		HistoryNode one(HgDataFile fileNode, int fileRevIndexToVisit) throws HgRuntimeException {
 			resultHistory = null;
 			if (fileRevIndexToVisit == HgRepository.TIP) {
 				fileRevIndexToVisit = fileNode.getLastRevision();
@@ -731,7 +712,7 @@
 		 * @return list of history elements, from oldest to newest. In case {@link #followAncestry} is <code>true</code>, the list
 		 * is modifiable (to further augment with last/first elements of renamed file histories)
 		 */
-		List<HistoryNode> go(HgDataFile fileNode, Nodeid fileLastRevisionToVisit) throws HgInvalidControlFileException {
+		List<HistoryNode> go(HgDataFile fileNode, Nodeid fileLastRevisionToVisit) throws HgRuntimeException {
 			resultHistory = null;
 			int fileLastRevIndexToVisit = fileLastRevisionToVisit == null ? fileNode.getLastRevision() : fileNode.getRevisionIndex(fileLastRevisionToVisit);
 			completeHistory = new HistoryNode[fileLastRevIndexToVisit+1];
@@ -828,7 +809,7 @@
 		private HgFileRevision copiedFrom, copiedTo; 
 
 		// parentProgress shall be initialized with 4 XXX refactor all this stuff with parentProgress 
-		public void prepare(ProgressSupport parentProgress, Pair<HgDataFile, Nodeid> renameInfo) {
+		public void prepare(ProgressSupport parentProgress, Pair<HgDataFile, Nodeid> renameInfo) throws HgRuntimeException {
 			// if we don't followAncestry, take complete history
 			// XXX treeBuildInspector knows followAncestry, perhaps the logic 
 			// whether to take specific revision or the last one shall be there?
@@ -857,7 +838,7 @@
 			switchTo(renameInfo.first());
 		}
 		
-		public void updateJunctionPoint(Pair<HgDataFile, Nodeid> curRename, Pair<HgDataFile, Nodeid> nextRename, boolean needCopyFromTo) {
+		public void updateJunctionPoint(Pair<HgDataFile, Nodeid> curRename, Pair<HgDataFile, Nodeid> nextRename, boolean needCopyFromTo) throws HgRuntimeException {
 			copiedFrom = copiedTo = null;
 			//
 			// A (old) renamed to B(new).  A(0..k..n) -> B(0..m). If followAncestry, k == n
@@ -899,7 +880,7 @@
 			}
 		}
 		
-		public void reportRenames(HgFileRenameHandlerMixin renameHandler) throws HgCallbackTargetException {
+		public void reportRenames(HgFileRenameHandlerMixin renameHandler) throws HgCallbackTargetException, HgRuntimeException {
 			if (renameHandler != null) { // shall report renames
 				assert copiedFrom != null;
 				assert copiedTo != null;
@@ -954,9 +935,9 @@
 			throw new HgInvalidStateException(String.format("For change history (cset[%d..%d]) could not find node for file change %s", csetStart, csetEnd, fileRevision.shortNotation()));
 		}
 
-		protected abstract void once(HistoryNode n) throws HgCallbackTargetException, CancelledException;
+		protected abstract void once(HistoryNode n) throws HgCallbackTargetException, CancelledException, HgRuntimeException;
 		
-		public void dispatchAllChanges() throws HgCallbackTargetException, CancelledException {
+		public void dispatchAllChanges() throws HgCallbackTargetException, CancelledException, HgRuntimeException {
 			// XXX shall sort changeHistory according to changeset numbers?
 			Iterator<HistoryNode> it;
 			if (iterateDirection == HgIterateDirection.OldToNew) {
@@ -1006,7 +987,7 @@
 			}
 		}
 
-		public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
+		public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
 			if (limit > 0 && count >= limit) {
 				return;
 			}
@@ -1045,7 +1026,7 @@
 		}
 	}
 
-	private HgParentChildMap<HgChangelog> getParentHelper(boolean create) throws HgInvalidControlFileException {
+	private HgParentChildMap<HgChangelog> getParentHelper(boolean create) throws HgRuntimeException {
 		if (parentHelper == null && create) {
 			parentHelper = new HgParentChildMap<HgChangelog>(repo.getChangelog());
 			parentHelper.init();
@@ -1143,11 +1124,11 @@
 			return fileNode;
 		}
 
-		public HgChangeset changeset() {
+		public HgChangeset changeset() throws HgRuntimeException {
 			return get(historyNode.changeset)[0];
 		}
 
-		public Pair<HgChangeset, HgChangeset> parents() {
+		public Pair<HgChangeset, HgChangeset> parents() throws HgRuntimeException {
 			if (parents != null) {
 				return parents;
 			}
@@ -1167,7 +1148,7 @@
 			return parents = new Pair<HgChangeset, HgChangeset>(r[0], r[1]);
 		}
 
-		public Collection<HgChangeset> children() {
+		public Collection<HgChangeset> children() throws HgRuntimeException {
 			if (children != null) {
 				return children;
 			}
@@ -1188,7 +1169,7 @@
 			cachedChangesets.put(cs.getRevisionIndex(), cs);
 		}
 		
-		private HgChangeset[] get(int... changelogRevisionIndex) {
+		private HgChangeset[] get(int... changelogRevisionIndex) throws HgRuntimeException {
 			HgChangeset[] rv = new HgChangeset[changelogRevisionIndex.length];
 			IntVector misses = new IntVector(changelogRevisionIndex.length, -1);
 			for (int i = 0; i < changelogRevisionIndex.length; i++) {
@@ -1210,8 +1191,7 @@
 				for (int changeset2read : changesets2read) {
 					HgChangeset cs = cachedChangesets.get(changeset2read);
 					if (cs == null) {
-						HgInvalidStateException t = new HgInvalidStateException(String.format("Can't get changeset for revision %d", changeset2read));
-						throw t.setRevisionIndex(changeset2read);
+						throw new HgInvalidStateException(String.format("Can't get changeset for revision %d", changeset2read));
 					}
 					// HgChangelog.range may reorder changesets according to their order in the changelog
 					// thus need to find original index
@@ -1244,14 +1224,14 @@
 			populate(cs.clone());
 		}
 
-		public Nodeid changesetRevision() {
+		public Nodeid changesetRevision() throws HgRuntimeException {
 			if (changesetRevision == null) {
 				changesetRevision = getRevision(historyNode.changeset);
 			}
 			return changesetRevision;
 		}
 
-		public Pair<Nodeid, Nodeid> parentRevisions() {
+		public Pair<Nodeid, Nodeid> parentRevisions() throws HgRuntimeException {
 			if (parentRevisions == null) {
 				HistoryNode p;
 				final Nodeid p1, p2;
@@ -1270,7 +1250,7 @@
 			return parentRevisions;
 		}
 
-		public Collection<Nodeid> childRevisions() {
+		public Collection<Nodeid> childRevisions() throws HgRuntimeException {
 			if (childRevisions != null) {
 				return childRevisions;
 			}
@@ -1287,7 +1267,7 @@
 		}
 		
 		// reading nodeid involves reading index only, guess, can afford not to optimize multiple reads
-		private Nodeid getRevision(int changelogRevisionNumber) {
+		private Nodeid getRevision(int changelogRevisionNumber) throws HgRuntimeException {
 			// TODO post-1.0 pipe through pool
 			HgChangeset cs = cachedChangesets.get(changelogRevisionNumber);
 			if (cs != null) {
--- a/src/org/tmatesoft/hg/core/HgManifestCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgManifestCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -190,7 +190,7 @@
 			}
 		}
 	
-		public boolean begin(int manifestRevision, Nodeid nid, int changelogRevision) {
+		public boolean begin(int manifestRevision, Nodeid nid, int changelogRevision) throws HgRuntimeException {
 			if (needDirs && manifestContent == null) {
 				manifestContent = new LinkedList<HgFileRevision>();
 			}
@@ -206,7 +206,7 @@
 				return false;
 			}
 		}
-		public boolean end(int revision) {
+		public boolean end(int revision) throws HgRuntimeException {
 			try {
 				if (needDirs) {
 					LinkedHashMap<Path, LinkedList<HgFileRevision>> breakDown = new LinkedHashMap<Path, LinkedList<HgFileRevision>>();
@@ -243,7 +243,7 @@
 			}
 		}
 		
-		public boolean next(Nodeid nid, Path fname, Flags flags) {
+		public boolean next(Nodeid nid, Path fname, Flags flags) throws HgRuntimeException {
 			if (matcher != null && !matcher.accept(fname)) {
 				return true;
 			}
--- a/src/org/tmatesoft/hg/core/HgManifestHandler.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgManifestHandler.java	Wed Jul 10 11:48:55 2013 +0200
@@ -17,6 +17,7 @@
 package org.tmatesoft.hg.core;
 
 import org.tmatesoft.hg.internal.Callback;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Path;
 
 /**
@@ -33,8 +34,9 @@
 	 * 
 	 * @param manifestRevision unique identifier of the manifest revision
 	 * @throws HgCallbackTargetException wrapper for any exception user code may produce
+	 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 	 */
-	void begin(Nodeid manifestRevision) throws HgCallbackTargetException;
+	void begin(Nodeid manifestRevision) throws HgCallbackTargetException, HgRuntimeException;
 
 	/**
 	 * If walker is configured to spit out directories, indicates files from specified directories are about to be reported.
@@ -42,16 +44,18 @@
 	 * 
 	 * @param path directory known in the manifest
 	 * @throws HgCallbackTargetException wrapper for any exception user code may produce
+	 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 	 */
-	void dir(Path path) throws HgCallbackTargetException; 
+	void dir(Path path) throws HgCallbackTargetException, HgRuntimeException; 
 
 	/**
 	 * Reports a file revision entry in the manifest
 	 * 
 	 * @param fileRevision description of the file revision
 	 * @throws HgCallbackTargetException wrapper for any exception user code may produce
+	 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 	 */
-	void file(HgFileRevision fileRevision) throws HgCallbackTargetException;
+	void file(HgFileRevision fileRevision) throws HgCallbackTargetException, HgRuntimeException;
 
 	/**
 	 * Indicates all files from the manifest revision have been reported.
@@ -59,6 +63,7 @@
 	 * 
 	 * @param manifestRevision unique identifier of the manifest revision 
 	 * @throws HgCallbackTargetException wrapper for any exception user code may produce
+	 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 	 */
-	void end(Nodeid manifestRevision) throws HgCallbackTargetException;
+	void end(Nodeid manifestRevision) throws HgCallbackTargetException, HgRuntimeException;
 }
\ No newline at end of file
--- a/src/org/tmatesoft/hg/core/HgOutgoingCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgOutgoingCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -21,13 +21,14 @@
 import java.util.TreeSet;
 
 import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.PhasesHelper;
 import org.tmatesoft.hg.internal.RepositoryComparator;
+import org.tmatesoft.hg.internal.RevisionSet;
 import org.tmatesoft.hg.repo.HgChangelog;
-import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.repo.HgRemoteRepository;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgRuntimeException;
-import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.ProgressSupport;
@@ -104,8 +105,7 @@
 	public List<Nodeid> executeLite() throws HgRemoteConnectionException, HgException, CancelledException {
 		final ProgressSupport ps = getProgressSupport(null);
 		try {
-			ps.start(10);
-			return getComparator(new ProgressSupport.Sub(ps, 5), getCancelSupport(null, true)).getLocalOnlyRevisions();
+			return getOutgoingRevisions(ps, getCancelSupport(null, true));
 		} catch (HgRuntimeException ex) {
 			throw new HgLibraryFailureException(ex);
 		} finally {
@@ -129,10 +129,16 @@
 		final ProgressSupport ps = getProgressSupport(handler);
 		final CancelSupport cs = getCancelSupport(handler, true);
 		try {
-			ps.start(-1);
-			ChangesetTransformer inspector = new ChangesetTransformer(localRepo, handler, getParentHelper(), ps, cs);
+			ps.start(200);
+			ChangesetTransformer inspector = new ChangesetTransformer(localRepo, handler, getParentHelper(), new ProgressSupport.Sub(ps, 100), cs);
 			inspector.limitBranches(branches);
-			getComparator(new ProgressSupport.Sub(ps, 1), cs).visitLocalOnlyRevisions(inspector);
+			List<Nodeid> out = getOutgoingRevisions(new ProgressSupport.Sub(ps, 100), cs);
+			int[] outRevIndex = new int[out.size()];
+			int i = 0;
+			for (Nodeid o : out) {
+				outRevIndex[i++] = localRepo.getChangelog().getRevisionIndex(o);
+			}
+			localRepo.getChangelog().range(inspector, outRevIndex);
 			inspector.checkFailure();
 		} catch (HgRuntimeException ex) {
 			throw new HgLibraryFailureException(ex);
@@ -141,7 +147,7 @@
 		}
 	}
 
-	private RepositoryComparator getComparator(ProgressSupport ps, CancelSupport cs) throws HgRemoteConnectionException, HgInvalidControlFileException, CancelledException {
+	private RepositoryComparator getComparator(ProgressSupport ps, CancelSupport cs) throws HgRemoteConnectionException, CancelledException, HgRuntimeException {
 		if (remoteRepo == null) {
 			throw new IllegalArgumentException("Shall specify remote repository to compare against");
 		}
@@ -152,7 +158,7 @@
 		return comparator;
 	}
 	
-	private HgParentChildMap<HgChangelog> getParentHelper() throws HgInvalidControlFileException {
+	private HgParentChildMap<HgChangelog> getParentHelper() throws HgRuntimeException {
 		if (parentHelper == null) {
 			parentHelper = new HgParentChildMap<HgChangelog>(localRepo.getChangelog());
 			parentHelper.init();
@@ -160,4 +166,17 @@
 		return parentHelper;
 	}
 
+	
+	private List<Nodeid> getOutgoingRevisions(ProgressSupport ps, CancelSupport cs) throws HgRemoteConnectionException, HgException, CancelledException {
+		ps.start(10);
+		final RepositoryComparator c = getComparator(new ProgressSupport.Sub(ps, 5), cs);
+		List<Nodeid> local = c.getLocalOnlyRevisions();
+		ps.worked(3);
+		PhasesHelper phaseHelper = new PhasesHelper(Internals.getInstance(localRepo));
+		if (phaseHelper.isCapableOfPhases() && phaseHelper.withSecretRoots()) {
+			local = new RevisionSet(local).subtract(phaseHelper.allSecret()).asList();
+		}
+		ps.worked(2);
+		return local;
+	}
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/core/HgPullCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.core;
+
+import java.util.List;
+
+import org.tmatesoft.hg.internal.AddRevInspector;
+import org.tmatesoft.hg.internal.COWTransaction;
+import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.PhasesHelper;
+import org.tmatesoft.hg.internal.RepositoryComparator;
+import org.tmatesoft.hg.internal.RevisionSet;
+import org.tmatesoft.hg.internal.Transaction;
+import org.tmatesoft.hg.repo.HgBundle;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgParentChildMap;
+import org.tmatesoft.hg.repo.HgRemoteRepository;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.CancelledException;
+import org.tmatesoft.hg.util.ProgressSupport;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class HgPullCommand extends HgAbstractCommand<HgPullCommand> {
+
+	private final HgRepository repo;
+	private HgRemoteRepository remote;
+
+	public HgPullCommand(HgRepository hgRepo) {
+		repo = hgRepo;
+	}
+
+	public HgPullCommand source(HgRemoteRepository hgRemote) {
+		remote = hgRemote;
+		return this;
+	}
+
+	public void execute() throws HgRemoteConnectionException, HgIOException, HgLibraryFailureException, CancelledException {
+		final ProgressSupport progress = getProgressSupport(null);
+		try {
+			progress.start(100);
+			// TODO refactor same code in HgIncomingCommand #getComparator and #getParentHelper
+			final HgChangelog clog = repo.getChangelog();
+			final HgParentChildMap<HgChangelog> parentHelper = new HgParentChildMap<HgChangelog>(clog);
+			parentHelper.init();
+			final RepositoryComparator comparator = new RepositoryComparator(parentHelper, remote);
+			// get incoming revisions
+			comparator.compare(new ProgressSupport.Sub(progress, 50), getCancelSupport(null, true));
+			final List<Nodeid> common = comparator.getCommon();
+			// get bundle with changes from remote
+			HgBundle incoming = remote.getChanges(common);
+			//
+			// add revisions to changelog, manifest, files
+			final Internals implRepo = HgInternals.getImplementationRepo(repo);
+			final AddRevInspector insp;
+			Transaction.Factory trFactory = new COWTransaction.Factory();
+			Transaction tr = trFactory.create(repo);
+			try {
+				incoming.inspectAll(insp = new AddRevInspector(implRepo, tr));
+				tr.commit();
+			} catch (HgRuntimeException ex) {
+				tr.rollback();
+				throw ex;
+			} catch (RuntimeException ex) {
+				tr.rollback();
+				throw ex;
+			}
+			progress.worked(45);
+			RevisionSet added = insp.addedChangesets();
+			
+			// get remote phases, update local phases to match that of remote
+			final PhasesHelper phaseHelper = new PhasesHelper(implRepo, parentHelper);
+			if (phaseHelper.isCapableOfPhases()) {
+				RevisionSet rsCommon = new RevisionSet(common);
+				HgRemoteRepository.Phases remotePhases = remote.getPhases();
+				if (remotePhases.isPublishingServer()) {
+					final RevisionSet knownPublic = rsCommon.union(added);
+					RevisionSet newDraft = phaseHelper.allDraft().subtract(knownPublic);
+					RevisionSet newSecret = phaseHelper.allSecret().subtract(knownPublic);
+					phaseHelper.updateRoots(newDraft.asList(), newSecret.asList());
+				} else {
+					// FIXME refactor reuse from HgPushCommand
+				}
+			}
+			progress.worked(5);
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
+		} finally {
+			progress.done();
+		}
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/core/HgPushCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,228 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import org.tmatesoft.hg.internal.BundleGenerator;
+import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.PhasesHelper;
+import org.tmatesoft.hg.internal.RepositoryComparator;
+import org.tmatesoft.hg.internal.RevisionSet;
+import org.tmatesoft.hg.repo.HgBookmarks;
+import org.tmatesoft.hg.repo.HgBundle;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgParentChildMap;
+import org.tmatesoft.hg.repo.HgPhase;
+import org.tmatesoft.hg.repo.HgRemoteRepository;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.CancelledException;
+import org.tmatesoft.hg.util.LogFacility.Severity;
+import org.tmatesoft.hg.util.Outcome;
+import org.tmatesoft.hg.util.Pair;
+import org.tmatesoft.hg.util.ProgressSupport;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class HgPushCommand extends HgAbstractCommand<HgPushCommand> {
+	
+	private final HgRepository repo;
+	private HgRemoteRepository remoteRepo;
+	private RevisionSet outgoing;
+
+	public HgPushCommand(HgRepository hgRepo) {
+		repo = hgRepo;
+	}
+	
+	public HgPushCommand destination(HgRemoteRepository hgRemote) {
+		remoteRepo = hgRemote;
+		return this;
+	}
+
+	public void execute() throws HgRemoteConnectionException, HgIOException, CancelledException, HgLibraryFailureException {
+		final ProgressSupport progress = getProgressSupport(null);
+		try {
+			progress.start(100);
+			//
+			// find out missing
+			// TODO refactor same code in HgOutgoingCommand #getComparator and #getParentHelper
+			final HgChangelog clog = repo.getChangelog();
+			final HgParentChildMap<HgChangelog> parentHelper = new HgParentChildMap<HgChangelog>(clog);
+			parentHelper.init();
+			final Internals implRepo = HgInternals.getImplementationRepo(repo);
+			final PhasesHelper phaseHelper = new PhasesHelper(implRepo, parentHelper);
+			final RepositoryComparator comparator = new RepositoryComparator(parentHelper, remoteRepo);
+			comparator.compare(new ProgressSupport.Sub(progress, 50), getCancelSupport(null, true));
+			List<Nodeid> l = comparator.getLocalOnlyRevisions();
+			if (phaseHelper.isCapableOfPhases() && phaseHelper.withSecretRoots()) {
+				RevisionSet secret = phaseHelper.allSecret();
+				outgoing = new RevisionSet(l).subtract(secret);
+			} else {
+				outgoing = new RevisionSet(l);
+			}
+			//
+			// prepare bundle
+			BundleGenerator bg = new BundleGenerator(implRepo);
+			File bundleFile = bg.create(outgoing.asList());
+			progress.worked(20);
+			HgBundle b = new HgLookup(repo.getSessionContext()).loadBundle(bundleFile);
+			//
+			// send changes
+			remoteRepo.unbundle(b, comparator.getRemoteHeads());
+			progress.worked(20);
+			//
+			// update phase information
+			if (phaseHelper.isCapableOfPhases()) {
+				RevisionSet presentSecret = phaseHelper.allSecret();
+				RevisionSet presentDraft = phaseHelper.allDraft();
+				RevisionSet secretLeft, draftLeft;
+				HgRemoteRepository.Phases remotePhases = remoteRepo.getPhases();
+				RevisionSet remoteDrafts = knownRemoteDrafts(remotePhases, parentHelper, outgoing, presentSecret);
+				if (remotePhases.isPublishingServer()) {
+					// although it's unlikely outgoing would affect secret changesets,
+					// it doesn't hurt to check secret roots along with draft ones
+					secretLeft = presentSecret.subtract(outgoing);
+					draftLeft = presentDraft.subtract(outgoing);
+				} else {
+					// shall merge local and remote phase states
+					// revisions that cease to be secret (gonna become Public), e.g. someone else pushed them
+					RevisionSet secretGone = presentSecret.intersect(remoteDrafts);
+					// parents of those remote drafts are public, mark them as public locally, too
+					RevisionSet remotePublic = presentSecret.ancestors(secretGone, parentHelper);
+					secretLeft = presentSecret.subtract(secretGone).subtract(remotePublic);
+					/*
+					 * Revisions grow from left to right (parents to the left, children to the right)
+					 * 
+					 * I: Set of local is subset of remote
+					 * 
+					 *               local draft 
+					 * --o---r---o---l---o--
+					 *       remote draft
+					 * 
+					 * Remote draft roots shall be updated
+					 *
+					 *
+					 * II: Set of local is superset of remote
+					 * 
+					 *       local draft 
+					 * --o---l---o---r---o--
+					 *               remote draft 
+					 *               
+					 * Local draft roots shall be updated
+					 */
+					RevisionSet sharedDraft = presentDraft.intersect(remoteDrafts); // (I: ~presentDraft; II: ~remoteDraft
+					// XXX do I really need sharedDrafts here? why not ancestors(remoteDrafts)?
+					RevisionSet localDraftRemotePublic = presentDraft.ancestors(sharedDraft, parentHelper); // I: 0; II: those treated public on remote
+					// remoteDrafts are local revisions known as draft@remote
+					// remoteDraftsLocalPublic - revisions that would cease to be listed as draft on remote
+					RevisionSet remoteDraftsLocalPublic = remoteDrafts.ancestors(sharedDraft, parentHelper);
+					RevisionSet remoteDraftsLeft = remoteDrafts.subtract(remoteDraftsLocalPublic);
+					// forget those deemed public by remote (drafts shared by both remote and local are ok to stay)
+					RevisionSet combinedDraft = presentDraft.union(remoteDraftsLeft);
+					draftLeft = combinedDraft.subtract(localDraftRemotePublic);
+				}
+				final RevisionSet newDraftRoots = draftLeft.roots(parentHelper);
+				final RevisionSet newSecretRoots = secretLeft.roots(parentHelper);
+				phaseHelper.updateRoots(newDraftRoots.asList(), newSecretRoots.asList());
+				//
+				// if there's a remote draft root that points to revision we know is public
+				RevisionSet remoteDraftsLocalPublic = remoteDrafts.subtract(draftLeft).subtract(secretLeft);
+				if (!remoteDraftsLocalPublic.isEmpty()) {
+					// foreach remoteDraftsLocallyPublic.heads() do push Draft->Public
+					for (Nodeid n : remoteDraftsLocalPublic.heads(parentHelper)) {
+						try {
+							Outcome upo = remoteRepo.updatePhase(HgPhase.Draft, HgPhase.Public, n);
+							if (!upo.isOk()) {
+								implRepo.getLog().dump(getClass(), Severity.Info, "Failed to update remote phase, reason: %s", upo.getMessage());
+							}
+						} catch (HgRemoteConnectionException ex) {
+							implRepo.getLog().dump(getClass(), Severity.Error, ex, String.format("Failed to update phase of %s", n.shortNotation()));
+						}
+					}
+				}
+			}
+			progress.worked(5);
+			//
+			// update bookmark information
+			HgBookmarks localBookmarks = repo.getBookmarks();
+			if (!localBookmarks.getAllBookmarks().isEmpty()) {
+				for (Pair<String,Nodeid> bm : remoteRepo.getBookmarks()) {
+					Nodeid localRevision = localBookmarks.getRevision(bm.first());
+					if (localRevision == null || !parentHelper.knownNode(bm.second())) {
+						continue;
+					}
+					// we know both localRevision and revision of remote bookmark,
+					// need to make sure we don't push an older revision than the one at the server
+					if (parentHelper.isChild(bm.second(), localRevision)) {
+						remoteRepo.updateBookmark(bm.first(), bm.second(), localRevision);
+					}
+				}
+			}
+			// XXX WTF is obsolete in namespaces key??
+			progress.worked(5);
+		} catch (IOException ex) {
+			throw new HgIOException(ex.getMessage(), null); // XXX not a nice idea to throw IOException from BundleGenerator#create
+		} catch (HgRepositoryNotFoundException ex) {
+			final HgInvalidStateException e = new HgInvalidStateException("Failed to load a just-created bundle");
+			e.initCause(ex);
+			throw new HgLibraryFailureException(e);
+		} catch (HgRuntimeException ex) {
+			throw new HgLibraryFailureException(ex);
+		} finally {
+			progress.done();
+		}
+	}
+	
+	public Collection<Nodeid> getPushedRevisions() {
+		return outgoing == null ? Collections.<Nodeid>emptyList() : outgoing.asList();
+	}
+	
+	private RevisionSet knownRemoteDrafts(HgRemoteRepository.Phases remotePhases, HgParentChildMap<HgChangelog> parentHelper, RevisionSet outgoing, RevisionSet localSecret) {
+		ArrayList<Nodeid> knownRemoteDraftRoots = new ArrayList<Nodeid>();
+		for (Nodeid rdr : remotePhases.draftRoots()) {
+			if (parentHelper.knownNode(rdr)) {
+				knownRemoteDraftRoots.add(rdr);
+			}
+		}
+		// knownRemoteDraftRoots + childrenOf(knownRemoteDraftRoots) is everything remote may treat as Draft
+		RevisionSet remoteDrafts = new RevisionSet(knownRemoteDraftRoots);
+		RevisionSet localChildren = remoteDrafts.children(parentHelper);
+		// we didn't send any local secret revision
+		localChildren = localChildren.subtract(localSecret);
+		// draft roots are among remote drafts
+		remoteDrafts = remoteDrafts.union(localChildren);
+		// 1) outgoing.children gives all local revisions accessible from outgoing.
+		// 2) outgoing.roots.children is equivalent, with a smaller intermediate set, but the way we build
+		// childrenOf doesn't really benefit from that.
+		RevisionSet localChildrenNotSent = outgoing.children(parentHelper).subtract(outgoing);
+		// remote shall know only what we've sent, subtract revisions we didn't actually send
+		remoteDrafts = remoteDrafts.subtract(localChildrenNotSent);
+		return remoteDrafts;
+	}
+}
--- a/src/org/tmatesoft/hg/core/HgRepoFacade.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgRepoFacade.java	Wed Jul 10 11:48:55 2013 +0200
@@ -101,6 +101,14 @@
 	public SessionContext getSessionContext() {
 		return context;
 	}
+	
+	/**
+	 * This factory method doesn't need this facade to be initialized with a repository.
+	 * @return command instance, never <code>null</code>
+	 */
+	public HgInitCommand createInitCommand() {
+		return new HgInitCommand(new HgLookup(context));
+	}
 
 	public HgLogCommand createLogCommand() {
 		return new HgLogCommand(repo/*, getCommandContext()*/);
@@ -153,4 +161,16 @@
 	public HgCommitCommand createCommitCommand() {
 		return new HgCommitCommand(repo);
 	}
+	
+	public HgDiffCommand createDiffCommand() {
+		return new HgDiffCommand(repo);
+	}
+
+	public HgPushCommand createPushCommand() {
+		return new HgPushCommand(repo);
+	}
+	
+	public HgPullCommand createPullCommand() {
+		return new HgPullCommand(repo);
+	}
 }
--- a/src/org/tmatesoft/hg/core/HgRepositoryLockException.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgRepositoryLockException.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,7 +16,6 @@
  */
 package org.tmatesoft.hg.core;
 
-import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.repo.HgRepositoryLock;
 
 /**
@@ -26,7 +25,6 @@
  * @author TMate Software Ltd.
  */
 @SuppressWarnings("serial")
-@Experimental(reason="Work in progress")
 public class HgRepositoryLockException extends HgException {
 	
 	public HgRepositoryLockException(String message) {
--- a/src/org/tmatesoft/hg/core/HgRevertCommand.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgRevertCommand.java	Wed Jul 10 11:48:55 2013 +0200
@@ -21,14 +21,16 @@
 import java.util.LinkedHashSet;
 import java.util.Set;
 
+import org.tmatesoft.hg.internal.COWTransaction;
 import org.tmatesoft.hg.internal.CsetParamKeeper;
 import org.tmatesoft.hg.internal.DirstateBuilder;
 import org.tmatesoft.hg.internal.DirstateReader;
-import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.Transaction;
 import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRepositoryLock;
 import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
@@ -36,14 +38,12 @@
 import org.tmatesoft.hg.util.ProgressSupport;
 
 /**
- * WORK IN PROGRESS.
- * 
  * Restore files to their checkout state, 'hg revert' counterpart.
  * 
+ * @since 1.1
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-@Experimental(reason="Work in progress")
 public class HgRevertCommand extends HgAbstractCommand<HgRevertCommand> {
 
 	private final HgRepository repo;
@@ -102,6 +102,8 @@
 	 * @throws CancelledException if execution of the command was cancelled
 	 */
 	public void execute() throws HgException, CancelledException {
+		final HgRepositoryLock wdLock = repo.getWorkingDirLock();
+		wdLock.acquire();
 		try {
 			final ProgressSupport progress = getProgressSupport(null);
 			final CancelSupport cancellation = getCancelSupport(null, true);
@@ -158,11 +160,25 @@
 				progress.worked(1);
 				cancellation.checkCancelled();
 			}
-			dirstateBuilder.serialize();
+			Transaction.Factory trFactory = new COWTransaction.Factory();
+			Transaction tr = trFactory.create(repo);
+			try {
+				// TODO same code in HgAddRemoveCommand and similar in HgCommitCommand
+				dirstateBuilder.serialize(tr);
+				tr.commit();
+			} catch (RuntimeException ex) {
+				tr.rollback();
+				throw ex;
+			} catch (HgException ex) {
+				tr.rollback();
+				throw ex;
+			}
 			progress.worked(1);
 			progress.done();
 		} catch (HgRuntimeException ex) {
 			throw new HgLibraryFailureException(ex);
+		} finally {
+			wdLock.release();
 		}
 	}
 }
--- a/src/org/tmatesoft/hg/core/HgStatus.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgStatus.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -21,6 +21,7 @@
 
 import org.tmatesoft.hg.internal.ChangelogHelper;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Path;
 
 /**
@@ -70,8 +71,9 @@
 
 	/**
 	 * @return <code>null</code> if author for the change can't be deduced (e.g. for clean files it's senseless)
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
-	public String getModificationAuthor() {
+	public String getModificationAuthor() throws HgRuntimeException {
 		RawChangeset cset = logHelper.findLatestChangeWith(path);
 		if (cset == null) {
 			if (kind == Kind.Modified || kind == Kind.Added || kind == Kind.Removed /*&& RightBoundary is TIP*/) {
@@ -84,15 +86,20 @@
 		return null;
 	}
 
-	public Date getModificationDate() {
+	/**
+	 * @return date when the file was last modified, never <code>null</code>. Either date of changeset the file was modified at
+	 * or timestamp of local file, if present
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 */
+	public Date getModificationDate() throws HgRuntimeException {
 		RawChangeset cset = logHelper.findLatestChangeWith(path);
 		if (cset == null) {
 			File localFile = new File(logHelper.getRepo().getWorkingDir(), path.toString());
 			if (localFile.canRead()) {
 				return new Date(localFile.lastModified());
 			}
-			// TODO post-1.0 find out what to do in this case, perhaps, throw an exception?
-			// perhaps check dirstate and/or local file for tstamp
+			// TODO post-1.1 find out what to do in this case, perhaps, throw an exception?
+			// perhaps check dirstate and/or local file for timestamp
 			return new Date(); // what's correct? 
 		} else {
 			return cset.date();
--- a/src/org/tmatesoft/hg/core/Nodeid.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/core/Nodeid.java	Wed Jul 10 11:48:55 2013 +0200
@@ -33,11 +33,21 @@
  *
  */
 public final class Nodeid implements Comparable<Nodeid> {
-	
+
+	/**
+	 * Length of the nodeid in bytes
+	 */
+	public static final int SIZE = 20;
+
+	/**
+	 * Length of nodeid string representation, in bytes
+	 */
+	public static final int SIZE_ASCII = 40;
+
 	/**
 	 * <b>nullid</b>, empty root revision.
 	 */
-	public static final Nodeid NULL = new Nodeid(new byte[20], false);
+	public static final Nodeid NULL = new Nodeid(new byte[SIZE], false);
 
 	private final byte[] binaryData; 
 
@@ -49,7 +59,7 @@
 	public Nodeid(byte[] binaryRepresentation, boolean shallClone) {
 		// 5 int fields => 32 bytes
 		// byte[20] => 48 bytes (16 bytes is Nodeid with one field, 32 bytes for byte[20] 
-		if (binaryRepresentation == null || binaryRepresentation.length != 20) {
+		if (binaryRepresentation == null || binaryRepresentation.length != SIZE) {
 			throw new HgBadNodeidFormatException(String.format("Bad value: %s", String.valueOf(binaryRepresentation)));
 		}
 		/*
@@ -69,8 +79,18 @@
 
 	@Override
 	public int hashCode() {
+		return hashCode(binaryData);
+	}
+	
+	/**
+	 * Handy alternative to calculate hashcode without need to get {@link Nodeid} instance
+	 * @param binaryNodeid array of exactly 20 bytes
+	 * @return same value as <code>new Nodeid(binaryNodeid, false).hashCode()</code>
+	 */
+	public static int hashCode(byte[] binaryNodeid) {
+		assert binaryNodeid.length == SIZE;
 		// digest (part thereof) seems to be nice candidate for the hashCode
-		byte[] b = binaryData;
+		byte[] b = binaryNodeid;
 		return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF);
 	}
 	
@@ -93,7 +113,7 @@
 		if (this == o) {
 			return 0;
 		}
-		for (int i = 0; i < 20; i++) {
+		for (int i = 0; i < SIZE; i++) {
 			if (binaryData[i] != o.binaryData[i]) {
 				// if we need truly ascending sort, need to respect byte sign 
 				// return (binaryData[i] & 0xFF) < (o.binaryData[i] & 0xFF) ? -1 : 1;
@@ -121,7 +141,7 @@
 		if (this == NULL) {
 			return true;
 		}
-		for (int i = 0; i < 20; i++) {
+		for (int i = 0; i < SIZE; i++) {
 			if (this.binaryData[i] != 0) {
 				return false;
 			}
@@ -143,19 +163,19 @@
 	 * @throws HgBadNodeidFormatException custom {@link IllegalArgumentException} subclass when arguments don't select 20 bytes
 	 */
 	public static Nodeid fromBinary(byte[] binaryRepresentation, int offset) {
-		if (binaryRepresentation == null || binaryRepresentation.length - offset < 20) {
+		if (binaryRepresentation == null || binaryRepresentation.length - offset < SIZE) {
 			throw new HgBadNodeidFormatException(String.format("Bad value: %s", String.valueOf(binaryRepresentation)));
 		}
 		int i = 0;
-		while (i < 20 && binaryRepresentation[offset+i] == 0) i++;
-		if (i == 20) {
+		while (i < SIZE && binaryRepresentation[offset+i] == 0) i++;
+		if (i == SIZE) {
 			return NULL;
 		}
-		if (offset == 0 && binaryRepresentation.length == 20) {
+		if (offset == 0 && binaryRepresentation.length == SIZE) {
 			return new Nodeid(binaryRepresentation, true);
 		}
-		byte[] b = new byte[20]; // create new instance if no other reasonable guesses possible
-		System.arraycopy(binaryRepresentation, offset, b, 0, 20);
+		byte[] b = new byte[SIZE]; // create new instance if no other reasonable guesses possible
+		System.arraycopy(binaryRepresentation, offset, b, 0, SIZE);
 		return new Nodeid(b, false);
 	}
 
@@ -167,11 +187,11 @@
 	 * @throws HgBadNodeidFormatException custom {@link IllegalArgumentException} subclass when argument doesn't match encoded form of 20-bytes sha1 digest. 
 	 */
 	public static Nodeid fromAscii(String asciiRepresentation) throws HgBadNodeidFormatException {
-		if (asciiRepresentation.length() != 40) {
+		if (asciiRepresentation.length() != SIZE_ASCII) {
 			throw new HgBadNodeidFormatException(String.format("Bad value: %s", asciiRepresentation));
 		}
 		// XXX is better impl for String possible?
-		return fromAscii(asciiRepresentation.toCharArray(), 0, 40);
+		return fromAscii(asciiRepresentation.toCharArray(), 0, SIZE_ASCII);
 	}
 	
 	/**
@@ -179,11 +199,11 @@
 	 * @throws HgBadNodeidFormatException custom {@link IllegalArgumentException} subclass when bytes are not hex digits or number of bytes != 40 (160 bits) 
 	 */
 	public static Nodeid fromAscii(byte[] asciiRepresentation, int offset, int length) throws HgBadNodeidFormatException {
-		if (length != 40) {
-			throw new HgBadNodeidFormatException(String.format("Expected 40 hex characters for nodeid, not %d", length));
+		if (length != SIZE_ASCII) {
+			throw new HgBadNodeidFormatException(String.format("Expected %d hex characters for nodeid, not %d", SIZE_ASCII, length));
 		}
 		try {
-			byte[] data = new byte[20];
+			byte[] data = new byte[SIZE];
 			boolean zeroBytes = DigestHelper.ascii2bin(asciiRepresentation, offset, length, data);
 			if (zeroBytes) {
 				return NULL;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/AddRevInspector.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.util.HashMap;
+import java.util.Set;
+
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgBundle;
+import org.tmatesoft.hg.repo.HgBundle.GroupElement;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.Pair;
+
+/**
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class AddRevInspector implements HgBundle.Inspector {
+	private final Internals repo;
+	private final Transaction tr;
+	private Set<Nodeid> added;
+	private RevlogStreamWriter revlog;
+	private RevMap clogRevs;
+	private RevMap revlogRevs;
+
+	public AddRevInspector(Internals implRepo, Transaction transaction) {
+		repo = implRepo;
+		tr = transaction;
+	}
+
+	public void changelogStart() throws HgRuntimeException {
+		// TODO Auto-generated method stub
+		RevlogStream rs = repo.getImplAccess().getChangelogStream();
+		revlog = new RevlogStreamWriter(repo, rs, tr);
+		revlogRevs = clogRevs = new RevMap(rs);
+	}
+
+	public void changelogEnd() throws HgRuntimeException {
+		revlog = null;
+		revlogRevs = null;
+		added = clogRevs.added();
+	}
+
+	public void manifestStart() throws HgRuntimeException {
+		RevlogStream rs = repo.getImplAccess().getManifestStream();
+		revlog = new RevlogStreamWriter(repo, rs, tr);
+		revlogRevs = new RevMap(rs);
+	}
+
+	public void manifestEnd() throws HgRuntimeException {
+		revlog = null;
+		revlogRevs = null;
+	}
+
+	public void fileStart(String name) throws HgRuntimeException {
+		HgDataFile df = repo.getRepo().getFileNode(name);
+		RevlogStream rs = repo.getImplAccess().getStream(df);
+		revlog = new RevlogStreamWriter(repo, rs, tr);
+		revlogRevs = new RevMap(rs);
+		// FIXME collect new files and update fncache
+	}
+
+	public void fileEnd(String name) throws HgRuntimeException {
+		revlog = null;
+		revlogRevs = null;
+	}
+
+	public boolean element(GroupElement ge) throws HgRuntimeException {
+		assert clogRevs != null;
+		assert revlogRevs != null;
+		try {
+			Pair<Integer, Nodeid> newRev = revlog.addPatchRevision(ge, clogRevs, revlogRevs);
+			revlogRevs.update(newRev.first(), newRev.second());
+			return true;
+		} catch (HgIOException ex) {
+			throw new HgInvalidControlFileException(ex, true);
+		}
+	}
+
+	public RevisionSet addedChangesets() {
+		return new RevisionSet(added);
+	}
+
+	private static class RevMap implements RevlogStreamWriter.RevisionToIndexMap {
+		
+		private final RevlogStream revlog;
+		private HashMap<Nodeid, Integer> added = new HashMap<Nodeid, Integer>();
+
+		public RevMap(RevlogStream revlogStream) {
+			revlog = revlogStream;
+		}
+
+		public int revisionIndex(Nodeid revision) {
+			Integer a = added.get(revision);
+			if (a != null) {
+				return a;
+			}
+			int f = revlog.findRevisionIndex(revision);
+			return f == HgRepository.BAD_REVISION ? HgRepository.NO_REVISION : f;
+		}
+		
+		public void update(Integer revIndex, Nodeid rev) {
+			added.put(rev, revIndex);
+		}
+		
+		Set<Nodeid> added() {
+			return added.keySet();
+		}
+	}
+}
\ No newline at end of file
--- a/src/org/tmatesoft/hg/internal/ArrayHelper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ArrayHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,40 +16,106 @@
  */
 package org.tmatesoft.hg.internal;
 
+import java.util.Arrays;
+
 /**
  * Internal alternative to Arrays.sort to build reversed index along with sorting
+ * and to perform lookup (binary search) without a sorted array, using the reversed index.
  * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class ArrayHelper {
-	private int[] reverse;
+public final class ArrayHelper<T extends Comparable<T>> {
+	private int[] reverse; // aka sorted2natural
+	private final T[] data;
+	private T[] sorted;
+	
+	public ArrayHelper(T[] _data) {
+		assert _data != null;
+		data = _data;
+	}
 
-	@SuppressWarnings("unchecked")
-	public void sort(Comparable<?>[] a) {
-//		Object[] aux = (Object[]) a.clone();
-		reverse = new int[a.length];
-		sort1((Comparable<Object>[])a, 0, a.length);
+	/**
+	 * Sort data this helper wraps, possibly using supplied array (optional)
+	 * to keep sorted elements
+	 * @param sortDest array to keep sorted values at, or <code>null</code>
+	 * @param sortDestIsEmpty <code>false</code> when sortDest already contains copy of data to be sorted
+	 * @param keepSorted <code>true</code> to save the sorted array for future use (e.g. in {@link #binarySearchSorted(Comparable)})
+	 */
+	public void sort(T[] sortDest, boolean sortDestIsEmpty, boolean keepSorted) {
+		if (sortDest != null) {
+			assert sortDest.length >= data.length;
+			if (sortDestIsEmpty) {
+				System.arraycopy(data, 0, sortDest, 0, data.length);
+			}
+			sorted = sortDest;
+		} else {
+			sorted = data.clone();
+		}
+		reverse = new int[data.length];
 		for (int i = 0; i < reverse.length; i++) {
-			// element that was not moved don't have an index in reverse.
-			// perhaps, can do it inside sort alg?
-			// Alternatively, may start with filling reverse[] array with initial indexes and
-			// avoid != 0 comparisons in #swap altogether?
-			if (reverse[i] == 0) {
-				reverse[i] = i+1;
-			}
+			// initial reverse indexes, so that elements that do
+			// not move during sort got correct indexes
+			reverse[i] = i;
 		}
+		sort1(0, data.length);
+		if (!keepSorted) {
+			sorted = null;
+		}
+	}
+
+	/**
+	 * @return all reverse indexes
+	 */
+	public int[] getReverseIndexes() {
+		return reverse;
+	}
+	
+	public int getReverseIndex(int sortedIndex) {
+		return reverse[sortedIndex];
+	}
+	
+	public T get(int index) {
+		return data[index];
+	}
+	
+	public T[] getData() {
+		return data;
+	}
+
+	/**
+	 * Look up sorted index of the value, using sort information 
+	 * @return same value as {@link Arrays#binarySearch(Object[], Object)} does
+	 */
+	public int binarySearchSorted(T value) {
+		if (sorted != null) {
+			return Arrays.binarySearch(sorted, 0, data.length, value);
+		}
+		return binarySearchWithReverse(0, data.length, value);
+	}
+
+	/**
+	 * Look up index of the value in the original array.
+	 * @return index in original data, or <code>defaultValue</code> if value not found
+	 */
+	public int binarySearch(T value, int defaultValue) {
+		int x = binarySearchSorted(value);
+		if (x < 0) {
+			return defaultValue;
+		}
+		return reverse[x];
 	}
 
 	/**
 	 * Slightly modified version of Arrays.sort1(int[], int, int) quicksort alg (just to deal with Object[])
 	 */
-    private void sort1(Comparable<Object> x[], int off, int len) {
+    private void sort1(int off, int len) {
+		Comparable<Object>[] x = comparableSorted();
     	// Insertion sort on smallest arrays
     	if (len < 7) {
     	    for (int i=off; i<len+off; i++)
     			for (int j=i; j>off && x[j-1].compareTo(x[j]) > 0; j--)
-    			    swap(x, j, j-1);
+    			    swap(j, j-1);
     	    return;
     	}
 
@@ -60,11 +126,11 @@
     	    int n = off + len - 1;
     	    if (len > 40) {        // Big arrays, pseudomedian of 9
     			int s = len/8;
-	    		l = med3(x, l,     l+s, l+2*s);
-	    		m = med3(x, m-s,   m,   m+s);
-	    		n = med3(x, n-2*s, n-s, n);
+	    		l = med3(l,     l+s, l+2*s);
+	    		m = med3(m-s,   m,   m+s);
+	    		n = med3(n-2*s, n-s, n);
     	    }
-    	    m = med3(x, l, m, n); // Mid-size, med of 3
+    	    m = med3(l, m, n); // Mid-size, med of 3
     	}
     	Comparable<Object> v = x[m];
 
@@ -73,67 +139,94 @@
     	while(true) {
     	    while (b <= c && x[b].compareTo(v) <= 0) {
     			if (x[b] == v)
-    			    swap(x, a++, b);
+    			    swap(a++, b);
     			b++;
     	    }
     	    while (c >= b && x[c].compareTo(v) >= 0) {
     			if (x[c] == v)
-    			    swap(x, c, d--);
+    			    swap(c, d--);
     			c--;
     	    }
     	    if (b > c)
     			break;
-    	    swap(x, b++, c--);
+    	    swap(b++, c--);
     	}
 
     	// Swap partition elements back to middle
     	int s, n = off + len;
-    	s = Math.min(a-off, b-a  );  vecswap(x, off, b-s, s);
-    	s = Math.min(d-c,   n-d-1);  vecswap(x, b,   n-s, s);
+    	s = Math.min(a-off, b-a  );  vecswap(off, b-s, s);
+    	s = Math.min(d-c,   n-d-1);  vecswap(b,   n-s, s);
 
     	// Recursively sort non-partition-elements
     	if ((s = b-a) > 1)
-    	    sort1(x, off, s);
+    	    sort1(off, s);
     	if ((s = d-c) > 1)
-    	    sort1(x, n-s, s);
+    	    sort1(n-s, s);
     }
 
     /**
      * Swaps x[a .. (a+n-1)] with x[b .. (b+n-1)].
      */
-    private void vecswap(Object[] x, int a, int b, int n) {
+    private void vecswap(int a, int b, int n) {
 		for (int i=0; i<n; i++, a++, b++) {
-		    swap(x, a, b);
+		    swap(a, b);
 		}
     }
 
     /**
      * Returns the index of the median of the three indexed integers.
      */
-    private static int med3(Comparable<Object>[] x, int a, int b, int c) {
-	return (x[a].compareTo(x[b]) < 0 ?
-		(x[b].compareTo(x[c]) < 0 ? b : x[a].compareTo(x[c]) < 0 ? c : a) :
-		(x[b].compareTo(x[c]) > 0 ? b : x[a].compareTo(x[c]) > 0 ? c : a));
+    private int med3(int a, int b, int c) {
+		Comparable<Object>[] x = comparableSorted();
+		return (x[a].compareTo(x[b]) < 0 ?
+			(x[b].compareTo(x[c]) < 0 ? b : x[a].compareTo(x[c]) < 0 ? c : a) :
+			(x[b].compareTo(x[c]) > 0 ? b : x[a].compareTo(x[c]) > 0 ? c : a));
+    }
+    
+    private Comparable<Object>[] comparableSorted() {
+    	// Comparable<Object>[] x = (Comparable<Object>[]) sorted
+		// eclipse compiler is ok with the line above, while javac doesn't understand it:
+		// inconvertible types found : T[] required: java.lang.Comparable<java.lang.Object>[]
+    	// so need to add another step
+    	Comparable<?>[] oo = sorted;
+		@SuppressWarnings("unchecked")
+		Comparable<Object>[] x = (Comparable<Object>[]) oo;
+		return x;
     }
 
-
-	/**
-	 * @return the reverse
-	 */
-	public int[] getReverse() {
-		return reverse;
-	}
-
-	/**
+    /**
 	 * Swaps x[a] with x[b].
 	 */
-	private void swap(Object[] x, int a, int b) {
+	private void swap(int a, int b) {
+		Object[] x = sorted;
 		Object t = x[a];
 		x[a] = x[b];
 		x[b] = t;
-		int z1 = reverse[a] != 0 ? reverse[a] : a+1;
-		int z2 = reverse[b] != 0 ? reverse[b] : b+1;
+		int z1 = reverse[a];
+		int z2 = reverse[b];
 		reverse[b] = z1;
 		reverse[a] = z2;
 	}
+
+	// copied from Arrays.binarySearch0, update to be instance method and to use reverse indexes
+	private int binarySearchWithReverse(int fromIndex, int toIndex, T key) {
+		int low = fromIndex;
+		int high = toIndex - 1;
+
+		while (low <= high) {
+			int mid = (low + high) >>> 1;
+			// data[reverse[x]] gives sorted value at index x
+			T midVal = data[reverse[mid]];
+			int cmp = midVal.compareTo(key);
+
+			if (cmp < 0)
+				low = mid + 1;
+			else if (cmp > 0)
+				high = mid - 1;
+			else
+				return mid; // key found
+		}
+		return -(low + 1);  // key not found.
+	}
+
 }
--- a/src/org/tmatesoft/hg/internal/BlameHelper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/BlameHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,6 +16,7 @@
  */
 package org.tmatesoft.hg.internal;
 
+import static org.tmatesoft.hg.core.HgIterateDirection.OldToNew;
 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
 
 import java.util.LinkedList;
@@ -24,44 +25,62 @@
 import org.tmatesoft.hg.core.HgCallbackTargetException;
 import org.tmatesoft.hg.internal.DiffHelper.LineSequence;
 import org.tmatesoft.hg.internal.DiffHelper.LineSequence.ByteChain;
-import org.tmatesoft.hg.repo.HgBlameFacility.Block;
-import org.tmatesoft.hg.repo.HgBlameFacility.BlockData;
-import org.tmatesoft.hg.repo.HgBlameFacility.ChangeBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.EqualBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.Inspector;
-import org.tmatesoft.hg.repo.HgBlameFacility.RevisionDescriptor;
-import org.tmatesoft.hg.repo.HgBlameFacility.RevisionDescriptor.Recipient;
-import org.tmatesoft.hg.repo.HgBlameFacility;
+import org.tmatesoft.hg.core.HgBlameInspector;
+import org.tmatesoft.hg.core.HgBlameInspector.*;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Adaptable;
 import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.Pair;
 
 /**
  * Blame implementation
- * @see HgBlameFacility
+ * @see HgBlameInspector
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
 public class BlameHelper {
 	
-	private final Inspector insp;
+	private final HgBlameInspector insp;
 	private FileLinesCache linesCache;
 
-	// FIXME exposing internals (use of FileLinesCache through cons arg and #useFileUpTo) smells bad, refactor!
-
-	public BlameHelper(Inspector inspector, int cacheHint) {
+	public BlameHelper(HgBlameInspector inspector) {
 		insp = inspector;
-		linesCache = new FileLinesCache(cacheHint);
 	}
-	
-	public void useFileUpTo(HgDataFile df, int clogRevIndex) {
-		linesCache.useFileUpTo(df, clogRevIndex);
+
+	/**
+	 * Build history of the file for the specified range (follow renames if necessary). This history
+	 * is used to access various file revision data during subsequent {@link #diff(int, int, int, int)} and
+	 * {@link #annotateChange(int, int, int[], int[])} calls. Callers can use returned history for own approaches 
+	 * to iteration over file history.
+
+	 * <p>NOTE, clogRevIndexEnd has to list name of the supplied file in the corresponding manifest,
+	 * as it's not possible to trace rename history otherwise.
+	 */
+	public FileHistory prepare(HgDataFile df, int clogRevIndexStart, int clogRevIndexEnd) throws HgRuntimeException {
+		assert clogRevIndexStart <= clogRevIndexEnd;
+		FileHistory fileHistory = new FileHistory(df, clogRevIndexStart, clogRevIndexEnd);
+		fileHistory.build();
+		int cacheHint = 5; // cache comes useful when we follow merge branches and don't want to
+		// parse base revision twice. There's no easy way to determine max(distance(all(base,merge))),
+		// hence the heuristics to use the longest history chunk:
+		for (FileRevisionHistoryChunk c : fileHistory.iterate(OldToNew)) {
+			// iteration order is not important here
+			if (c.revisionCount() > cacheHint) {
+				cacheHint = c.revisionCount();
+			}
+		}
+		linesCache = new FileLinesCache(cacheHint);
+		for (FileRevisionHistoryChunk fhc : fileHistory.iterate(OldToNew)) {
+			// iteration order is not important here
+			linesCache.useFileUpTo(fhc.getFile(), fhc.getEndChangeset());
+		}
+		return fileHistory;
 	}
 	
 	// NO_REVISION is not allowed as any argument
-	public void diff(int fileRevIndex1, int clogRevIndex1, int fileRevIndex2, int clogRevIndex2) throws HgCallbackTargetException {
+	public void diff(int fileRevIndex1, int clogRevIndex1, int fileRevIndex2, int clogRevIndex2) throws HgCallbackTargetException, HgRuntimeException {
 		HgDataFile targetFile = linesCache.getFile(clogRevIndex2);
 		LineSequence c1 = linesCache.lines(clogRevIndex1, fileRevIndex1);
 		LineSequence c2 = linesCache.lines(clogRevIndex2, fileRevIndex2);
@@ -72,7 +91,7 @@
 		bbi.checkErrors();
 	}
 
-	public void annotateChange(int fileRevIndex, int csetRevIndex, int[] fileParentRevs, int[] fileParentClogRevs) throws HgCallbackTargetException {
+	public void annotateChange(int fileRevIndex, int csetRevIndex, int[] fileParentRevs, int[] fileParentClogRevs) throws HgCallbackTargetException, HgRuntimeException {
 		HgDataFile targetFile = linesCache.getFile(csetRevIndex);
 		final LineSequence fileRevLines = linesCache.lines(csetRevIndex, fileRevIndex);
 		if (fileParentClogRevs[0] != NO_REVISION && fileParentClogRevs[1] != NO_REVISION) {
@@ -117,6 +136,9 @@
 		private final int limit;
 		private final LinkedList<Pair<Integer, HgDataFile>> files; // TODO in fact, need sparse array 
 
+		/**
+		 * @param lruLimit how many parsed file revisions to keep
+		 */
 		public FileLinesCache(int lruLimit) {
 			limit = lruLimit;
 			lruCache = new LinkedList<Pair<Integer, LineSequence>>();
@@ -150,7 +172,7 @@
 			throw new HgInvalidStateException(String.format("Got %d file-changelog mappings, but no luck for revision %d.", files.size(), clogRevIndex));
 		}
 
-		public LineSequence lines(int clogRevIndex, int fileRevIndex) {
+		public LineSequence lines(int clogRevIndex, int fileRevIndex) throws HgRuntimeException {
 			Pair<Integer, LineSequence> cached = checkCache(clogRevIndex);
 			if (cached != null) {
 				return cached.second();
@@ -192,7 +214,7 @@
 	}
 
 	private static class BlameBlockInspector extends DiffHelper.DeltaInspector<LineSequence> {
-		private final Inspector insp;
+		private final HgBlameInspector insp;
 		private final int csetOrigin;
 		private final int csetTarget;
 		private EqualBlocksCollector p2MergeCommon;
@@ -201,7 +223,7 @@
 		private final AnnotateRev annotatedRevision;
 		private HgCallbackTargetException error;
 
-		public BlameBlockInspector(HgDataFile df, int fileRevIndex, Inspector inspector, int originCset, int targetCset) {
+		public BlameBlockInspector(HgDataFile df, int fileRevIndex, HgBlameInspector inspector, int originCset, int targetCset) {
 			assert inspector != null;
 			insp = inspector;
 			annotatedRevision = new AnnotateRev();
@@ -226,7 +248,7 @@
 			ContentBlock targetContent = new ContentBlock(s2);
 			annotatedRevision.set(originContent, targetContent);
 			annotatedRevision.set(csetOrigin, csetTarget, p2MergeCommon != null ? csetMergeParent : NO_REVISION);
-			Recipient curious = Adaptable.Factory.getAdapter(insp, Recipient.class, null);
+			RevisionDescriptor.Recipient curious = Adaptable.Factory.getAdapter(insp, RevisionDescriptor.Recipient.class, null);
 			if (curious != null) {
 				try {
 					curious.start(annotatedRevision);
@@ -242,7 +264,7 @@
 			if (shallStop()) {
 				return;
 			}
-			Recipient curious = Adaptable.Factory.getAdapter(insp, Recipient.class, null);
+			RevisionDescriptor.Recipient curious = Adaptable.Factory.getAdapter(insp, RevisionDescriptor.Recipient.class, null);
 			if (curious != null) {
 				try {
 					curious.done(annotatedRevision);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/BundleGenerator.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,245 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.OutputStreamSerializer;
+import org.tmatesoft.hg.internal.Patch.PatchDataSource;
+import org.tmatesoft.hg.repo.HgBundle;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgManifest;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+
+/**
+ * @see http://mercurial.selenic.com/wiki/BundleFormat
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class BundleGenerator {
+
+	private final Internals repo;
+
+	public BundleGenerator(Internals hgRepo) {
+		repo = hgRepo;
+	}
+	
+	public File create(List<Nodeid> changesets) throws HgIOException, IOException {
+		final HgChangelog clog = repo.getRepo().getChangelog();
+		final HgManifest manifest = repo.getRepo().getManifest();
+		IntVector clogRevsVector = new IntVector(changesets.size(), 0);
+		for (Nodeid n : changesets) {
+			clogRevsVector.add(clog.getRevisionIndex(n));
+		}
+		clogRevsVector.sort(true);
+		final int[] clogRevs = clogRevsVector.toArray();
+		final IntMap<Nodeid> clogMap = new IntMap<Nodeid>(changesets.size());
+		final IntVector manifestRevs = new IntVector(changesets.size(), 0);
+		final List<HgDataFile> files = new ArrayList<HgDataFile>();
+		clog.range(new HgChangelog.Inspector() {
+			private Set<String> seenFiles = new HashSet<String>();
+			public void next(int revisionIndex, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
+				clogMap.put(revisionIndex, nodeid);
+				manifestRevs.add(manifest.getRevisionIndex(cset.manifest()));
+				for (String f : cset.files()) {
+					if (seenFiles.contains(f)) {
+						continue;
+					}
+					seenFiles.add(f);
+					HgDataFile df = repo.getRepo().getFileNode(f);
+					files.add(df);
+				}
+			}
+		}, clogRevs);
+		manifestRevs.sort(true);
+		//
+		final File bundleFile = File.createTempFile("hg4j-", ".bundle");
+		final FileOutputStream osBundle = new FileOutputStream(bundleFile);
+		final OutputStreamSerializer outRaw = new OutputStreamSerializer(osBundle);
+		outRaw.write("HG10UN".getBytes(), 0, 6);
+		//
+		RevlogStream clogStream = repo.getImplAccess().getChangelogStream();
+		new ChunkGenerator(outRaw, clogMap).iterate(clogStream, clogRevs);
+		outRaw.writeInt(0); // null chunk for changelog group
+		//
+		RevlogStream manifestStream = repo.getImplAccess().getManifestStream();
+		new ChunkGenerator(outRaw, clogMap).iterate(manifestStream, manifestRevs.toArray(true));
+		outRaw.writeInt(0); // null chunk for manifest group
+		//
+		for (HgDataFile df : sortedByName(files)) {
+			RevlogStream s = repo.getImplAccess().getStream(df);
+			final IntVector fileRevs = new IntVector();
+			s.iterate(0, TIP, false, new RevlogStream.Inspector() {
+				
+				public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
+					if (Arrays.binarySearch(clogRevs, linkRevision) >= 0) {
+						fileRevs.add(revisionIndex);
+					}
+				}
+			});
+			fileRevs.sort(true);
+			if (!fileRevs.isEmpty()) {
+				// although BundleFormat page says "filename length, filename" for a file,
+				// in fact there's a sort of 'filename chunk', i.e. filename length field includes
+				// not only length of filename, but also length of the field itself, i.e. filename.length+sizeof(int)
+				byte[] fnameBytes = df.getPath().toString().getBytes(); // FIXME check encoding in native hg (and fix accordingly in HgBundle)
+				outRaw.writeInt(fnameBytes.length + 4);
+				outRaw.writeByte(fnameBytes);
+				new ChunkGenerator(outRaw, clogMap).iterate(s, fileRevs.toArray(true));
+				outRaw.writeInt(0); // null chunk for file group
+			}
+		}
+		outRaw.writeInt(0); // null chunk to indicate no more files (although BundleFormat page doesn't mention this)
+		outRaw.done();
+		osBundle.flush();
+		osBundle.close();
+		//return new HgBundle(repo.getSessionContext(), repo.getDataAccess(), bundleFile);
+		return bundleFile;
+	}
+	
+	private static Collection<HgDataFile> sortedByName(List<HgDataFile> files) {
+		Collections.sort(files, new Comparator<HgDataFile>() {
+
+			public int compare(HgDataFile o1, HgDataFile o2) {
+				return o1.getPath().compareTo(o2.getPath());
+			}
+		});
+		return files;
+	}
+	
+	
+	public static void main(String[] args) throws Exception {
+		final HgLookup hgLookup = new HgLookup();
+		HgRepository hgRepo = hgLookup.detectFromWorkingDir();
+		BundleGenerator bg = new BundleGenerator(HgInternals.getImplementationRepo(hgRepo));
+		ArrayList<Nodeid> l = new ArrayList<Nodeid>();
+		l.add(Nodeid.fromAscii("9ef1fab9f5e3d51d70941121dc27410e28069c2d")); // 640
+		l.add(Nodeid.fromAscii("2f33f102a8fa59274a27ebbe1c2903cecac6c5d5")); // 639
+		l.add(Nodeid.fromAscii("d074971287478f69ab0a64176ce2284d8c1e91c3")); // 638
+		File bundleFile = bg.create(l);
+		HgBundle b = hgLookup.loadBundle(bundleFile);
+//		Bundle.dump(b); // FIXME dependency from dependent code
+	}
+
+	private static class ChunkGenerator implements RevlogStream.Inspector {
+		
+		private final DataSerializer ds;
+		private final IntMap<Nodeid> parentMap;
+		private final IntMap<Nodeid> clogMap;
+		private byte[] prevContent;
+		private int startParent;
+
+		public ChunkGenerator(DataSerializer dataSerializer, IntMap<Nodeid> clogNodeidMap) {
+			ds = dataSerializer;
+			parentMap = new IntMap<Nodeid>(clogNodeidMap.size());
+			clogMap = clogNodeidMap;
+		}
+		
+		public void iterate(RevlogStream s, int[] revisions) throws HgRuntimeException {
+			int[] p = s.parents(revisions[0], new int[2]);
+			startParent = p[0];
+			int[] revs2read;
+			if (startParent == NO_REVISION) {
+				revs2read = revisions;
+				prevContent = new byte[0];
+			} else {
+				revs2read = new int[revisions.length + 1];
+				revs2read[0] = startParent;
+				System.arraycopy(revisions, 0, revs2read, 1, revisions.length);
+			}
+			// FIXME this is a hack to fill parentsMap with 
+			// parents of elements that we are not going to meet with regular
+			// iteration, e.g. changes from a different branch (with some older parent),
+			// scenario: two revisions added to two different branches
+			// revisions[10, 11], parents(10) == 9, parents(11) == 7
+			// revs2read == [9,10,11], and parentsMap lacks entry for parent rev7.
+			fillMissingParentsMap(s, revisions);
+			s.iterate(revs2read, true, this);
+		}
+		
+		private void fillMissingParentsMap(RevlogStream s, int[] revisions) throws HgRuntimeException {
+			int[] p = new int[2];
+			for (int i = 1; i < revisions.length; i++) {
+				s.parents(revisions[i], p);
+				if (p[0] != NO_REVISION && Arrays.binarySearch(revisions, p[0]) < 0) {
+					parentMap.put(p[0], Nodeid.fromBinary(s.nodeid(p[0]), 0));
+				}
+				if (p[1] != NO_REVISION && Arrays.binarySearch(revisions, p[1]) < 0) {
+					parentMap.put(p[1], Nodeid.fromBinary(s.nodeid(p[1]), 0));
+				}
+			}
+		}
+		
+		public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
+			try {
+				parentMap.put(revisionIndex, Nodeid.fromBinary(nodeid, 0));
+				byte[] nextContent = data.byteArray();
+				data.done();
+				if (revisionIndex == startParent) {
+					prevContent = nextContent;
+					return;
+				}
+				Patch p = GeneratePatchInspector.delta(prevContent, nextContent);
+				prevContent = nextContent;
+				nextContent = null;
+				PatchDataSource pds = p.new PatchDataSource();
+				int len = pds.serializeLength() + 84;
+				ds.writeInt(len);
+				ds.write(nodeid, 0, Nodeid.SIZE);
+				// TODO assert parents match those in previous group elements
+				if (parent1Revision != NO_REVISION) {
+					ds.writeByte(parentMap.get(parent1Revision).toByteArray());
+				} else {
+					ds.writeByte(Nodeid.NULL.toByteArray());
+				}
+				if (parent2Revision != NO_REVISION) {
+					ds.writeByte(parentMap.get(parent2Revision).toByteArray());
+				} else {
+					ds.writeByte(Nodeid.NULL.toByteArray());
+				}
+				ds.writeByte(clogMap.get(linkRevision).toByteArray());
+				pds.serialize(ds);
+			} catch (IOException ex) {
+				// XXX odd to have an object throwing IOException used where no checked exception is allowed
+				throw new HgInvalidControlFileException(ex.getMessage(), ex, null); 
+			} catch (HgIOException ex) {
+				throw new HgInvalidControlFileException(ex, true); // XXX any way to refactor ChunkGenerator not to get checked exception here?
+			}
+		}
+	}
+}
--- a/src/org/tmatesoft/hg/internal/ByteArrayChannel.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ByteArrayChannel.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -23,7 +23,8 @@
 import org.tmatesoft.hg.util.ByteChannel;
 
 /**
- *
+ * {@link ByteChannel} implementation that serializes data into a byte array
+ * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
@@ -48,7 +49,10 @@
 		}
 	}
 
-	// TODO document what happens on write after toArray() in each case
+	/*
+	 * {@link #toArray()} calls do not clear the data collected so far; subsequent {@link #write(ByteBuffer)}
+	 * calls augment the collected content.
+	 */
 	public int write(ByteBuffer buffer) {
 		int rv = buffer.remaining();
 		if (buffers == null) {
@@ -58,9 +62,13 @@
 			copy.put(buffer);
 			buffers.add(copy);
 		}
+		result = null;
 		return rv;
 	}
 
+	/**
+	 * @return content accumulated so far
+	 */
 	public byte[] toArray() {
 		if (result != null) {
 			return result;
@@ -84,7 +92,6 @@
 				bb.get(result, off, bb.limit());
 				off += bb.limit();
 			}
-			buffers.clear();
 			return result;
 		}
 	}
--- a/src/org/tmatesoft/hg/internal/ByteArrayDataAccess.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ByteArrayDataAccess.java	Wed Jul 10 11:48:55 2013 +0200
@@ -54,9 +54,32 @@
 		if (len > (this.length - pos)) {
 			throw new IOException();
 		}
-		System.arraycopy(data, pos, buf, off, len);
+		System.arraycopy(data, offset+pos, buf, off, len);
 		pos += len;
 	}
+	@Override
+	public int readInt() throws IOException {
+		// overridden not to create an intermediate array
+		if (length - pos < 4) {
+			throw new IOException();
+		}
+		int x = offset + pos;
+		int rv = data[x++] << 24 | (data[x++] & 0xFF) << 16 | (data[x++] & 0xFF) << 8 | (data[x] & 0xFF);
+		pos += 4;
+		return rv;
+	}
+	@Override
+	public long readLong() throws IOException {
+		// overridden not to create an intermediate array
+		if (length - pos < 8) {
+			throw new IOException();
+		}
+		int x = offset + pos;
+		int i1 = data[x++] << 24 | (data[x++] & 0xFF) << 16 | (data[x++] & 0xFF) << 8 | (data[x++] & 0xFF);
+		int i2 = data[x++] << 24 | (data[x++] & 0xFF) << 16 | (data[x++] & 0xFF) << 8 | (data[x] & 0xFF);
+		pos += 8;
+		return ((long) i1) << 32 | ((long) i2 & 0x0FFFFFFFFl);
+	}
 
 	@Override
 	public ByteArrayDataAccess reset() {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/COWTransaction.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,198 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.SessionContext;
+
+/**
+ * This transaction strategy makes a copy of original file and breaks origin hard links, if any.
+ * Changes are directed to actual repository files.
+ * 
+ * On commit, remove all backup copies
+ * On rollback, move all backup files in place of original
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class COWTransaction extends Transaction {
+	
+	private final FileUtils fileHelper;
+	private final List<RollbackEntry> entries = new LinkedList<RollbackEntry>();
+	
+	public COWTransaction(SessionContext.Source ctxSource) {
+		fileHelper = new FileUtils(ctxSource.getSessionContext().getLog(), this);
+	}
+
+	@Override
+	public File prepare(File f) throws HgIOException {
+		if (known(f)) {
+			return f;
+		}
+		if (!f.exists()) {
+			return recordNonExistent(f);
+		}
+		final File parentDir = f.getParentFile();
+		assert parentDir.canWrite();
+		File copy = new File(parentDir, f.getName() + ".hg4j.copy");
+		fileHelper.copy(f, copy);
+		final long lm = f.lastModified();
+		copy.setLastModified(lm);
+		File backup = new File(parentDir, f.getName() + ".hg4j.orig");
+		if (backup.exists()) {
+			backup.delete();
+		}
+		if (!f.renameTo(backup)) {
+			throw new HgIOException(String.format("Failed to backup %s to %s", f.getName(), backup.getName()), backup);
+		}
+		if (!copy.renameTo(f)) {
+			throw new HgIOException(String.format("Failed to bring on-write copy in place (%s to %s)", copy.getName(), f.getName()), copy);
+		}
+		f.setLastModified(lm);
+		record(f, backup);
+		return f;
+	}
+
+	@Override
+	public File prepare(File origin, File backup) throws HgIOException {
+		if (known(origin)) {
+			return origin;
+		}
+		if (!origin.exists()) {
+			return recordNonExistent(origin);
+		}
+		fileHelper.copy(origin, backup);
+		final RollbackEntry e = record(origin, backup);
+		e.keepBackup = true;
+		return origin;
+	}
+
+	@Override
+	public void done(File f) throws HgIOException {
+		find(f).success = true;
+	}
+
+	@Override
+	public void failure(File f, IOException ex) {
+		find(f).failure = ex;
+	}
+
+	// XXX custom exception for commit and rollback to hold information about files rolled back
+	
+	@Override
+	public void commit() throws HgIOException {
+		for (Iterator<RollbackEntry> it = entries.iterator(); it.hasNext();) {
+			RollbackEntry e = it.next();
+			assert e.success;
+			if (e.failure != null) {
+				throw new HgIOException("Can't close transaction with a failure.", e.failure, e.origin);
+			}
+			if (!e.keepBackup && e.backup != null) {
+				e.backup.delete();
+			}
+			it.remove();
+		}
+	}
+
+	@Override
+	public void rollback() throws HgIOException {
+		LinkedList<RollbackEntry> success = new LinkedList<RollbackEntry>();
+		for (Iterator<RollbackEntry> it = entries.iterator(); it.hasNext();) {
+			RollbackEntry e = it.next();
+			e.origin.delete();
+			if (e.backup != null) {
+				if (!e.backup.renameTo(e.origin)) {
+					String msg = String.format("Transaction rollback failed, could not rename backup %s back to %s", e.backup.getName(), e.origin.getName());
+					throw new HgIOException(msg, e.origin);
+				}
+				// renameTo() doesn't update timestamp, while the rest of the code relies
+				// on file timestamp to detect revlog changes. Rollback *is* a change,
+				// even if it brings the old state.
+				e.origin.setLastModified(System.currentTimeMillis());
+			}
+			success.add(e);
+			it.remove();
+		}
+	}
+
+	private File recordNonExistent(File f) throws HgIOException {
+		record(f, null);
+		try {
+			f.getParentFile().mkdirs();
+			f.createNewFile();
+			return f;
+		} catch (IOException ex) {
+			throw new HgIOException("Failed to create new file", ex, f);
+		}
+	}
+	
+	private RollbackEntry record(File origin, File backup) {
+		final RollbackEntry e = new RollbackEntry(origin, backup);
+		entries.add(e);
+		return e;
+	}
+
+	private boolean known(File f) {
+		RollbackEntry e = lookup(f);
+		return e != null;
+	}
+
+	private RollbackEntry find(File f) {
+		RollbackEntry e = lookup(f);
+		if (e != null) {
+			return e;
+		}
+		assert false;
+		return new RollbackEntry(f,f);
+	}
+	
+	private RollbackEntry lookup(File f) {
+		for (RollbackEntry e : entries) {
+			if (e.origin.equals(f)) {
+				return e;
+			}
+		}
+		return null;
+	}
+	
+	private static class RollbackEntry {
+		public final File origin;
+		public final File backup; // may be null to indicate file didn't exist
+		public boolean success = false;
+		public IOException failure = null;
+		public boolean keepBackup = false;
+		
+		public RollbackEntry(File o, File b) {
+			origin = o;
+			backup = b;
+		}
+	}
+	
+	public static class Factory implements Transaction.Factory {
+
+		public Transaction create(SessionContext.Source ctxSource) {
+			return new COWTransaction(ctxSource);
+		}
+		
+	}
+}
--- a/src/org/tmatesoft/hg/internal/ChangelogEntryBuilder.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ChangelogEntryBuilder.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -16,6 +16,8 @@
  */
 package org.tmatesoft.hg.internal;
 
+import java.io.ByteArrayOutputStream;
+import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -23,24 +25,29 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.TimeZone;
 import java.util.Map.Entry;
+import java.util.TimeZone;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
+import org.tmatesoft.hg.repo.HgInvalidStateException;
 import org.tmatesoft.hg.util.Path;
 
 /**
- *
+ * Builds changelog entry
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class ChangelogEntryBuilder {
+public class ChangelogEntryBuilder implements DataSource {
 
 	private String user;
 	private List<Path> modifiedFiles;
 	private final Map<String, String> extrasMap = new LinkedHashMap<String, String>();
 	private Integer tzOffset;
 	private Long csetTime;
+	private Nodeid manifestRev;
+	private CharSequence comment;
 	
 	public ChangelogEntryBuilder user(String username) {
 		user = username;
@@ -89,6 +96,93 @@
 		return this;
 	}
 	
+	public ChangelogEntryBuilder manifest(Nodeid manifestRevision) {
+		manifestRev = manifestRevision;
+		return this;
+	}
+	
+	public ChangelogEntryBuilder comment(CharSequence commentString) {
+		comment = commentString;
+		return this;
+	}
+
+	public void serialize(DataSerializer out) throws HgIOException {
+		byte[] b = build();
+		out.write(b, 0, b.length);
+	}
+
+	public int serializeLength() {
+		return -1;
+	}
+
+	public byte[] build() {
+		try {
+			ByteArrayOutputStream out = new ByteArrayOutputStream();
+			final int LF = '\n';
+			CharSequence extras = buildExtras();
+			CharSequence files = buildFiles();
+			byte[] manifestRevision = manifestRev.toString().getBytes();
+			byte[] username = user().getBytes(EncodingHelper.getUTF8().name()); // XXX Java 1.5
+			out.write(manifestRevision, 0, manifestRevision.length);
+			out.write(LF);
+			out.write(username, 0, username.length);
+			out.write(LF);
+			final long csetDate = csetTime();
+			byte[] date = String.format("%d %d", csetDate, csetTimezone(csetDate)).getBytes();
+			out.write(date, 0, date.length);
+			if (extras.length() > 0) {
+				out.write(' ');
+				byte[] b = extras.toString().getBytes();
+				out.write(b, 0, b.length);
+			}
+			out.write(LF);
+			byte[] b = files.toString().getBytes();
+			out.write(b, 0, b.length);
+			out.write(LF);
+			out.write(LF);
+			byte[] cmt = comment.toString().getBytes(EncodingHelper.getUTF8().name()); // XXX Java 1.5
+			out.write(cmt, 0, cmt.length);
+			return out.toByteArray();
+		} catch (UnsupportedEncodingException ex) {
+			throw new HgInvalidStateException(ex.getMessage()); // Can't happen, UTF8 is always there
+		}
+	}
+
+	private CharSequence buildExtras() {
+		StringBuilder extras = new StringBuilder();
+		for (Iterator<Entry<String, String>> it = extrasMap.entrySet().iterator(); it.hasNext();) {
+			final Entry<String, String> next = it.next();
+			extras.append(encodeExtrasPair(next.getKey()));
+			extras.append(':');
+			extras.append(encodeExtrasPair(next.getValue()));
+			if (it.hasNext()) {
+				extras.append('\00');
+			}
+		}
+		return extras;
+	}
+
+	private CharSequence buildFiles() {
+		StringBuilder files = new StringBuilder();
+		if (modifiedFiles != null) {
+			Collections.sort(modifiedFiles);
+			for (Iterator<Path> it = modifiedFiles.iterator(); it.hasNext(); ) {
+				files.append(it.next());
+				if (it.hasNext()) {
+					files.append('\n');
+				}
+			}
+		}
+		return files;
+	}
+
+	private final static CharSequence encodeExtrasPair(String s) {
+		if (s != null) {
+			return s.replace("\\", "\\\\").replace("\n", "\\n").replace("\r", "\\r").replace("\00", "\\0");
+		}
+		return s;
+	}
+
 	private long csetTime() {
 		if (csetTime != null) { 
 			return csetTime;
@@ -102,37 +196,4 @@
 		}
 		return -(TimeZone.getDefault().getOffset(time) / 1000);
 	}
-
-	public byte[] build(Nodeid manifestRevision, String comment) {
-		String f = "%s\n%s\n%d %d %s\n%s\n\n%s";
-		StringBuilder extras = new StringBuilder();
-		for (Iterator<Entry<String, String>> it = extrasMap.entrySet().iterator(); it.hasNext();) {
-			final Entry<String, String> next = it.next();
-			extras.append(encodeExtrasPair(next.getKey()));
-			extras.append(':');
-			extras.append(encodeExtrasPair(next.getValue()));
-			if (it.hasNext()) {
-				extras.append('\00');
-			}
-		}
-		StringBuilder files = new StringBuilder();
-		if (modifiedFiles != null) {
-			for (Iterator<Path> it = modifiedFiles.iterator(); it.hasNext(); ) {
-				files.append(it.next());
-				if (it.hasNext()) {
-					files.append('\n');
-				}
-			}
-		}
-		final long date = csetTime();
-		final int tz = csetTimezone(date);
-		return String.format(f, manifestRevision.toString(), user(), date, tz, extras, files, comment).getBytes();
-	}
-
-	private final static CharSequence encodeExtrasPair(String s) {
-		if (s != null) {
-			return s.replace("\\", "\\\\").replace("\n", "\\n").replace("\r", "\\r").replace("\00", "\\0");
-		}
-		return s;
-	}
 }
--- a/src/org/tmatesoft/hg/internal/ChangelogHelper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ChangelogHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -19,8 +19,8 @@
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgInternals;
-import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Path;
 
 /**
@@ -56,7 +56,7 @@
 	 * @param file
 	 * @return changeset where specified file is mentioned among affected files, or <code>null</code> if none found up to leftBoundary
 	 */
-	public RawChangeset findLatestChangeWith(Path file) throws HgInvalidControlFileException {
+	public RawChangeset findLatestChangeWith(Path file) throws HgRuntimeException {
 		HgDataFile df = repo.getFileNode(file);
 		if (!df.exists()) {
 			return null;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/ChangelogMonitor.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+
+/**
+ * Track changes to a repository based on recent changelog revision.
+ * TODO shall be merged with {@link RevlogChangeMonitor} and {@link FileChangeMonitor} into 
+ * a single facility available from {@link SessionContext}
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class ChangelogMonitor {
+	private final HgRepository repo;
+	private int changelogRevCount = -1;
+	private Nodeid changelogLastRev = null;
+	
+	public ChangelogMonitor(HgRepository hgRepo) {
+		repo = hgRepo;
+	}
+	
+	// memorize the state of the repository's changelog
+	public void touch() throws HgRuntimeException {
+		changelogRevCount = repo.getChangelog().getRevisionCount();
+		changelogLastRev = safeGetRevision(changelogRevCount-1);
+	}
+	
+	// tells whether the present changelog state differs from the one remembered by touch()
+	public boolean isChanged() throws HgRuntimeException {
+		int rc = repo.getChangelog().getRevisionCount();
+		if (rc != changelogRevCount) {
+			return true;
+		}
+		Nodeid r = safeGetRevision(rc-1);
+		return !r.equals(changelogLastRev);
+	}
+	
+	// handles empty repository case
+	private Nodeid safeGetRevision(int revIndex) throws HgRuntimeException {
+		if (revIndex >= 0) {
+			return repo.getChangelog().getRevision(revIndex);
+		}
+		return Nodeid.NULL;
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/CommitFacility.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,281 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.repo.HgRepository.DEFAULT_BRANCH_NAME;
+import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.*;
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.Branch;
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.UndoBranch;
+import static org.tmatesoft.hg.util.LogFacility.Severity.Error;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.HgRepositoryLockException;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgPhase;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.Pair;
+import org.tmatesoft.hg.util.Path;
+
+/**
+ * Name: CommitObject, FutureCommit or PendingCommit
+ * The only public API now: {@link HgCommitCommand}.
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class CommitFacility {
+	private final Internals repo;
+	private final int p1Commit, p2Commit;
+	private Map<Path, Pair<HgDataFile, DataSource>> files = new LinkedHashMap<Path, Pair<HgDataFile, DataSource>>();
+	private Set<Path> removals = new TreeSet<Path>();
+	private String branch, user;
+
+	public CommitFacility(Internals hgRepo, int parentCommit) {
+		this(hgRepo, parentCommit, NO_REVISION);
+	}
+	
+	public CommitFacility(Internals hgRepo, int parent1Commit, int parent2Commit) {
+		repo = hgRepo;
+		p1Commit = parent1Commit;
+		p2Commit = parent2Commit;
+		if (parent1Commit != NO_REVISION && parent1Commit == parent2Commit) {
+			throw new IllegalArgumentException("Merging same revision is dubious");
+		}
+	}
+
+	public boolean isMerge() {
+		return p1Commit != NO_REVISION && p2Commit != NO_REVISION;
+	}
+
+	public void add(HgDataFile dataFile, DataSource content) {
+		if (content == null) {
+			throw new IllegalArgumentException();
+		}
+		removals.remove(dataFile.getPath());
+		files.put(dataFile.getPath(), new Pair<HgDataFile, DataSource>(dataFile, content));
+	}
+
+	public void forget(HgDataFile dataFile) {
+		files.remove(dataFile.getPath());
+		removals.add(dataFile.getPath());
+	}
+	
+	public void branch(String branchName) {
+		branch = branchName;
+	}
+	
+	public void user(String userName) {
+		user = userName;
+	}
+	
+	// this method doesn't roll transaction back in case of failure, caller's responsibility
+	// this method expects repository to be locked, if needed
+	public Nodeid commit(String message, Transaction transaction) throws HgIOException, HgRepositoryLockException, HgRuntimeException {
+		final HgChangelog clog = repo.getRepo().getChangelog();
+		final int clogRevisionIndex = clog.getRevisionCount();
+		ManifestRevision c1Manifest = new ManifestRevision(null, null);
+		ManifestRevision c2Manifest = new ManifestRevision(null, null);
+		final Nodeid p1Cset = p1Commit == NO_REVISION ? null : clog.getRevision(p1Commit);
+		final Nodeid p2Cset = p2Commit == NO_REVISION ? null : clog.getRevision(p2Commit);
+		if (p1Commit != NO_REVISION) {
+			repo.getRepo().getManifest().walk(p1Commit, p1Commit, c1Manifest);
+		}
+		if (p2Commit != NO_REVISION) {
+			repo.getRepo().getManifest().walk(p2Commit, p2Commit, c2Manifest);
+		}
+//		Pair<Integer, Integer> manifestParents = getManifestParents();
+		Pair<Integer, Integer> manifestParents = new Pair<Integer, Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
+		TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
+		HashMap<Path, Pair<Integer, Integer>> fileParents = new HashMap<Path, Pair<Integer,Integer>>();
+		for (Path f : c1Manifest.files()) {
+			HgDataFile df = repo.getRepo().getFileNode(f);
+			Nodeid fileKnownRev1 = c1Manifest.nodeid(f), fileKnownRev2;
+			final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev1);
+			final int fileRevIndex2;
+			if ((fileKnownRev2 = c2Manifest.nodeid(f)) != null) {
+				// merged files
+				fileRevIndex2 = df.getRevisionIndex(fileKnownRev2);
+			} else {
+				fileRevIndex2 = NO_REVISION;
+			}
+				
+			fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex1, fileRevIndex2));
+			newManifestRevision.put(f, fileKnownRev1);
+		}
+		//
+		// Forget removed
+		for (Path p : removals) {
+			newManifestRevision.remove(p);
+		}
+		//
+		saveCommitMessage(message);
+		//
+		// Register new/changed
+		LinkedHashMap<Path, RevlogStream> newlyAddedFiles = new LinkedHashMap<Path, RevlogStream>();
+		ArrayList<Path> touchInDirstate = new ArrayList<Path>();
+		for (Pair<HgDataFile, DataSource> e : files.values()) {
+			HgDataFile df = e.first();
+			DataSource bds = e.second();
+			Pair<Integer, Integer> fp = fileParents.get(df.getPath());
+			if (fp == null) {
+				// NEW FILE
+				fp = new Pair<Integer, Integer>(NO_REVISION, NO_REVISION);
+			}
+			RevlogStream contentStream = repo.getImplAccess().getStream(df);
+			if (!df.exists()) {
+				newlyAddedFiles.put(df.getPath(), contentStream);
+			}
+			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo, contentStream, transaction);
+			Nodeid fileRev = fileWriter.addRevision(bds, clogRevisionIndex, fp.first(), fp.second()).second();
+			newManifestRevision.put(df.getPath(), fileRev);
+			touchInDirstate.add(df.getPath());
+		}
+		//
+		// Manifest
+		final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder(repo.buildFileNameEncodingHelper());
+		for (Map.Entry<Path, Nodeid> me : newManifestRevision.entrySet()) {
+			manifestBuilder.add(me.getKey().toString(), me.getValue());
+		}
+		RevlogStreamWriter manifestWriter = new RevlogStreamWriter(repo, repo.getImplAccess().getManifestStream(), transaction);
+		Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder, clogRevisionIndex, manifestParents.first(), manifestParents.second()).second();
+		//
+		// Changelog
+		final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder();
+		changelogBuilder.setModified(files.keySet());
+		changelogBuilder.branch(branch == null ? DEFAULT_BRANCH_NAME : branch);
+		changelogBuilder.user(String.valueOf(user));
+		changelogBuilder.manifest(manifestRev).comment(message);
+		RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo, repo.getImplAccess().getChangelogStream(), transaction);
+		Nodeid changesetRev = changelogWriter.addRevision(changelogBuilder, clogRevisionIndex, p1Commit, p2Commit).second();
+		// TODO move fncache update to an external facility, along with dirstate and bookmark update
+		if (!newlyAddedFiles.isEmpty() && repo.fncacheInUse()) {
+			FNCacheFile fncache = new FNCacheFile(repo);
+			for (Path p : newlyAddedFiles.keySet()) {
+				fncache.addIndex(p);
+				if (!newlyAddedFiles.get(p).isInlineData()) {
+					fncache.addData(p);
+				}
+			}
+			try {
+				fncache.write();
+			} catch (IOException ex) {
+				// see comment above for fncache.read()
+				repo.getLog().dump(getClass(), Error, ex, "Failed to write fncache, error ignored");
+			}
+		}
+		String oldBranchValue = DirstateReader.readBranch(repo);
+		String newBranchValue = branch == null ? DEFAULT_BRANCH_NAME : branch;
+		if (!oldBranchValue.equals(newBranchValue)) {
+			// prepare undo.branch as described in http://mercurial.selenic.com/wiki/FileFormats#undo..2A
+			File branchFile = transaction.prepare(repo.getRepositoryFile(Branch), repo.getRepositoryFile(UndoBranch));
+			FileOutputStream fos = null;
+			try {
+				fos = new FileOutputStream(branchFile);
+				fos.write(newBranchValue.getBytes(EncodingHelper.getUTF8().name())); // XXX Java 1.5
+				fos.flush();
+				fos.close();
+				fos = null;
+				transaction.done(branchFile);
+			} catch (IOException ex) {
+				transaction.failure(branchFile, ex);
+				repo.getLog().dump(getClass(), Error, ex, "Failed to write branch information, error ignored");
+			} finally {
+				try {
+					if (fos != null) {
+						fos.close();
+					}
+				} catch (IOException ex) {
+					repo.getLog().dump(getClass(), Error, ex, null);
+				}
+			}
+		}
+		// bring dirstate up to commit state, TODO share this code with HgAddRemoveCommand
+		final DirstateBuilder dirstateBuilder = new DirstateBuilder(repo);
+		dirstateBuilder.fillFrom(new DirstateReader(repo, new Path.SimpleSource()));
+		for (Path p : removals) {
+			dirstateBuilder.recordRemoved(p);
+		}
+		for (Path p : touchInDirstate) {
+			dirstateBuilder.recordUncertain(p);
+		}
+		dirstateBuilder.parents(changesetRev, Nodeid.NULL);
+		dirstateBuilder.serialize(transaction);
+		// update bookmarks
+		if (p1Commit != NO_REVISION || p2Commit != NO_REVISION) {
+			repo.getRepo().getBookmarks().updateActive(p1Cset, p2Cset, changesetRev);
+		}
+		PhasesHelper phaseHelper = new PhasesHelper(repo);
+		HgPhase newCommitPhase = HgPhase.parse(repo.getRepo().getConfiguration().getStringValue("phases", "new-commit", HgPhase.Draft.mercurialString()));
+		phaseHelper.newCommitNode(changesetRev, newCommitPhase);
+		// TODO Revisit: might be reasonable to send out a "Repo changed" notification, to clear
+		// e.g. cached branch, tags and so on, not to rely on file change detection methods?
+		// The same notification might come useful once Pull is implemented
+		return changesetRev;
+	}
+	
+	private void saveCommitMessage(String message) throws HgIOException {
+		File lastMessage = repo.getRepositoryFile(LastMessage);
+		// do not attempt to write if we are going to fail anyway
+		if ((lastMessage.isFile() && !lastMessage.canWrite()) || !lastMessage.getParentFile().canWrite()) {
+			return;
+		}
+		FileWriter w = null;
+		try {
+			w = new FileWriter(lastMessage);
+			w.write(message == null ? new String() : message);
+			w.flush();
+		} catch (IOException ex) {
+			throw new HgIOException("Failed to save last commit message", ex, lastMessage);
+		} finally {
+			new FileUtils(repo.getLog(), this).closeQuietly(w, lastMessage);
+		}
+	}
+/*
+	private Pair<Integer, Integer> getManifestParents() {
+		return new Pair<Integer, Integer>(extractManifestRevisionIndex(p1Commit), extractManifestRevisionIndex(p2Commit));
+	}
+
+	private int extractManifestRevisionIndex(int clogRevIndex) {
+		if (clogRevIndex == NO_REVISION) {
+			return NO_REVISION;
+		}
+		RawChangeset commitObject = repo.getChangelog().range(clogRevIndex, clogRevIndex).get(0);
+		Nodeid manifestRev = commitObject.manifest();
+		if (manifestRev.isNull()) {
+			return NO_REVISION;
+		}
+		return repo.getManifest().getRevisionIndex(manifestRev);
+	}
+*/
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/CompleteRepoLock.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.util.LogFacility.Severity.Error;
+
+import org.tmatesoft.hg.core.HgRepositoryLockException;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRepositoryLock;
+import org.tmatesoft.hg.util.LogFacility;
+
+/**
+ * Helper to lock both storage and working directory
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class CompleteRepoLock {
+
+	private final HgRepository repo;
+	private HgRepositoryLock wdLock, storeLock;
+
+	public CompleteRepoLock(HgRepository hgRepo) {
+		repo = hgRepo;
+	}
+
+	public void acquire() throws HgRepositoryLockException {
+		wdLock = repo.getWorkingDirLock();
+		storeLock = repo.getStoreLock();
+		wdLock.acquire();
+		try {
+			storeLock.acquire();
+		} catch (HgRepositoryLockException ex) {
+			try {
+				wdLock.release();
+			} catch (HgRepositoryLockException e2) {
+				final LogFacility log = repo.getSessionContext().getLog();
+				log.dump(getClass(), Error, e2, "Nested exception ignored once failed to acquire store lock");
+			}
+			throw ex;
+		}
+
+	}
+	
+	public void release() throws HgRepositoryLockException {
+		try {
+			storeLock.release();
+		} catch (HgRepositoryLockException ex) {
+			try {
+				wdLock.release();
+			} catch (HgRepositoryLockException e2) {
+				final LogFacility log = repo.getSessionContext().getLog();
+				log.dump(getClass(), Error, e2, "Nested exception ignored when releasing working directory lock");
+			}
+			throw ex;
+		}
+		wdLock.release();
+	}
+}
--- a/src/org/tmatesoft/hg/internal/ConfigFile.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ConfigFile.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -31,8 +31,8 @@
 import java.util.List;
 import java.util.Map;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.SessionContext;
-import org.tmatesoft.hg.repo.HgInvalidFileException;
 import org.tmatesoft.hg.util.LogFacility;
 
 /**
@@ -50,7 +50,7 @@
 		sessionContext = ctx;
 	}
 
-	public void addLocation(File path) throws HgInvalidFileException {
+	public void addLocation(File path) throws HgIOException {
 		read(path);
 	}
 	
@@ -125,7 +125,7 @@
 		}
 	}
 	
-	private void read(File f) throws HgInvalidFileException {
+	private void read(File f) throws HgIOException {
 		if (f == null || !f.canRead()) {
 			return;
 		}
@@ -183,7 +183,7 @@
 		private Map<String,String> section = new LinkedHashMap<String, String>();
 		private File contextFile;
 
-		// TODO "" and lists
+		// TODO [post-1.1] "" and lists
 		// XXX perhaps, single string to keep whole section with substrings for keys/values to minimize number of arrays (String.value)
 		public boolean consume(String line, ConfigFile cfg) throws IOException {
 			int x;
@@ -227,7 +227,7 @@
 			return true;
 		}
 		
-		public void go(File f, ConfigFile cfg) throws HgInvalidFileException {
+		public void go(File f, ConfigFile cfg) throws HgIOException {
 			contextFile = f;
 			LineReader lr = new LineReader(f, cfg.sessionContext.getLog());
 			lr.ignoreLineComments("#");
@@ -237,7 +237,7 @@
 		// include failure doesn't propagate
 		private void processInclude(String includeValue, ConfigFile cfg) {
 			File f; 
-			// TODO handle environment variable expansion
+			// TODO [post-1.1] handle environment variable expansion
 			if (includeValue.startsWith("~/")) {
 				f = new File(System.getProperty("user.home"), includeValue.substring(2));
 			} else {
@@ -250,7 +250,7 @@
 					LogFacility lf = cfg.sessionContext.getLog();
 					lf.dump(ConfigFile.class, LogFacility.Severity.Debug, "Can't read file to  include: %s", f);
 				}
-			} catch (HgInvalidFileException ex) {
+			} catch (HgIOException ex) {
 				LogFacility lf = cfg.sessionContext.getLog();
 				lf.dump(ConfigFile.class, LogFacility.Severity.Warn, "Can't include %s (%s)", f, includeValue);
 			}
--- a/src/org/tmatesoft/hg/internal/CsetParamKeeper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/CsetParamKeeper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -23,6 +23,7 @@
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgInvalidRevisionException;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 
 /**
  * Common code to keep changelog revision and to perform boundary check.
@@ -43,19 +44,25 @@
 			set(repo.getChangelog().getRevisionIndex(changeset));
 		} catch (HgInvalidRevisionException ex) {
 			throw new HgBadArgumentException("Can't find revision", ex).setRevision(changeset);
+		} catch (HgRuntimeException ex) {
+			throw new HgBadArgumentException(String.format("Can't initialize with revision %s", changeset.shortNotation()), ex);
 		}
 		return this;
 	}
 	
 	public CsetParamKeeper set(int changelogRevIndex) throws HgBadArgumentException {
-		int lastCsetIndex = repo.getChangelog().getLastRevision();
-		if (changelogRevIndex == HgRepository.TIP) {
-			changelogRevIndex = lastCsetIndex;
+		try {
+			int lastCsetIndex = repo.getChangelog().getLastRevision();
+			if (changelogRevIndex == HgRepository.TIP) {
+				changelogRevIndex = lastCsetIndex;
+			}
+			if (changelogRevIndex < 0 || changelogRevIndex > lastCsetIndex) {
+				throw new HgBadArgumentException(String.format("Bad revision index %d, value from [0..%d] expected", changelogRevIndex, lastCsetIndex), null).setRevisionIndex(changelogRevIndex);
+			}
+			doSet(changelogRevIndex);
+		} catch (HgRuntimeException ex) {
+			throw new HgBadArgumentException(String.format("Can't initialize with revision index %d", changelogRevIndex), ex);
 		}
-		if (changelogRevIndex < 0 || changelogRevIndex > lastCsetIndex) {
-			throw new HgBadArgumentException(String.format("Bad revision index %d, value from [0..%d] expected", changelogRevIndex, lastCsetIndex), null).setRevisionIndex(changelogRevIndex);
-		}
-		doSet(changelogRevIndex);
 		return this;
 	}
 	
@@ -74,7 +81,7 @@
 	 * @param defaultRevisionIndex value to return when no revision was set, may be {@link HgRepository#TIP} which gets translated to real index if used
 	 * @return changelog revision index if set, or defaultRevisionIndex value otherwise
 	 */
-	public int get(int defaultRevisionIndex) {
+	public int get(int defaultRevisionIndex) throws HgRuntimeException {
 		// XXX perhaps, shall translate other predefined constants (like WORKING COPY) here, too (e.g. for HgRevertCommand)
 		if (changelogRevisionIndex != BAD_REVISION || changelogRevisionIndex != TIP) {
 			return changelogRevisionIndex;
--- a/src/org/tmatesoft/hg/internal/DataAccess.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DataAccess.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2010-2012 TMate Software Ltd
+ * Copyright (c) 2010-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -99,9 +99,10 @@
 		}
 		throw new IOException(String.format("No data, can't read %d bytes", length));
 	}
-	// reads bytes into ByteBuffer, up to its limit or total data length, whichever smaller
-	// TODO post-1.0 perhaps, in DataAccess paradigm (when we read known number of bytes, we shall pass specific byte count to read)
-	// for 1.0, it's ok as it's our internal class
+	/**
+	 * reads bytes into ByteBuffer, up to its limit or total data length, whichever smaller.
+	 * XXX perhaps, in DataAccess paradigm (when we read known number of bytes, we shall pass specific byte count to read)
+	 */
 	public void readBytes(ByteBuffer buf) throws IOException {
 //		int toRead = Math.min(buf.remaining(), (int) length());
 //		if (buf.hasArray()) {
@@ -111,7 +112,7 @@
 //			readBytes(bb, 0, bb.length);
 //			buf.put(bb);
 //		}
-		// TODO post-1.0 optimize to read as much as possible at once
+		// TODO [post-1.1] optimize to read as much as possible at once
 		while (!isEmpty() && buf.hasRemaining()) {
 			buf.put(readByte());
 		}
@@ -120,8 +121,14 @@
 		throw new UnsupportedOperationException();
 	}
 
-	// XXX decide whether may or may not change position in the DataAccess
-	// TODO REVISIT exception handling may not be right, initially just for the sake of quick test
+	/**
+	 * Content of this DataAccess as byte array.
+	 * Note, likely changes position in the DataAccess.
+	 * Might provide direct access to underlying data structure in certain cases, do not alter.
+	 * 
+	 * @return byte array of {@link #length()} size, filled with data   
+	 * @throws IOException
+	 */
 	public byte[] byteArray() throws IOException {
 		reset();
 		byte[] rv = new byte[length()];
--- a/src/org/tmatesoft/hg/internal/DataAccessProvider.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DataAccessProvider.java	Wed Jul 10 11:48:55 2013 +0200
@@ -21,14 +21,13 @@
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.util.LogFacility;
 
@@ -54,8 +53,6 @@
 	private final int mapioMagicBoundary;
 	private final int bufferSize, mapioBufSize;
 	private final SessionContext context;
-	// not the right place for the property, but DAP is the only place currently available to RevlogStream to get the value
-	private final boolean shallMergePatches;
 	
 	public DataAccessProvider(SessionContext ctx) {
 		context = ctx;
@@ -63,7 +60,6 @@
 		mapioMagicBoundary = mapioBoundaryValue(pm.getInt(CFG_PROPERTY_MAPIO_LIMIT, DEFAULT_MAPIO_LIMIT));
 		bufferSize = pm.getInt(CFG_PROPERTY_FILE_BUFFER_SIZE, DEFAULT_FILE_BUFFER);
 		mapioBufSize = pm.getInt(CFG_PROPERTY_MAPIO_BUFFER_SIZE, DEFAULT_MAPIO_BUFFER);
-		shallMergePatches = pm.getBoolean(Internals.CFG_PROPERTY_PATCH_MERGE, false);
 	}
 	
 	public DataAccessProvider(SessionContext ctx, int mapioBoundary, int regularBufferSize, int mapioBufferSize) {
@@ -71,36 +67,30 @@
 		mapioMagicBoundary = mapioBoundaryValue(mapioBoundary);
 		bufferSize = regularBufferSize;
 		mapioBufSize = mapioBufferSize;
-		shallMergePatches = new PropertyMarshal(ctx).getBoolean(Internals.CFG_PROPERTY_PATCH_MERGE, false);
 	}
 	
-	// TODO [post-1.1] find a better place for this option, it's unrelated to the DAP
-	public boolean shallMergePatches() {
-		return shallMergePatches;
-	}
-
 	// ensure contract of CFG_PROPERTY_MAPIO_LIMIT, for mapioBoundary == 0 use MAX_VALUE so that no file is memmap-ed
 	private static int mapioBoundaryValue(int mapioBoundary) {
 		return mapioBoundary == 0 ? Integer.MAX_VALUE : mapioBoundary;
 	}
 
-	public DataAccess createReader(File f) {
+	public DataAccess createReader(File f, boolean shortRead) {
 		if (!f.exists()) {
 			return new DataAccess();
 		}
 		try {
-			FileChannel fc = new FileInputStream(f).getChannel();
-			long flen = fc.size();
-			if (flen > mapioMagicBoundary) {
+			FileInputStream fis = new FileInputStream(f);
+			long flen = f.length();
+			if (!shortRead && flen > mapioMagicBoundary) {
 				// TESTS: bufLen of 1024 was used to test MemMapFileAccess
-				return new MemoryMapFileAccess(fc, flen, mapioBufSize, context.getLog());
+				return new MemoryMapFileAccess(fis, flen, mapioBufSize, context.getLog());
 			} else {
 				// XXX once implementation is more or less stable,
 				// may want to try ByteBuffer.allocateDirect() to see
 				// if there's any performance gain. 
 				boolean useDirectBuffer = false; // XXX might be another config option
 				// TESTS: bufferSize of 100 was used to check buffer underflow states when readBytes reads chunks bigger than bufSize
-				return new FileAccess(fc, flen, bufferSize, useDirectBuffer, context.getLog());
+				return new FileAccess(fis, flen, bufferSize, useDirectBuffer, context.getLog());
 			}
 		} catch (IOException ex) {
 			// unlikely to happen, we've made sure file exists.
@@ -109,23 +99,17 @@
 		return new DataAccess(); // non-null, empty.
 	}
 	
-	public DataSerializer createWriter(File f, boolean createNewIfDoesntExist) {
+	public DataSerializer createWriter(final Transaction tr, File f, boolean createNewIfDoesntExist) {
 		if (!f.exists() && !createNewIfDoesntExist) {
 			return new DataSerializer();
 		}
-		try {
-			return new StreamDataSerializer(context.getLog(), new FileOutputStream(f, true));
-		} catch (final FileNotFoundException ex) {
-			context.getLog().dump(getClass(), Error, ex, null);
-			return new DataSerializer() {
-				public void write(byte[] data, int offset, int length) throws IOException {
-					throw ex;
-				}
-			};
-		}
+		// TODO invert RevlogStreamWriter to send DataSource here instead of grabbing DataSerializer
+		// to control the moment transaction gets into play and whether it fails or not
+		return new TransactionAwareFileSerializer(tr, f);
 	}
 
 	private static class MemoryMapFileAccess extends DataAccess {
+		private FileInputStream fileStream;
 		private FileChannel fileChannel;
 		private long position = 0; // always points to buffer's absolute position in the file
 		private MappedByteBuffer buffer;
@@ -133,8 +117,9 @@
 		private final int memBufferSize;
 		private final LogFacility logFacility;
 
-		public MemoryMapFileAccess(FileChannel fc, long channelSize, int bufferSize, LogFacility log) {
-			fileChannel = fc;
+		public MemoryMapFileAccess(FileInputStream fis, long channelSize, int bufferSize, LogFacility log) {
+			fileStream = fis;
+			fileChannel = fis.getChannel();
 			size = channelSize;
 			logFacility = log;
 			memBufferSize = bufferSize > channelSize ? (int) channelSize : bufferSize; // no reason to waste memory more than there's data 
@@ -258,27 +243,26 @@
 		@Override
 		public void done() {
 			buffer = null;
-			if (fileChannel != null) {
-				try {
-					fileChannel.close();
-				} catch (IOException ex) {
-					logFacility.dump(getClass(), Warn, ex, null);
-				}
-				fileChannel = null;
+			if (fileStream != null) {
+				new FileUtils(logFacility, this).closeQuietly(fileStream);
+				fileStream = null;
+				fileChannel = null; // channel is closed together with stream
 			}
 		}
 	}
 
 	// (almost) regular file access - FileChannel and buffers.
 	private static class FileAccess extends DataAccess {
+		private FileInputStream fileStream;
 		private FileChannel fileChannel;
 		private ByteBuffer buffer;
 		private long bufferStartInFile = 0; // offset of this.buffer in the file.
 		private final long size;
 		private final LogFacility logFacility;
 
-		public FileAccess(FileChannel fc, long channelSize, int bufferSizeHint, boolean useDirect, LogFacility log) {
-			fileChannel = fc;
+		public FileAccess(FileInputStream fis, long channelSize, int bufferSizeHint, boolean useDirect, LogFacility log) {
+			fileStream = fis;
+			fileChannel = fis.getChannel();
 			size = channelSize;
 			logFacility = log;
 			final int capacity = size < bufferSizeHint ? (int) size : bufferSizeHint;
@@ -389,69 +373,66 @@
 
 		@Override
 		public void done() {
-			if (buffer != null) {
-				buffer = null;
-			}
-			if (fileChannel != null) {
-				try {
-					fileChannel.close();
-				} catch (IOException ex) {
-					logFacility.dump(getClass(), Warn, ex, null);
-				}
+			buffer = null;
+			if (fileStream != null) {
+				new FileUtils(logFacility, this).closeQuietly(fileStream);
+				fileStream = null;
 				fileChannel = null;
 			}
 		}
 	}
+	
+	/**
+	 * Appends serialized changes to the end of the file
+	 */
+	private static class TransactionAwareFileSerializer extends DataSerializer {
+		
+		private final Transaction transaction;
+		private final File file;
+		private FileOutputStream fos;
+		private File transactionFile;
+		private boolean writeFailed = false;
 
-	public/*XXX, private, once HgCloneCommand stops using it */ static class StreamDataSerializer extends DataSerializer {
-		private final OutputStream out;
-		private final LogFacility log;
-		private byte[] buffer;
-	
-		public StreamDataSerializer(LogFacility logFacility, OutputStream os) {
-			assert os != null;
-			out = os;
-			log = logFacility;
+		public TransactionAwareFileSerializer(Transaction tr, File f) {
+			transaction = tr;
+			file = f;
 		}
 		
 		@Override
-		public void write(byte[] data, int offset, int length) throws IOException {
-			out.write(data, offset, length);
-		}
-	
-		@Override
-		public void writeInt(int... values) throws IOException {
-			ensureBufferSize(4*values.length); // sizeof(int)
-			int idx = 0;
-			for (int v : values) {
-				DataSerializer.bigEndian(v, buffer, idx);
-				idx += 4;
+		public void write(byte[] data, int offset, int length) throws HgIOException {
+			try {
+				if (fos == null) {
+					transactionFile = transaction.prepare(file);
+					fos = new FileOutputStream(transactionFile, true);
+				}
+				fos.write(data, offset, length);
+				fos.flush();
+			} catch (IOException ex) {
+				writeFailed = true;
+				transaction.failure(transactionFile, ex);
+				throw new HgIOException("Write failure", ex, transactionFile);
 			}
-			out.write(buffer, 0, idx);
 		}
 		
 		@Override
-		public void writeByte(byte... values) throws IOException {
-			if (values.length == 1) {
-				out.write(values[0]);
-			} else {
-				out.write(values, 0, values.length);
-			}
-		}
-		
-		private void ensureBufferSize(int bytesNeeded) {
-			if (buffer == null || buffer.length < bytesNeeded) {
-				buffer = new byte[bytesNeeded];
-			}
-		}
-	
-		@Override
-		public void done() {
-			try {
-				out.flush();
-				out.close();
-			} catch (IOException ex) {
-				log.dump(getClass(), Error, ex, "Failure to close stream");
+		public void done() throws HgIOException {
+			if (fos != null) {
+				assert transactionFile != null;
+				try {
+					fos.close();
+					if (!writeFailed) {
+					// XXX, Transaction#done() assumes there's no error, but perhaps it's easier to 
+						// rely on #failure(), and call #done() always (or change #done() to #success())
+						transaction.done(transactionFile);
+					}
+					fos = null;
+				} catch (IOException ex) {
+					if (!writeFailed) {
+						// do not eclipse original exception
+						transaction.failure(transactionFile, ex);
+					}
+					throw new HgIOException("Write failure", ex, transactionFile);
+				}
 			}
 		}
 	}
--- a/src/org/tmatesoft/hg/internal/DataSerializer.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DataSerializer.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,7 +16,12 @@
  */
 package org.tmatesoft.hg.internal;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
+
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 
 /**
  * Serialization friend of {@link DataAccess}
@@ -26,28 +31,36 @@
  */
 @Experimental(reason="Work in progress")
 public class DataSerializer {
+	private byte[] buffer;
 	
-	public void writeByte(byte... values) throws IOException {
+	public void writeByte(byte... values) throws HgIOException {
 		write(values, 0, values.length);
 	}
 
-	public void writeInt(int... values) throws IOException {
-		byte[] buf = new byte[4];
+	public void writeInt(int... values) throws HgIOException {
+		ensureBufferSize(4*values.length); // sizeof(int)
+		int idx = 0;
 		for (int v : values) {
-			bigEndian(v, buf, 0);
-			write(buf, 0, buf.length);
+			bigEndian(v, buffer, idx);
+			idx += 4;
+		}
+		write(buffer, 0, idx);
+	}
+
+	public void write(byte[] data, int offset, int length) throws HgIOException {
+		throw new HgIOException("Attempt to write to non-existent file", null);
+	}
+
+	public void done() throws HgIOException {
+		// no-op
+	}
+	
+	private void ensureBufferSize(int bytesNeeded) {
+		if (buffer == null || buffer.length < bytesNeeded) {
+			buffer = new byte[bytesNeeded];
 		}
 	}
 
-	public void write(byte[] data, int offset, int length) throws IOException {
-		throw new IOException("Attempt to write to non-existent file");
-	}
-
-	public void done() {
-		// FIXME perhaps, shall allow IOException, too
-		// no-op
-	}
-	
 	/**
 	 * Writes 4 bytes of supplied value into the buffer at given offset, big-endian. 
 	 */
@@ -63,14 +76,18 @@
 	 * Denotes an entity that wants to/could be serialized
 	 */
 	@Experimental(reason="Work in progress")
-	interface DataSource {
-		public void serialize(DataSerializer out) throws IOException;
+	public interface DataSource {
+		/**
+		 * Invoked once for a single write operation, 
+		 * although the source itself may get serialized several times
+		 */
+		public void serialize(DataSerializer out) throws HgIOException, HgRuntimeException;
 
 		/**
 		 * Hint of data length it would like to writes
 		 * @return -1 if can't answer
 		 */
-		public int serializeLength();
+		public int serializeLength() throws HgRuntimeException;
 	}
 	
 	public static class ByteArrayDataSource implements DataSource {
@@ -81,7 +98,7 @@
 			data = bytes;
 		}
 
-		public void serialize(DataSerializer out) throws IOException {
+		public void serialize(DataSerializer out) throws HgIOException {
 			if (data != null) {
 				out.write(data, 0, data.length);
 			}
@@ -90,6 +107,43 @@
 		public int serializeLength() {
 			return data == null ? 0 : data.length;
 		}
+	}
+	
+	/**
+	 * Serialize data to byte array
+	 */
+	public static class ByteArraySerializer extends DataSerializer {
+		private final ByteArrayOutputStream out = new ByteArrayOutputStream();
+
+		@Override
+		public void write(byte[] data, int offset, int length) {
+			out.write(data, offset, length);
+		}
 		
+		public byte[] toByteArray() {
+			return out.toByteArray();
+		}
+	}
+
+	/**
+	 * Bridge to the world of {@link java.io.OutputStream}.
+	 * Caller instantiates the stream and is responsible to close it as appropriate, 
+	 * {@link #done() DataSerializer.done()} doesn't close the stream. 
+	 */
+	public static class OutputStreamSerializer extends DataSerializer {
+		private final OutputStream out;
+
+		public OutputStreamSerializer(OutputStream outputStream) {
+			out = outputStream;
+		}
+
+		@Override
+		public void write(byte[] data, int offset, int length) throws HgIOException {
+			try {
+				out.write(data, offset, length);
+			} catch (IOException ex) {
+				throw new HgIOException(ex.getMessage(), ex, null);
+			}
+		}
 	}
 }
--- a/src/org/tmatesoft/hg/internal/DeflaterDataSerializer.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DeflaterDataSerializer.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,10 +16,11 @@
  */
 package org.tmatesoft.hg.internal;
 
-import java.io.IOException;
 import java.util.zip.Deflater;
 import java.util.zip.DeflaterOutputStream;
 
+import org.tmatesoft.hg.core.HgIOException;
+
 /**
  * {@link DeflaterOutputStream} counterpart for {@link DataSerializer} API
  * 
@@ -43,7 +44,7 @@
 	}
 
 	@Override
-	public void writeInt(int... values) throws IOException {
+	public void writeInt(int... values) throws HgIOException {
 		for (int i = 0; i < values.length; i+= AUX_BUFFER_CAPACITY) {
 			int idx = 0;
 			for (int j = i, x = Math.min(values.length, i + AUX_BUFFER_CAPACITY); j < x; j++) {
@@ -58,7 +59,7 @@
 	}
 
 	@Override
-	public void write(byte[] data, int offset, int length) throws IOException {
+	public void write(byte[] data, int offset, int length) throws HgIOException {
 		// @see DeflaterOutputStream#write(byte[], int, int)
 		int stride = deflateOutBuffer.length;
 		for (int i = 0; i < length; i += stride) {
@@ -66,7 +67,7 @@
 		}
 	}
 	
-	private void internalWrite(byte[] data, int offset, int length) throws IOException {
+	private void internalWrite(byte[] data, int offset, int length) throws HgIOException {
 		deflater.setInput(data, offset, length);
 		while (!deflater.needsInput()) {
 			deflate();
@@ -74,11 +75,11 @@
 	}
 
 	@Override
-	public void done() {
+	public void done() throws HgIOException {
 		delegate.done();
 	}
 
-	public void finish() throws IOException {
+	public void finish() throws HgIOException {
 		if (!deflater.finished()) {
 			deflater.finish();
 			while (!deflater.finished()) {
@@ -87,7 +88,7 @@
 		}
 	}
 
-	protected void deflate() throws IOException {
+	protected void deflate() throws HgIOException {
 		int len = deflater.deflate(deflateOutBuffer, 0, deflateOutBuffer.length);
 		if (len > 0) {
 			delegate.write(deflateOutBuffer, 0, len);
--- a/src/org/tmatesoft/hg/internal/DiffHelper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DiffHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -20,8 +20,6 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.tmatesoft.hg.repo.HgInvalidStateException;
-
 /**
  * Mercurial cares about changes only up to the line level, e.g. a simple file version dump in manifest looks like (RevlogDump output):
  * 
@@ -201,9 +199,7 @@
 				} else {
 					assert changeStartS2 == matchStartSeq2;
 					if (matchStartSeq1 > 0 || matchStartSeq2 > 0) {
-						// FIXME perhaps, exception is too much for the case
-						// once diff is covered with tests, replace with assert false : msg; 
-						throw new HgInvalidStateException(String.format("adjustent equal blocks %d, %d and %d,%d", changeStartS1, matchStartSeq1, changeStartS2, matchStartSeq2));
+						assert false : String.format("adjustent equal blocks %d, %d and %d,%d", changeStartS1, matchStartSeq1, changeStartS2, matchStartSeq2);
 					}
 				}
 			}
--- a/src/org/tmatesoft/hg/internal/DirstateBuilder.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DirstateBuilder.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,6 +16,9 @@
  */
 package org.tmatesoft.hg.internal;
 
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.Dirstate;
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.UndoDirstate;
+
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -30,9 +33,9 @@
 import org.tmatesoft.hg.repo.HgDirstate;
 import org.tmatesoft.hg.repo.HgDirstate.EntryKind;
 import org.tmatesoft.hg.repo.HgDirstate.Record;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
-import org.tmatesoft.hg.repo.HgRepositoryFiles;
 import org.tmatesoft.hg.util.Path;
 
 /**
@@ -149,18 +152,20 @@
 		}
 	}
 	
-	public void serialize() throws HgIOException {
-		File dirstateFile = hgRepo.getRepositoryFile(HgRepositoryFiles.Dirstate);
+	public void serialize(Transaction tr) throws HgIOException {
+		File dirstateFile = tr.prepare(hgRepo.getRepositoryFile(Dirstate), hgRepo.getRepositoryFile(UndoDirstate));
 		try {
 			FileChannel dirstate = new FileOutputStream(dirstateFile).getChannel();
 			serialize(dirstate);
 			dirstate.close();
+			tr.done(dirstateFile);
 		} catch (IOException ex) {
+			tr.failure(dirstateFile, ex);
 			throw new HgIOException("Can't write down new directory state", ex, dirstateFile);
 		}
 	}
 	
-	public void fillFrom(DirstateReader dirstate) {
+	public void fillFrom(DirstateReader dirstate) throws HgInvalidControlFileException {
 		// TODO preserve order, if reasonable and possible 
 		dirstate.readInto(new HgDirstate.Inspector() {
 			
--- a/src/org/tmatesoft/hg/internal/DirstateReader.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DirstateReader.java	Wed Jul 10 11:48:55 2013 +0200
@@ -66,7 +66,7 @@
 		if (dirstateFile == null || !dirstateFile.exists()) {
 			return;
 		}
-		DataAccess da = repo.getDataAccess().createReader(dirstateFile);
+		DataAccess da = repo.getDataAccess().createReader(dirstateFile, false);
 		try {
 			if (da.isEmpty()) {
 				return;
@@ -102,7 +102,7 @@
 				} else if (state == 'm') {
 					target.next(EntryKind.Merged, r);
 				} else {
-					repo.getSessionContext().getLog().dump(getClass(), Severity.Warn, "Dirstate record for file %s (size: %d, tstamp:%d) has unknown state '%c'", r.name(), r.size(), r.modificationTime(), state);
+					repo.getLog().dump(getClass(), Severity.Warn, "Dirstate record for file %s (size: %d, tstamp:%d) has unknown state '%c'", r.name(), r.size(), r.modificationTime(), state);
 				}
 			}
 		} catch (IOException ex) {
@@ -142,7 +142,7 @@
 		if (dirstateFile == null || !dirstateFile.exists()) {
 			return new Pair<Nodeid,Nodeid>(NULL, NULL);
 		}
-		DataAccess da = internalRepo.getDataAccess().createReader(dirstateFile);
+		DataAccess da = internalRepo.getDataAccess().createReader(dirstateFile, false);
 		try {
 			if (da.isEmpty()) {
 				return new Pair<Nodeid,Nodeid>(NULL, NULL);
@@ -178,7 +178,7 @@
 				branch = b == null || b.length() == 0 ? HgRepository.DEFAULT_BRANCH_NAME : b;
 				r.close();
 			} catch (FileNotFoundException ex) {
-				internalRepo.getSessionContext().getLog().dump(HgDirstate.class, Debug, ex, null); // log verbose debug, exception might be legal here 
+				internalRepo.getLog().dump(HgDirstate.class, Debug, ex, null); // log verbose debug, exception might be legal here 
 				// IGNORE
 			} catch (IOException ex) {
 				throw new HgInvalidControlFileException("Error reading file with branch information", ex, branchFile);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/EncodeDirPathHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.tmatesoft.hg.util.PathRewrite;
+
+/**
+ * <blockquote cite="http://mercurial.selenic.com/wiki/FileFormats#data.2F">Directory names ending in .i or .d have .hg appended</blockquote>
+ *  
+ * @see http://mercurial.selenic.com/wiki/FileFormats#data.2F
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+final class EncodeDirPathHelper implements PathRewrite {
+	private final Pattern suffix2replace;
+	
+	public EncodeDirPathHelper() {
+		suffix2replace = Pattern.compile("\\.([id]|hg)/");
+	}
+
+	public CharSequence rewrite(CharSequence p) {
+		Matcher suffixMatcher = suffix2replace.matcher(p);
+		CharSequence path;
+		// Matcher.replaceAll, but without extra toString
+		boolean found = suffixMatcher.find();
+		if (found) {
+			StringBuffer sb = new StringBuffer(p.length()  + 20);
+			do {
+				suffixMatcher.appendReplacement(sb, ".$1.hg/");
+			} while (found = suffixMatcher.find());
+			suffixMatcher.appendTail(sb);
+			path = sb;
+		} else {
+			path = p;
+		}
+		return path;
+	}
+
+}
--- a/src/org/tmatesoft/hg/internal/FNCacheFile.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FNCacheFile.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,9 +16,14 @@
  */
 package org.tmatesoft.hg.internal;
 
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.FNCache;
+
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.nio.channels.FileChannel;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
@@ -41,11 +46,16 @@
 	
 	private final Internals repo;
 //	private final List<Path> files;
-	private List<Path> added;
+	private final List<Path> addedDotI;
+	private final List<Path> addedDotD;
+	private final FNCachePathHelper pathHelper;
 
 	public FNCacheFile(Internals internalRepo) {
 		repo = internalRepo;
 //		files = new ArrayList<Path>();
+		pathHelper = new FNCachePathHelper();
+		addedDotI = new ArrayList<Path>(5);
+		addedDotD = new ArrayList<Path>(5);
 	}
 
 	/*
@@ -60,36 +70,48 @@
 		// names in fncache are in local encoding, shall translate to unicode
 		new LineReader(f, repo.getSessionContext().getLog(), repo.getFilenameEncoding()).read(new LineReader.SimpleLineCollector(), entries);
 		for (String e : entries) {
-			// FIXME plain wrong, need either to decode paths and strip off .i/.d or (if keep names as is) change write()
+			// XXX plain wrong, need either to decode paths and strip off .i/.d or (if keep names as is) change write()
 			files.add(pathFactory.path(e));
 		}
 	}
 	*/
 	
 	public void write() throws IOException {
-		if (added == null || added.isEmpty()) {
+		if (addedDotI.isEmpty() && addedDotD.isEmpty()) {
 			return;
 		}
-		File f = fncacheFile();
+		File f = repo.getRepositoryFile(FNCache);
 		f.getParentFile().mkdirs();
 		final Charset filenameEncoding = repo.getFilenameEncoding();
-		FileOutputStream fncacheFile = new FileOutputStream(f, true);
-		for (Path p : added) {
-			String s = "data/" + p.toString() + ".i"; // TODO post-1.0 this is plain wrong. (a) need .d files, too; (b) what about dh/ location? 
-			fncacheFile.write(s.getBytes(filenameEncoding));
-			fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
+		ArrayList<CharBuffer> added = new ArrayList<CharBuffer>();
+		for (Path p : addedDotI) {
+			added.add(CharBuffer.wrap(pathHelper.rewrite(p)));
+		}
+		for (Path p : addedDotD) {
+		// XXX FNCachePathHelper always returns the name of an index file, need to change it into a name of data file,
+			// although the approach (to replace last char) is depressingly awful
+			CharSequence cs = pathHelper.rewrite(p);
+			CharBuffer cb = CharBuffer.allocate(cs.length());
+			cb.append(cs);
+			cb.put(cs.length()-1, 'd');
+			cb.flip();
+			added.add(cb);
+		}
+		FileChannel fncacheFile = new FileOutputStream(f, true).getChannel();
+		ByteBuffer lf = ByteBuffer.wrap(new byte[] { 0x0A });
+		for (CharBuffer b : added) {
+			fncacheFile.write(filenameEncoding.encode(b));
+			fncacheFile.write(lf);
+			lf.rewind();
 		}
 		fncacheFile.close();
 	}
 
-	public void add(Path p) {
-		if (added == null) {
-			added = new ArrayList<Path>();
-		}
-		added.add(p);
+	public void addIndex(Path p) {
+		addedDotI.add(p);
 	}
 
-	private File fncacheFile() {
-		return repo.getFileFromStoreDir("fncache");
+	public void addData(Path p) {
+		addedDotD.add(p);
 	}
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/FNCachePathHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.internal.StoragePathHelper.STR_DATA;
+
+import org.tmatesoft.hg.util.PathRewrite;
+
+/**
+ * Prepare filelog names to be written into fncache. 
+ * 
+ * @see http://mercurial.selenic.com/wiki/fncacheRepoFormat#The_fncache_file
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+final class FNCachePathHelper implements PathRewrite {
+
+	private final EncodeDirPathHelper dirPathRewrite;
+
+	
+	public FNCachePathHelper() {
+		dirPathRewrite = new EncodeDirPathHelper();
+	}
+
+	/**
+	 * Input: repository-relative path of a filelog, i.e. without 'data/' or 'dh/' prefix, and/or '.i'/'.d' suffix.
+	 * Output: path ready to be written into fncache file, always with '.i' suffix (caller is free to alter the suffix to '.d' as appropriate)
+	 */
+	public CharSequence rewrite(CharSequence path) {
+		CharSequence p = dirPathRewrite.rewrite(path);
+		StringBuilder result = new StringBuilder(p.length() + STR_DATA.length() + ".i".length());
+		result.append(STR_DATA);
+		result.append(p);
+		result.append(".i");
+		return result;
+	}
+
+	/*
+	 * There's always 'data/' prefix, even if actual file resides under 'dh/':
+	 *  
+	 * $ cat .hg/store/fncache
+	 * data/very-long-directory-name-level-1/very-long-directory-name-level-2/very-long-directory-name-level-3/file-with-longest-name-i-am-not-lazy-to-type.txt.i
+	 * $ ls .hg/store/dh/very-lon/very-lon/very-lon/
+	 * file-with-longest-name-i-am-not-lazy-to-type.txtbbd4d3327f6364027211b0cd8ca499d3d6308849.i
+	 */
+}
--- a/src/org/tmatesoft/hg/internal/FileAnnotation.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FileAnnotation.java	Wed Jul 10 11:48:55 2013 +0200
@@ -17,17 +17,9 @@
 package org.tmatesoft.hg.internal;
 
 
-import org.tmatesoft.hg.core.HgCallbackTargetException;
-import org.tmatesoft.hg.core.HgIterateDirection;
-import org.tmatesoft.hg.repo.HgBlameFacility;
+import org.tmatesoft.hg.core.HgBlameInspector;
+import org.tmatesoft.hg.core.HgBlameInspector.RevisionDescriptor;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
-import org.tmatesoft.hg.repo.HgBlameFacility.AddBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.BlockData;
-import org.tmatesoft.hg.repo.HgBlameFacility.ChangeBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.DeleteBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.EqualBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.RevisionDescriptor;
-import org.tmatesoft.hg.repo.HgDataFile;
 
 /**
  * Produce output like 'hg annotate' does
@@ -35,7 +27,7 @@
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class FileAnnotation implements HgBlameFacility.Inspector, RevisionDescriptor.Recipient {
+public class FileAnnotation implements HgBlameInspector, RevisionDescriptor.Recipient {
 
 	@Experimental(reason="The line-by-line inspector likely to become part of core/command API")
 	@Callback
@@ -50,18 +42,6 @@
 		int totalLines();
 	}
 
-	/**
-	 * Annotate file revision, line by line.
-	 */
-	public static void annotate(HgDataFile df, int changelogRevisionIndex, LineInspector insp) throws HgCallbackTargetException {
-		if (!df.exists()) {
-			return;
-		}
-		FileAnnotation fa = new FileAnnotation(insp);
-		HgBlameFacility af = new HgBlameFacility(df);
-		af.annotate(changelogRevisionIndex, fa, HgIterateDirection.NewToOld);
-	}
-
 	// keeps <startSeq1, startSeq2, len> of equal blocks, origin to target, from some previous step
 	private RangeSeq activeEquals;
 	// equal blocks of the current iteration, to be recalculated before next step
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/FileChangeMonitor.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+
+/**
+ * This shall become interface/abstract class accessible from SessionContext,
+ * with plugable implementations, e.g. Java7 (file monitoring facilities) based,
+ * or any other convenient means. It shall allow both "check at the moment asked" 
+ * and "collect changes and dispatch on demand" implementation approaches, so that
+ * implementors may use best available technology   
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class FileChangeMonitor {
+	private final File file;
+	private long lastModified;
+	private long length;
+	
+	/**
+	 * First round: support for 1-monitor-1-file only
+	 * Next round: 1-monitor-N files
+	 */
+	public FileChangeMonitor(File f) {
+		file = f;
+	}
+	
+	// shall work for files that do not exist
+	public void touch(Object source) {
+		lastModified = file.lastModified();
+		length = file.length();
+	}
+	
+	public void check(Object source, Action onChange) {
+		if (changed(source)) {
+			onChange.changed();
+		}
+	}
+
+	public boolean changed(Object source) {
+		if (file.lastModified() != lastModified) {
+			return true;
+		}
+		return file.length() != length; 
+	}
+	
+	public interface Action {
+		public void changed();
+	}
+}
--- a/src/org/tmatesoft/hg/internal/FileContentSupplier.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FileContentSupplier.java	Wed Jul 10 11:48:55 2013 +0200
@@ -18,57 +18,55 @@
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 
 import org.tmatesoft.hg.core.HgIOException;
-import org.tmatesoft.hg.repo.CommitFacility;
+import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.util.Path;
 
 /**
- * FIXME files are opened at the moment of instantiation, though the moment the data is requested might be distant
+ * {@link DataSource} that reads from regular files
  * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class FileContentSupplier implements CommitFacility.ByteDataSupplier {
-	private final FileChannel channel;
-	private IOException error;
+public class FileContentSupplier implements DataSource {
+	private final File file;
+	private final SessionContext ctx;
 	
-	public FileContentSupplier(HgRepository repo, Path file) throws HgIOException {
-		this(new File(repo.getWorkingDir(), file.toString()));
-	}
-
-	public FileContentSupplier(File f) throws HgIOException {
-		if (!f.canRead()) {
-			throw new HgIOException(String.format("Can't read file %s", f), f);
-		}
-		try {
-			channel = new FileInputStream(f).getChannel();
-		} catch (FileNotFoundException ex) {
-			throw new HgIOException("Can't open file", ex, f);
-		}
+	public FileContentSupplier(HgRepository repo, Path file) {
+		this(repo, new File(repo.getWorkingDir(), file.toString()));
 	}
 
-	public int read(ByteBuffer buf) {
-		if (error != null) {
-			return -1;
-		}
-		try {
-			return channel.read(buf);
-		} catch (IOException ex) {
-			error = ex;
-		}
-		return -1;
+	public FileContentSupplier(SessionContext.Source ctxSource, File f) {
+		ctx = ctxSource.getSessionContext();
+		file = f;
 	}
 	
-	public void done() throws IOException {
-		channel.close();
-		if (error != null) {
-			throw error;
+	public void serialize(DataSerializer out) throws HgIOException {
+		FileInputStream fis = null;
+		try {
+			fis = new FileInputStream(file);
+			FileChannel fc = fis.getChannel();
+			ByteBuffer buffer = ByteBuffer.allocate((int) Math.min(100*1024, fc.size()));
+			while (fc.read(buffer) != -1) {
+				buffer.flip();
+				// #allocate() above ensures backing array
+				out.write(buffer.array(), 0, buffer.limit());
+				buffer.clear();
+			}
+		} catch (IOException ex) {
+			throw new HgIOException("Failed to get content of the file", ex, file);
+		} finally {
+			new FileUtils(ctx.getLog(), this).closeQuietly(fis);
 		}
 	}
+	
+	public int serializeLength() {
+		return Internals.ltoi(file.length());
+	}
 }
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/FileHistory.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.core.HgIterateDirection.NewToOld;
+
+import java.util.Collections;
+import java.util.LinkedList;
+
+import org.tmatesoft.hg.core.HgIterateDirection;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+
/**
 * History of a file, with copy/renames, and corresponding revision information.
 * Facility for file history iteration. 
 * 
 * TODO [post-1.1] Utilize in HgLogCommand and anywhere else we need to follow file history
 * 
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class FileHistory {
	
	// history chunks in old-to-new order (origin first, ultimate target last); filled by #build()
	private LinkedList<FileRevisionHistoryChunk> fileCompleteHistory = new LinkedList<FileRevisionHistoryChunk>();
	// the (ultimate target) file whose history is traced
	private final HgDataFile df;
	// changeset revision index range of interest, both ends inclusive
	private final int csetTo;
	private final int csetFrom;
	
	/**
	 * @param file file to trace history of; renames/copies are followed towards older revisions in {@link #build()}
	 * @param fromChangeset changeset revision index the history starts at
	 * @param toChangeset changeset revision index the history ends at
	 */
	public FileHistory(HgDataFile file, int fromChangeset, int toChangeset) {
		df = file;
		csetFrom = fromChangeset;
		csetTo = toChangeset;
	}
	
	/**
	 * @return changeset revision index the history starts at, as passed to the constructor
	 */
	public int getStartChangeset() {
		return csetFrom;
	}
	
	/**
	 * @return changeset revision index the history ends at, as passed to the constructor
	 */
	public int getEndChangeset() {
		return csetTo;
	}

	/**
	 * Collect the history as a chain of {@link FileRevisionHistoryChunk chunks}, one per file name
	 * (each rename/copy starts a new chunk), walking from the newest revision backwards until
	 * the requested start changeset is reached. Not expected to be invoked more than once.
	 */
	public void build() throws HgRuntimeException {
		assert fileCompleteHistory.isEmpty();
		HgDataFile currentFile = df;
		final int changelogRevIndexEnd = csetTo;
		final int changelogRevIndexStart = csetFrom;
		int fileLastClogRevIndex = changelogRevIndexEnd;
		FileRevisionHistoryChunk nextChunk = null;
		fileCompleteHistory.clear(); // just in case, #build() is not expected to be called more than once
		do {
			FileRevisionHistoryChunk fileHistory = new FileRevisionHistoryChunk(currentFile);
			fileHistory.init(fileLastClogRevIndex);
			fileHistory.linkTo(nextChunk);
			fileCompleteHistory.addFirst(fileHistory); // to get the list in old-to-new order
			nextChunk = fileHistory;
			if (fileHistory.changeset(0) > changelogRevIndexStart && currentFile.isCopy()) {
				// fileHistory.changeset(0) is the earliest revision we know about so far,
				// once we get to revisions earlier than the requested start, stop digging.
				// The reason there's NO == (i.e. not >=) because:
				// (easy): once it's equal, we've reached our intended start
				// (hard): if changelogRevIndexStart happens to be exact start of one of renames in the 
				// chain of renames (test-annotate2 repository, file1->file1a->file1b, i.e. points 
				// to the very start of file1a or file1 history), presence of == would get us to the next 
				// chunk and hence changed parents of present chunk's first element. Our annotate alg 
				// relies on parents only (i.e. knows nothing about 'last iteration element') to find out 
				// what to compare, and hence won't report all lines of 'last iteration element' (which is the
				// first revision of the renamed file) as "added in this revision", leaving gaps in annotate
				HgRepository repo = currentFile.getRepo();
				Nodeid originLastRev = currentFile.getCopySourceRevision();
				currentFile = repo.getFileNode(currentFile.getCopySourceName());
				fileLastClogRevIndex = currentFile.getChangesetRevisionIndex(currentFile.getRevisionIndex(originLastRev));
				// XXX perhaps, shall fail with meaningful exception if new file doesn't exist (.i/.d not found for whatever reason)
				// or source revision is missing?
			} else {
				fileHistory.chopAtChangeset(changelogRevIndexStart);
				currentFile = null; // stop iterating
			}
		} while (currentFile != null && fileLastClogRevIndex > changelogRevIndexStart);
		// fileCompleteHistory is in (origin, intermediate target, ultimate target) order
	}
	
	/**
	 * @param order direction to iterate the collected chunks in
	 * @return chunks of this file's history, in the requested order; read-only view
	 */
	public Iterable<FileRevisionHistoryChunk> iterate(HgIterateDirection order) {
		if (order == NewToOld) {
			return ReverseIterator.reversed(fileCompleteHistory);
		}
		return Collections.unmodifiableList(fileCompleteHistory);
	}
}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/FileRevisionHistoryChunk.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,202 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.core.HgIterateDirection.OldToNew;
+import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION;
+import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
+
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.LinkedList;
+
+import org.tmatesoft.hg.core.HgIterateDirection;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+
+/**
+ * Piece of file history, identified by path, limited to file revisions from range [chop..init] of changesets, 
+ * can be linked to another piece.
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class FileRevisionHistoryChunk {
+	private final HgDataFile df;
+	// change ancestry, sequence of file revisions
+	private IntVector fileRevsToVisit;
+	// parent pairs of complete file history
+	private IntVector fileParentRevs;
+	// map file revision to changelog revision (sparse array, only file revisions to visit are set)
+	private int[] file2changelog;
+	private int originChangelogRev = BAD_REVISION, originFileRev = BAD_REVISION;
+	private int csetRangeStart = NO_REVISION, csetRangeEnd = BAD_REVISION; 
+	
+
+	public FileRevisionHistoryChunk(HgDataFile file) {
+		df = file;
+	}
+	
+	/**
+	 * @return file at this specific chunk of history (i.e. its path may be different from the paths of other chunks)
+	 */
+	public HgDataFile getFile() {
+		return df;
+	}
+	
+	/**
+	 * @return changeset this file history chunk was chopped at, or {@link HgRepository#NO_REVISION} if none specified
+	 */
+	public int getStartChangeset() {
+		return csetRangeStart;
+	}
+	
+	/**
+	 * @return changeset this file history chunk ends at
+	 */
+	public int getEndChangeset() {
+		return csetRangeEnd;
+	}
+	
+	public void init(int changelogRevisionIndex) throws HgRuntimeException {
+		csetRangeEnd = changelogRevisionIndex;
+		// XXX df.indexWalk(0, fileRevIndex, ) might be more effective
+		Nodeid fileRev = df.getRepo().getManifest().getFileRevision(changelogRevisionIndex, df.getPath());
+		int fileRevIndex = df.getRevisionIndex(fileRev);
+		int[] fileRevParents = new int[2];
+		fileParentRevs = new IntVector((fileRevIndex+1) * 2, 0);
+		fileParentRevs.add(NO_REVISION, NO_REVISION); // parents of fileRevIndex == 0
+		for (int i = 1; i <= fileRevIndex; i++) {
+			df.parents(i, fileRevParents, null, null);
+			fileParentRevs.add(fileRevParents[0], fileRevParents[1]);
+		}
+		// fileRevsToVisit keep file change ancestry from new to old
+		fileRevsToVisit = new IntVector(fileRevIndex + 1, 0);
+		// keep map of file revision to changelog revision
+		file2changelog = new int[fileRevIndex+1];
+		// only elements worth visit would get mapped, so there would be unfilled areas in the file2changelog,
+		// prevent from error (make it explicit) by bad value
+		Arrays.fill(file2changelog, BAD_REVISION);
+		LinkedList<Integer> queue = new LinkedList<Integer>();
+		BitSet seen = new BitSet(fileRevIndex + 1);
+		queue.add(fileRevIndex);
+		do {
+			int x = queue.removeFirst();
+			if (seen.get(x)) {
+				continue;
+			}
+			seen.set(x);
+			fileRevsToVisit.add(x);
+			file2changelog[x] = df.getChangesetRevisionIndex(x);
+			int p1 = fileParentRevs.get(2*x);
+			int p2 = fileParentRevs.get(2*x + 1);
+			if (p1 != NO_REVISION) {
+				queue.addLast(p1);
+			}
+			if (p2 != NO_REVISION) {
+				queue.addLast(p2);
+			}
+		} while (!queue.isEmpty());
+		// make sure no child is processed before we handled all (grand-)parents of the element
+		fileRevsToVisit.sort(false);
+	}
+	
+	public void linkTo(FileRevisionHistoryChunk target) {
+		// assume that target.init() has been called already 
+		if (target == null) {
+			return;
+		}
+		target.originFileRev = fileRevsToVisit.get(0); // files to visit are new to old
+		target.originChangelogRev = changeset(target.originFileRev);
+	}
+
+	/**
+	 * Mark revision closest(ceil) to specified as the very first one (no parents) 
+	 */
+	public void chopAtChangeset(int firstChangelogRevOfInterest) {
+		csetRangeStart = firstChangelogRevOfInterest;
+		if (firstChangelogRevOfInterest == 0) {
+			return; // nothing to do
+		}
+		int i = 0, x = fileRevsToVisit.size(), fileRev = BAD_REVISION;
+		// fileRevsToVisit is new to old, greater numbers to smaller
+		while (i < x && changeset(fileRev = fileRevsToVisit.get(i)) >= firstChangelogRevOfInterest) {
+			i++;
+		}
+		assert fileRev != BAD_REVISION; // there's at least 1 revision in fileRevsToVisit
+		if (i == x && changeset(fileRev) != firstChangelogRevOfInterest) {
+			assert false : "Requested changeset shall belong to the chunk";
+			return;
+		}
+		fileRevsToVisit.trimTo(i); // no need to iterate more
+		// pretend fileRev got no parents
+		fileParentRevs.set(fileRev * 2, NO_REVISION);
+		fileParentRevs.set(fileRev, NO_REVISION);
+	}
+
+	public int[] fileRevisions(HgIterateDirection iterateOrder) {
+		// fileRevsToVisit is { r10, r7, r6, r5, r0 }, new to old
+		int[] rv = fileRevsToVisit.toArray();
+		if (iterateOrder == OldToNew) {
+			// reverse return value
+			for (int a = 0, b = rv.length-1; a < b; a++, b--) {
+				int t = rv[b];
+				rv[b] = rv[a];
+				rv[a] = t;
+			}
+		}
+		return rv;
+	}
+	
+	/**
+	 * @return number of file revisions in this chunk of its history
+	 */
+	public int revisionCount() {
+		return fileRevsToVisit.size();
+	}
+	
+	public int changeset(int fileRevIndex) {
+		return file2changelog[fileRevIndex];
+	}
+	
+	public void fillFileParents(int fileRevIndex, int[] fileParents) {
+		if (fileRevIndex == 0 && originFileRev != BAD_REVISION) {
+			// this chunk continues another file
+			assert originFileRev != NO_REVISION;
+			fileParents[0] = originFileRev;
+			fileParents[1] = NO_REVISION;
+			return;
+		}
+		fileParents[0] = fileParentRevs.get(fileRevIndex * 2);
+		fileParents[1] = fileParentRevs.get(fileRevIndex * 2 + 1);
+	}
+	
+	public void fillCsetParents(int fileRevIndex, int[] csetParents) {
+		if (fileRevIndex == 0 && originFileRev != BAD_REVISION) {
+			assert originFileRev != NO_REVISION;
+			csetParents[0] = originChangelogRev;
+			csetParents[1] = NO_REVISION; // I wonder if possible to start a copy with two parents?
+			return;
+		}
+		int fp1 = fileParentRevs.get(fileRevIndex * 2);
+		int fp2 = fileParentRevs.get(fileRevIndex * 2 + 1);
+		csetParents[0] = fp1 == NO_REVISION ? NO_REVISION : changeset(fp1);
+		csetParents[1] = fp2 == NO_REVISION ? NO_REVISION : changeset(fp2);
+	}
+}
\ No newline at end of file
--- a/src/org/tmatesoft/hg/internal/FileSystemHelper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FileSystemHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -63,7 +63,9 @@
 		try {
 			execHelper.exec(command);
 		} catch (InterruptedException ex) {
-			throw new IOException(ex);
+			IOException e = new IOException();
+			e.initCause(ex); // XXX Java 1.5: no IOException(Throwable) ctor; cause goes onto the new exception, not onto ex itself
+			throw e;
 		}
 	}
 	
@@ -77,7 +79,9 @@
 		try {
 			execHelper.exec(command);
 		} catch (InterruptedException ex) {
-			throw new IOException(ex);
+			IOException e = new IOException();
+			e.initCause(ex); // XXX Java 1.5: no IOException(Throwable) ctor; cause goes onto the new exception, not onto ex itself
+			throw e;
 		}
 	}
 
@@ -90,12 +94,14 @@
 		String result = null;
 		try {
 			result = execHelper.exec(command).toString().trim();
-			if (result.isEmpty()) {
+			if (result.length() == 0) { // XXX Java 1.5 isEmpty()
 				return defaultValue;
 			}
 			return Integer.parseInt(result, 8);
 		} catch (InterruptedException ex) {
-			throw new IOException(ex);
+			IOException e = new IOException();
+			e.initCause(ex); // XXX Java 1.5: no IOException(Throwable) ctor; cause goes onto the new exception, not onto ex itself
+			throw e;
 		} catch (NumberFormatException ex) {
 			ctx.getLog().dump(getClass(), Warn, ex, String.format("Bad value for access rights:%s", result));
 			return defaultValue;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/FileUtils.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.util.LogFacility.Severity.Debug;
+
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.util.LogFacility;
+import org.tmatesoft.hg.util.LogFacility.Severity;
+
/**
 * File copy and quiet stream-closing helpers.
 * 
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public final class FileUtils {
	
	// where failures of quiet close get reported
	private final LogFacility log;
	// class to attribute log records to; null when no trouble source was given
	private final Class<?> troublemaker;
	
	/**
	 * One-shot file copy with a default logger that dumps to stderr.
	 * @throws HgIOException when the copy can't be completed
	 */
	public static void copyFile(File from, File to) throws HgIOException {
		new FileUtils(new StreamLogFacility(Debug, true, System.err), FileUtils.class).copy(from, to);
	}

	/**
	 * @param logFacility where to report errors that are deliberately not rethrown (see {@link #closeQuietly(Closeable, File)})
	 * @param troubleSource object or class to attribute log records to; may be <code>null</code>
	 */
	public FileUtils(LogFacility logFacility, Object troubleSource) {
		log = logFacility;
		if (troubleSource == null) {
			troublemaker = null;
		} else {
			troublemaker = troubleSource instanceof Class ? (Class<?>) troubleSource : troubleSource.getClass();
		}
	}

	/**
	 * Copy complete file content with NIO channel transfer.
	 * @throws HgIOException when the copy fails; both streams are closed quietly in that case
	 */
	public void copy(File from, File to) throws HgIOException {
		FileInputStream fis = null;
		FileOutputStream fos = null;
		try {
			fis = new FileInputStream(from);
			fos = new FileOutputStream(to);
			FileChannel input = fis.getChannel();
			FileChannel output = fos.getChannel();
			long count = input.size();
			long pos = 0;
			int zeroCopied = 0; // flag to prevent hang-up
			do {
				long c = input.transferTo(pos, count, output);
				pos += c;
				count -= c;
				if (c == 0) {
					// transferTo may legitimately report 0 bytes; three times in a row is treated as "stuck"
					if (++zeroCopied == 3) {
						String m = String.format("Can't copy %s to %s, transferTo copies 0 bytes. Position: %d, bytes left:%d", from.getName(), to.getName(), pos, count);
						throw new IOException(m);
					}
				} else {
					// reset
					zeroCopied = 0;
				}
			} while (count > 0);
			fos.close();
			fos = null;
			fis.close();
			fis = null;
		} catch (IOException ex) {
			// not in finally because I don't want to lose exception from fos.close()
			closeQuietly(fis, from);
			closeQuietly(fos, to);
			String m = String.format("Failed to copy %s to %s", from.getName(), to.getName());
			throw new HgIOException(m, ex, from);
		}
		/* Copy of cpython's 00changelog.d, 20Mb+
		 * Linux&Windows: 300-400 ms,
		 * Windows uncached run: 1.6 seconds
		 */
	}

	/**
	 * Close the stream, logging (never throwing) any failure.
	 */
	public void closeQuietly(Closeable stream) {
		closeQuietly(stream, null);
	}

	/**
	 * Close the stream, logging (never throwing) any failure.
	 * @param stream may be <code>null</code>, then the call is a no-op
	 * @param f file the stream originates from, for diagnostics only; may be <code>null</code>
	 */
	public void closeQuietly(Closeable stream, File f) {
		if (stream != null) {
			try {
				stream.close();
			} catch (IOException ex) {
				// ignore
				final String msg;
				if (f == null) {
					msg = "Exception while closing stream quietly";
				} else {
					msg = String.format("Failed to close %s", f);
				}
				log.dump(troublemaker == null ? getClass() : troublemaker, Severity.Warn, ex, msg);
			}
		}
	}
}
--- a/src/org/tmatesoft/hg/internal/FilterDataAccess.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FilterDataAccess.java	Wed Jul 10 11:48:55 2013 +0200
@@ -45,6 +45,7 @@
 
 	@Override
 	public FilterDataAccess reset() throws IOException {
+		dataAccess.reset();
 		count = length;
 		return this;
 	}
--- a/src/org/tmatesoft/hg/internal/InflaterDataAccess.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/InflaterDataAccess.java	Wed Jul 10 11:48:55 2013 +0200
@@ -39,22 +39,22 @@
 	private int decompressedLength;
 
 	public InflaterDataAccess(DataAccess dataAccess, long offset, int compressedLength) {
-		this(dataAccess, offset, compressedLength, -1, new Inflater(), new byte[512]);
+		this(dataAccess, offset, compressedLength, -1, new Inflater(), new byte[512], null);
 	}
 
 	public InflaterDataAccess(DataAccess dataAccess, long offset, int compressedLength, int actualLength) {
-		this(dataAccess, offset, compressedLength, actualLength, new Inflater(), new byte[512]);
+		this(dataAccess, offset, compressedLength, actualLength, new Inflater(), new byte[512], null);
 	}
 
-	public InflaterDataAccess(DataAccess dataAccess, long offset, int compressedLength, int actualLength, Inflater inflater, byte[] buf) {
+	public InflaterDataAccess(DataAccess dataAccess, long offset, int compressedLength, int actualLength, Inflater inflater, byte[] inBuf, ByteBuffer outBuf) {
 		super(dataAccess, offset, compressedLength);
-		if (inflater == null || buf == null) {
+		if (inflater == null || inBuf == null) {
 			throw new IllegalArgumentException();
 		}
 		this.inflater = inflater;
 		this.decompressedLength = actualLength;
-		inBuffer = buf;
-		outBuffer = ByteBuffer.allocate(inBuffer.length * 2);
+		inBuffer = inBuf;
+		outBuffer = outBuf == null ? ByteBuffer.allocate(inBuffer.length * 2) : outBuf;
 		outBuffer.limit(0); // there's nothing to read in the buffer 
 	}
 	
--- a/src/org/tmatesoft/hg/internal/IntMap.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/IntMap.java	Wed Jul 10 11:48:55 2013 +0200
@@ -17,6 +17,7 @@
 package org.tmatesoft.hg.internal;
 
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -136,7 +137,6 @@
 	/**
 	 * Forget first N entries (in natural order) in the map.
 	 */
-	@Experimental
 	public void removeFromStart(int count) {
 		if (count > 0 && count <= size) {
 			if (count < size) {
@@ -217,6 +217,13 @@
 		}
 		return map;
 	}
+	
	/**
	 * Snapshot of all values in the map — presumably in ascending key order, as lookup
	 * relies on binary search over the keys array (confirm against full IntMap source).
	 * @return fixed-size list over a copy; changes to the list are not reflected in the map
	 */
	public Collection<V> values() {
		@SuppressWarnings("unchecked")
		V[] rv = (V[]) new Object[size];
		System.arraycopy(values, 0, rv, 0, size);
		return Arrays.<V>asList(rv);
	}
 
 	// copy of Arrays.binarySearch, with upper search limit as argument
 	private static int binarySearch(int[] a, int high, int key) {
--- a/src/org/tmatesoft/hg/internal/IntVector.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/IntVector.java	Wed Jul 10 11:48:55 2013 +0200
@@ -130,7 +130,6 @@
 	/**
 	 * Use only when this instance won't be used any longer
 	 */
-	@Experimental
 	int[] toArray(boolean internalIfSizeMatchCapacity) {
 		if (count == data.length) {
 			return data;
--- a/src/org/tmatesoft/hg/internal/Internals.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/Internals.java	Wed Jul 10 11:48:55 2013 +0200
@@ -19,7 +19,6 @@
 import static org.tmatesoft.hg.util.LogFacility.Severity.Error;
 
 import java.io.File;
-import java.io.IOException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -29,6 +28,7 @@
 import java.util.List;
 import java.util.StringTokenizer;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgInternals;
@@ -37,6 +37,8 @@
 import org.tmatesoft.hg.repo.HgRepositoryFiles;
 import org.tmatesoft.hg.repo.HgRepositoryLock;
 import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.LogFacility;
+import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.PathRewrite;
 
 /**
@@ -115,25 +117,32 @@
 	private final HgRepository repo;
 	private final File repoDir;
 	private final boolean isCaseSensitiveFileSystem;
-	private final boolean shallCacheRevlogsInRepo;
 	private final DataAccessProvider dataAccess;
+	private final ImplAccess implAccess;
 	
 	private final int requiresFlags;
 
 	private final PathRewrite dataPathHelper; // access to file storage area (usually under .hg/store/data/), with filenames mangled  
 	private final PathRewrite repoPathHelper; // access to system files (under .hg/store if requires has 'store' flag)
 
-	public Internals(HgRepository hgRepo, File hgDir) throws HgRuntimeException {
+	private final boolean shallMergePatches;
+	private final RevlogStreamFactory streamProvider;
+
+	public Internals(HgRepository hgRepo, File hgDir, ImplAccess implementationAccess) throws HgRuntimeException {
 		repo = hgRepo;
 		repoDir = hgDir;
+		implAccess = implementationAccess;
 		isCaseSensitiveFileSystem = !runningOnWindows();
 		SessionContext ctx = repo.getSessionContext();
-		shallCacheRevlogsInRepo = new PropertyMarshal(ctx).getBoolean(CFG_PROPERTY_REVLOG_STREAM_CACHE, true);
 		dataAccess = new DataAccessProvider(ctx);
 		RepoInitializer repoInit = new RepoInitializer().initRequiresFromFile(repoDir);
 		requiresFlags = repoInit.getRequires();
 		dataPathHelper = repoInit.buildDataFilesHelper(getSessionContext());
 		repoPathHelper = repoInit.buildStoreFilesHelper();
+		final PropertyMarshal pm = new PropertyMarshal(ctx);
+		boolean shallCacheRevlogsInRepo = pm.getBoolean(CFG_PROPERTY_REVLOG_STREAM_CACHE, true);
+		streamProvider = new RevlogStreamFactory(this, shallCacheRevlogsInRepo); 
+		shallMergePatches = pm.getBoolean(Internals.CFG_PROPERTY_PATCH_MERGE, true);
 	}
 	
 	public boolean isInvalid() {
@@ -141,12 +150,16 @@
 	}
 	
 	public File getRepositoryFile(HgRepositoryFiles f) {
-		return f.residesUnderRepositoryRoot() ? getFileFromRepoDir(f.getName()) : getFileFromDataDir(f.getName());
+		switch (f.getHome()) {
+			case Store : return getFileFromStoreDir(f.getName());
+			case Repo : return getFileFromRepoDir(f.getName());
+			default : return new File(repo.getWorkingDir(), f.getName());
+		}
 	}
 
 	/**
 	 * Access files under ".hg/".
-	 * File not necessarily exists, this method is merely a factory for Files at specific, configuration-dependent location. 
+	 * File not necessarily exists, this method is merely a factory for {@link File files} at specific, configuration-dependent location. 
 	 * 
 	 * @param name shall be normalized path
 	 */
@@ -180,6 +193,10 @@
 		return repo.getSessionContext();
 	}
 	
+	public LogFacility getLog() {
+		return getSessionContext().getLog();
+	}
+	
 	public HgRepository getRepo() {
 		return repo;
 	}
@@ -260,6 +277,16 @@
 		return requiresFlags;
 	}
 	
+	boolean shallMergePatches() {
+		return shallMergePatches;
+	}
+
+	RevlogChangeMonitor getRevlogTracker(File f) {
+		// TODO decide whether to use one monitor per multiple files or 
+		// an instance per file; and let SessionContext pass alternative implementation)
+		return new RevlogChangeMonitor(f);
+	}
+	
 	public static boolean runningOnWindows() {
 		return System.getProperty("os.name").indexOf("Windows") != -1;
 	}
@@ -314,10 +341,9 @@
 	
 	/**
 	 * User-specific configuration, from system-wide and user home locations, without any repository-specific data.
-	 * 
 	 * @see http://www.selenic.com/mercurial/hgrc.5.html
 	 */
-	public static ConfigFile readConfiguration(SessionContext sessionCtx) throws IOException {
+	public static ConfigFile readConfiguration(SessionContext sessionCtx) throws HgIOException {
 		ConfigFile configFile = new ConfigFile(sessionCtx);
 		File hgInstallRoot = findHgInstallRoot(sessionCtx); // may be null
 		//
@@ -363,7 +389,7 @@
 	 * Repository-specific configuration
 	 * @see http://www.selenic.com/mercurial/hgrc.5.html
 	 */
-	public ConfigFile readConfiguration() throws IOException {
+	public ConfigFile readConfiguration() throws HgIOException {
 		ConfigFile configFile = readConfiguration(repo.getSessionContext());
 		// last one, overrides anything else
 		// <repo>/.hg/hgrc
@@ -371,6 +397,9 @@
 		return configFile;
 	}
 
+	/*package-local*/ImplAccess getImplAccess() {
+		return implAccess;
+	}
 	
 	private static List<File> getWindowsConfigFilesPerInstall(File hgInstallDir) {
 		File f = new File(hgInstallDir, "Mercurial.ini");
@@ -381,7 +410,7 @@
 		if (f.canRead() && f.isDirectory()) {
 			return listConfigFiles(f);
 		}
-		// TODO post-1.0 query registry, e.g. with
+		// TODO [post-1.1] query registry, e.g. with
 		// Runtime.exec("reg query HKLM\Software\Mercurial")
 		//
 		f = new File("C:\\Mercurial\\Mercurial.ini");
@@ -454,11 +483,21 @@
 		// fallback to default, let calling code fail with Exception if can't write
 		return new File(System.getProperty("user.home"), ".hgrc");
 	}
+	
+	public RevlogStream createManifestStream() {
+		File manifestFile = getFileFromStoreDir("00manifest.i");
+		return streamProvider.create(manifestFile);
+	}
 
-	public boolean shallCacheRevlogs() {
-		return shallCacheRevlogsInRepo;
+	public RevlogStream createChangelogStream() {
+		File chlogFile = getFileFromStoreDir("00changelog.i");
+		return streamProvider.create(chlogFile);
 	}
-	
+
+	public RevlogStream resolveStoreFile(Path path) {
+		return streamProvider.getStoreFile(path, false);
+	}
+
 	// marker method
 	public static IllegalStateException notImplemented() {
 		return new IllegalStateException("Not implemented");
@@ -496,4 +535,11 @@
 		assert ((long) i) == l : "Loss of data!";
 		return i;
 	}
+
+	// access implementation details (fields, methods) of oth.repo package
+	public interface ImplAccess {
+		public RevlogStream getStream(HgDataFile df);
+		public RevlogStream getManifestStream();
+		public RevlogStream getChangelogStream();
+	}
 }
--- a/src/org/tmatesoft/hg/internal/LineReader.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/LineReader.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,8 +16,6 @@
  */
 package org.tmatesoft.hg.internal;
 
-import static org.tmatesoft.hg.util.LogFacility.Severity.Warn;
-
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
@@ -28,8 +26,7 @@
 import java.nio.charset.Charset;
 import java.util.Collection;
 
-import org.tmatesoft.hg.repo.HgInvalidFileException;
-import org.tmatesoft.hg.repo.ext.MqManager;
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.util.LogFacility;
 
 /**
@@ -98,7 +95,14 @@
 			return this;
 		}
 
-		public <T> void read(LineConsumer<T> consumer, T paramObj) throws HgInvalidFileException {
+		/**
+		 * 
+		 * @param consumer where to pipe read lines to
+		 * @param paramObj parameterizes consumer
+		 * @return paramObj value for convenience
+		 * @throws HgIOException if there's {@link IOException} while reading file
+		 */
+		public <T> T read(LineConsumer<T> consumer, T paramObj) throws HgIOException {
 			BufferedReader statusFileReader = null;
 			try {
 //				consumer.begin(file, paramObj);
@@ -122,20 +126,15 @@
 						ok = consumer.consume(line, paramObj);
 					}
 				}
+				return paramObj;
 			} catch (IOException ex) {
-				throw new HgInvalidFileException(ex.getMessage(), ex, file);
+				throw new HgIOException(ex.getMessage(), ex, file);
 			} finally {
-				if (statusFileReader != null) {
-					try {
-						statusFileReader.close();
-					} catch (IOException ex) {
-						log.dump(MqManager.class, Warn, ex, null);
-					}
-				}
+				new FileUtils(log, this).closeQuietly(statusFileReader);
 //				try {
 //					consumer.end(file, paramObj);
 //				} catch (IOException ex) {
-//					log.warn(MqManager.class, ex, null);
+//					log.warn(getClass(), ex, null);
 //				}
 			}
 		}
--- a/src/org/tmatesoft/hg/internal/ManifestEntryBuilder.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ManifestEntryBuilder.java	Wed Jul 10 11:48:55 2013 +0200
@@ -18,7 +18,9 @@
 
 import java.io.ByteArrayOutputStream;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
 
 /**
  * Create binary manifest entry ready to write down into 00manifest.i
@@ -36,16 +38,20 @@
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class ManifestEntryBuilder {
-	private ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+public class ManifestEntryBuilder implements DataSource {
+	private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+	private final EncodingHelper encHelper;
 
+	public ManifestEntryBuilder(EncodingHelper encodingHelper) {
+		encHelper = encodingHelper;
+	}
 	
 	public ManifestEntryBuilder reset() {
 		buffer.reset();
 		return this;
 	}
 	public ManifestEntryBuilder add(String fname, Nodeid revision) {
-		byte[] b = fname.getBytes();
+		byte[] b = encHelper.toManifest(fname);
 		buffer.write(b, 0, b.length);
 		buffer.write('\0');
 		b = revision.toString().getBytes();
@@ -58,4 +64,13 @@
 		return buffer.toByteArray();
 	}
 
+	public void serialize(DataSerializer out) throws HgIOException {
+		byte[] r = build();
+		out.write(r, 0 , r.length);
+	}
+
+	public int serializeLength() {
+		return buffer.size();
+	}
+
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/Metadata.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,209 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.util.LogFacility.Severity.Error;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.util.LogFacility;
+
+/**
+ * Container for metadata recorded as part of file revisions
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class Metadata {
+	private static class Record {
+		public final int offset;
+		public final MetadataEntry[] entries;
+		
+		public Record(int off, MetadataEntry[] entr) {
+			offset = off;
+			entries = entr;
+		}
+	}
+	// XXX sparse array needed
+	private final IntMap<Metadata.Record> entries = new IntMap<Metadata.Record>(5);
+	
+	private final Metadata.Record NONE = new Record(-1, null); // don't want statics
+
+	private final LogFacility log;
+
+	public Metadata(SessionContext.Source sessionCtx) {
+		log = sessionCtx.getSessionContext().getLog();
+	}
+	
+	// true when there's metadata for given revision
+	public boolean known(int revision) {
+		Metadata.Record i = entries.get(revision);
+		return i != null && NONE != i;
+	}
+
+	// true when revision has been checked for metadata presence.
+	public boolean checked(int revision) {
+		return entries.containsKey(revision);
+	}
+
+	// true when revision has been checked and found not having any metadata
+	public boolean none(int revision) {
+		Metadata.Record i = entries.get(revision);
+		return i == NONE;
+	}
+
+	// mark revision as having no metadata.
+	void recordNone(int revision) {
+		Metadata.Record i = entries.get(revision);
+		if (i == NONE) {
+			return; // already there
+		} 
+		if (i != null) {
+			throw new HgInvalidStateException(String.format("Trying to override Metadata state for revision %d (known offset: %d)", revision, i));
+		}
+		entries.put(revision, NONE);
+	}
+
+	// since this is an internal class, callers are supposed to ensure arg correctness (i.e. ask known() first)
+	public int dataOffset(int revision) {
+		return entries.get(revision).offset;
+	}
+	void add(int revision, int dataOffset, Collection<MetadataEntry> e) {
+		assert !entries.containsKey(revision);
+		entries.put(revision, new Record(dataOffset, e.toArray(new MetadataEntry[e.size()])));
+	}
+	
+	/**
+	 * @return <code>true</code> if metadata has been found
+	 */
+	public boolean tryRead(int revisionNumber, DataAccess data) throws IOException, HgInvalidControlFileException {
+		final int daLength = data.length();
+		if (daLength < 4 || data.readByte() != 1 || data.readByte() != 10) {
+			recordNone(revisionNumber);
+			return false;
+		} else {
+			ArrayList<MetadataEntry> _metadata = new ArrayList<MetadataEntry>();
+			int offset = parseMetadata(data, daLength, _metadata);
+			add(revisionNumber, offset, _metadata);
+			return true;
+		}
+	}
+
+	public String find(int revision, String key) {
+		for (MetadataEntry me : entries.get(revision).entries) {
+			if (me.matchKey(key)) {
+				return me.value();
+			}
+		}
+		return null;
+	}
+
+	private int parseMetadata(DataAccess data, final int daLength, ArrayList<MetadataEntry> _metadata) throws IOException, HgInvalidControlFileException {
+		int lastEntryStart = 2;
+		int lastColon = -1;
+		// XXX in fact, need something like ByteArrayBuilder, similar to StringBuilder,
+		// which can't be used here because we can't convert bytes to chars as we read them
+		// (there might be multi-byte encoding), and we need to collect all bytes before converting to string 
+		ByteArrayOutputStream bos = new ByteArrayOutputStream();
+		String key = null, value = null;
+		boolean byteOne = false;
+		boolean metadataIsComplete = false;
+		for (int i = 2; i < daLength; i++) {
+			byte b = data.readByte();
+			if (b == '\n') {
+				if (byteOne) { // i.e. \n follows 1
+					lastEntryStart = i+1;
+					metadataIsComplete = true;
+					// XXX is it possible to have here incomplete key/value (i.e. if last pair didn't end with \n)
+					// if yes, need to set metadataIsComplete to true in that case as well
+					break;
+				}
+				if (key == null || lastColon == -1 || i <= lastColon) {
+					log.dump(getClass(), Error, "Missing key in file revision metadata at index %d", i);
+				}
+				value = new String(bos.toByteArray()).trim();
+				bos.reset();
+				_metadata.add(new MetadataEntry(key, value));
+				key = value = null;
+				lastColon = -1;
+				lastEntryStart = i+1;
+				continue;
+			} 
+			// byteOne has to be consumed up to this line, if not yet, consume it
+			if (byteOne) {
+				// insert 1 we've read on previous step into the byte builder
+				bos.write(1);
+				byteOne = false;
+				// fall-through to consume current byte
+			}
+			if (b == (int) ':') {
+				assert value == null;
+				key = new String(bos.toByteArray());
+				bos.reset();
+				lastColon = i;
+			} else if (b == 1) {
+				byteOne = true;
+			} else {
+				bos.write(b);
+			}
+		}
+		// data.isEmpty is not reliable, renamed files of size==0 keep only metadata
+		if (!metadataIsComplete) {
+			// XXX perhaps worth a testcase (empty file, renamed; read it or ask ifCopy)
+			throw new HgInvalidControlFileException("Metadata is not closed properly", null, null);
+		}
+		return lastEntryStart;
+	}
+
+	/**
+	 * There may be several entries of metadata per single revision, this class captures single entry
+	 */
+	private static class MetadataEntry {
+		private final String entry;
+		private final int valueStart;
+
+		// key may be null
+		/* package-local */MetadataEntry(String key, String value) {
+			if (key == null) {
+				entry = value;
+				valueStart = -1; // not 0 to tell between key == null and key == ""
+			} else {
+				entry = key + value;
+				valueStart = key.length();
+			}
+		}
+
+		/* package-local */boolean matchKey(String key) {
+			return key == null ? valueStart == -1 : key.length() == valueStart && entry.startsWith(key);
+		}
+
+//			uncomment once/if needed
+//			public String key() {
+//				return entry.substring(0, valueStart);
+//			}
+
+		public String value() {
+			return valueStart == -1 ? entry : entry.substring(valueStart);
+		}
+	}
+}
\ No newline at end of file
--- a/src/org/tmatesoft/hg/internal/NewlineFilter.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/NewlineFilter.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -28,7 +28,7 @@
 import java.util.ArrayList;
 import java.util.Map;
 
-import org.tmatesoft.hg.repo.HgInvalidFileException;
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.util.Adaptable;
@@ -314,7 +314,7 @@
 			ConfigFile hgeol = new ConfigFile(hgRepo.getSessionContext());
 			try {
 				hgeol.addLocation(cfgFile);
-			} catch (HgInvalidFileException ex) {
+			} catch (HgIOException ex) {
 				hgRepo.getSessionContext().getLog().dump(getClass(), Warn, ex, null);
 			}
 			nativeRepoFormat = hgeol.getSection("repository").get("native");
--- a/src/org/tmatesoft/hg/internal/Patch.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/Patch.java	Wed Jul 10 11:48:55 2013 +0200
@@ -20,6 +20,8 @@
 import java.util.ArrayList;
 import java.util.Formatter;
 
+import org.tmatesoft.hg.core.HgIOException;
+
 /**
  * @see http://mercurial.selenic.com/wiki/BundleFormat
  * in Changelog group description
@@ -177,7 +179,7 @@
 		return prefix + totalDataLen;
 	}
 	
-	/*package-local*/ void serialize(DataSerializer out) throws IOException {
+	/*package-local*/ void serialize(DataSerializer out) throws HgIOException {
 		for (int i = 0, x = data.size(); i < x; i++) {
 			final int start = starts.get(i);
 			final int end = ends.get(i);
@@ -462,7 +464,7 @@
 
 	public class PatchDataSource implements DataSerializer.DataSource {
 
-		public void serialize(DataSerializer out) throws IOException {
+		public void serialize(DataSerializer out) throws HgIOException {
 			Patch.this.serialize(out);
 		}
 
--- a/src/org/tmatesoft/hg/internal/PhasesHelper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/PhasesHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -18,25 +18,29 @@
 
 import static org.tmatesoft.hg.repo.HgPhase.Draft;
 import static org.tmatesoft.hg.repo.HgPhase.Secret;
-import static org.tmatesoft.hg.util.LogFacility.Severity.Info;
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.Phaseroots;
 import static org.tmatesoft.hg.util.LogFacility.Severity.Warn;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
+import java.io.FileWriter;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 
 import org.tmatesoft.hg.core.HgChangeset;
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgInvalidStateException;
 import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.repo.HgPhase;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 
 /**
  * Support to deal with Mercurial phases feature (as of Mercurial version 2.1)
@@ -69,21 +73,37 @@
 		return repo.getRepo();
 	}
 
-	public boolean isCapableOfPhases() throws HgInvalidControlFileException {
+	public boolean isCapableOfPhases() throws HgRuntimeException {
 		if (null == repoSupporsPhases) {
 			repoSupporsPhases = readRoots();
 		}
 		return repoSupporsPhases.booleanValue();
 	}
+	
+	public boolean withSecretRoots() {
+		return !secretPhaseRoots.isEmpty();
+	}
 
-
-	public HgPhase getPhase(HgChangeset cset) throws HgInvalidControlFileException {
+	/**
+	 * @param cset revision to query
+	 * @return phase of the changeset, never <code>null</code>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
+	 */
+	public HgPhase getPhase(HgChangeset cset) throws HgRuntimeException {
 		final Nodeid csetRev = cset.getNodeid();
 		final int csetRevIndex = cset.getRevisionIndex();
 		return getPhase(csetRevIndex, csetRev);
 	}
 
-	public HgPhase getPhase(final int csetRevIndex, Nodeid csetRev) throws HgInvalidControlFileException {
+	/**
+	 * @param csetRevIndex revision index to query
+	 * @param csetRev revision nodeid, optional 
+	 * @return phase of the changeset, never <code>null</code>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
+	 */
+	public HgPhase getPhase(final int csetRevIndex, Nodeid csetRev) throws HgRuntimeException {
 		if (!isCapableOfPhases()) {
 			return HgPhase.Undefined;
 		}
@@ -116,21 +136,104 @@
 			}
 		}
 		return HgPhase.Public;
-
 	}
 
-	private Boolean readRoots() throws HgInvalidControlFileException {
-		File phaseroots = repo.getFileFromStoreDir("phaseroots");
-		BufferedReader br = null;
+
+	/**
+	 * @return all revisions with secret phase
+	 */
+	public RevisionSet allSecret() {
+		return allOf(HgPhase.Secret);
+	}
+	
+	/**
+	 * @return all revisions with draft phase
+	 */
+	public RevisionSet allDraft() {
+		return allOf(HgPhase.Draft).subtract(allOf(HgPhase.Secret));
+	}
+	
+	public void updateRoots(Collection<Nodeid> draftRoots, Collection<Nodeid> secretRoots) throws HgInvalidControlFileException {
+		draftPhaseRoots = draftRoots.isEmpty() ? Collections.<Nodeid>emptyList() : new ArrayList<Nodeid>(draftRoots);
+		secretPhaseRoots = secretRoots.isEmpty() ? Collections.<Nodeid>emptyList() : new ArrayList<Nodeid>(secretRoots);
+		String fmt = "%d %s\n";
+		File phaseroots = repo.getRepositoryFile(Phaseroots);
+		FileWriter fw = null;
+		try {
+			fw = new FileWriter(phaseroots);
+			for (Nodeid n : secretPhaseRoots) {
+				fw.write(String.format(fmt, HgPhase.Secret.mercurialOrdinal(), n.toString()));
+			}
+			for (Nodeid n : draftPhaseRoots) {
+				fw.write(String.format(fmt, HgPhase.Draft.mercurialOrdinal(), n.toString()));
+			}
+			fw.flush();
+		} catch (IOException ex) {
+			throw new HgInvalidControlFileException(ex.getMessage(), ex, phaseroots);
+		} finally {
+			new FileUtils(repo.getLog(), this).closeQuietly(fw);
+		}
+	}
+
+	public void newCommitNode(Nodeid newChangeset, HgPhase newCommitPhase) throws HgRuntimeException {
+		final int riCset = repo.getRepo().getChangelog().getRevisionIndex(newChangeset);
+		HgPhase ph = getPhase(riCset, newChangeset);
+		if (ph.compareTo(newCommitPhase) >= 0) {
+			// present phase is more secret than the desired one
+			return;
+		}
+		// newCommitPhase can't be public here, condition above would be satisfied
+		assert newCommitPhase != HgPhase.Public;
+		// ph is e.g public when newCommitPhase is draft
+		// or is draft when desired phase is secret
+		final RevisionSet rs = allOf(newCommitPhase).union(new RevisionSet(Collections.singleton(newChangeset)));
+		final RevisionSet newRoots;
+		if (parentHelper != null) {
+			newRoots = rs.roots(parentHelper);
+		} else {
+			newRoots = rs.roots(repo.getRepo());
+		}
+		if (newCommitPhase == HgPhase.Draft) {
+			updateRoots(newRoots.asList(), secretPhaseRoots);
+		} else if (newCommitPhase == HgPhase.Secret) {
+			updateRoots(draftPhaseRoots, newRoots.asList());
+		} else {
+			throw new HgInvalidStateException(String.format("Unexpected phase %s for new commits", newCommitPhase));
+		}
+	}
+
+	/**
+	 * For a given phase, collect all revisions with phase that is the same or more private (i.e. for Draft, returns Draft+Secret)
+	 * The reason is not a nice API intention (which is awful, indeed), but an ease of implementation 
+	 */
+	private RevisionSet allOf(HgPhase phase) {
+		assert phase != HgPhase.Public;
+		if (!isCapableOfPhases()) {
+			return new RevisionSet(Collections.<Nodeid>emptyList());
+		}
+		final List<Nodeid> roots = getPhaseRoots(phase);
+		if (parentHelper != null) {
+			return new RevisionSet(roots).union(new RevisionSet(parentHelper.childrenOf(roots)));
+		} else {
+			RevisionSet rv = new RevisionSet(Collections.<Nodeid>emptyList());
+			for (RevisionDescendants rd : getPhaseDescendants(phase)) {
+				rv = rv.union(rd.asRevisionSet());
+			}
+			return rv;
+		}
+	}
+
+	private Boolean readRoots() throws HgRuntimeException {
+		File phaseroots = repo.getRepositoryFile(Phaseroots);
 		try {
 			if (!phaseroots.exists()) {
 				return Boolean.FALSE;
 			}
+			LineReader lr = new LineReader(phaseroots, repo.getLog());
+			final Collection<String> lines = lr.read(new LineReader.SimpleLineCollector(), new LinkedList<String>());
 			HashMap<HgPhase, List<Nodeid>> phase2roots = new HashMap<HgPhase, List<Nodeid>>();
-			br = new BufferedReader(new FileReader(phaseroots));
-			String line;
-			while ((line = br.readLine()) != null) {
-				String[] lc = line.trim().split("\\s+");
+			for (String line : lines) {
+				String[] lc = line.split("\\s+");
 				if (lc.length == 0) {
 					continue;
 				}
@@ -153,17 +256,8 @@
 			}
 			draftPhaseRoots = phase2roots.containsKey(Draft) ? phase2roots.get(Draft) : Collections.<Nodeid>emptyList();
 			secretPhaseRoots = phase2roots.containsKey(Secret) ? phase2roots.get(Secret) : Collections.<Nodeid>emptyList();
-		} catch (IOException ex) {
-			throw new HgInvalidControlFileException(ex.toString(), ex, phaseroots);
-		} finally {
-			if (br != null) {
-				try {
-					br.close();
-				} catch (IOException ex) {
-					repo.getSessionContext().getLog().dump(getClass(), Info, ex, null);
-					// ignore the exception otherwise 
-				}
-			}
+		} catch (HgIOException ex) {
+			throw new HgInvalidControlFileException(ex, true);
 		}
 		return Boolean.TRUE;
 	}
@@ -177,7 +271,7 @@
 	}
 
 
-	private RevisionDescendants[] getPhaseDescendants(HgPhase phase) throws HgInvalidControlFileException {
+	private RevisionDescendants[] getPhaseDescendants(HgPhase phase) throws HgRuntimeException {
 		int ordinal = phase.ordinal();
 		if (phaseDescendants[ordinal] == null) {
 			phaseDescendants[ordinal] = buildPhaseDescendants(phase);
@@ -185,7 +279,7 @@
 		return phaseDescendants[ordinal];
 	}
 
-	private RevisionDescendants[] buildPhaseDescendants(HgPhase phase) throws HgInvalidControlFileException {
+	private RevisionDescendants[] buildPhaseDescendants(HgPhase phase) throws HgRuntimeException {
 		int[] roots = toIndexes(getPhaseRoots(phase));
 		RevisionDescendants[] rv = new RevisionDescendants[roots.length];
 		for (int i = 0; i < roots.length; i++) {
@@ -195,7 +289,7 @@
 		return rv;
 	}
 	
-	private int[] toIndexes(List<Nodeid> roots) throws HgInvalidControlFileException {
+	private int[] toIndexes(List<Nodeid> roots) throws HgRuntimeException {
 		int[] rv = new int[roots.size()];
 		for (int i = 0; i < rv.length; i++) {
 			rv[i] = getRepo().getChangelog().getRevisionIndex(roots.get(i));
--- a/src/org/tmatesoft/hg/internal/RepoInitializer.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RepoInitializer.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -23,6 +23,7 @@
 import java.io.IOException;
 import java.nio.charset.Charset;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.util.PathRewrite;
@@ -31,6 +32,7 @@
  * Responsible of `requires` processing both on repo read and repo write
  * XXX needs better name, perhaps
  * 
+ * @see http://mercurial.selenic.com/wiki/RequiresFile
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
@@ -59,23 +61,32 @@
 		return requiresFlags;
 	}
 
-	public void initEmptyRepository(File repoDir) throws IOException {
+	public void initEmptyRepository(File repoDir) throws HgIOException {
 		repoDir.mkdirs();
-		FileOutputStream requiresFile = new FileOutputStream(new File(repoDir, "requires"));
-		StringBuilder sb = new StringBuilder(40);
-		sb.append("revlogv1\n");
-		if ((requiresFlags & STORE) != 0) {
-			sb.append("store\n");
+		final File requiresFile = new File(repoDir, "requires");
+		try {
+			FileOutputStream requiresStream = new FileOutputStream(requiresFile);
+			StringBuilder sb = new StringBuilder(40);
+			if ((requiresFlags & REVLOGV1) != 0) {
+				sb.append("revlogv1\n");
+			}
+			if ((requiresFlags & STORE) != 0) {
+				sb.append("store\n");
+			}
+			if ((requiresFlags & FNCACHE) != 0) {
+				sb.append("fncache\n");
+			}
+			if ((requiresFlags & DOTENCODE) != 0) {
+				sb.append("dotencode\n");
+			}
+			requiresStream.write(sb.toString().getBytes());
+			requiresStream.close();
+		} catch (IOException ex) {
+			throw new HgIOException("Failed to initialize empty repo", ex, requiresFile);
 		}
-		if ((requiresFlags & FNCACHE) != 0) {
-			sb.append("fncache\n");
+		if ((requiresFlags & STORE) != 0) {
+			new File(repoDir, "store").mkdir(); // with that, hg verify says ok.
 		}
-		if ((requiresFlags & DOTENCODE) != 0) {
-			sb.append("dotencode\n");
-		}
-		requiresFile.write(sb.toString().getBytes());
-		requiresFile.close();
-		new File(repoDir, "store").mkdir(); // with that, hg verify says ok.
 	}
 
 	public PathRewrite buildDataFilesHelper(SessionContext ctx) {
--- a/src/org/tmatesoft/hg/internal/RepositoryComparator.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RepositoryComparator.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -33,12 +33,11 @@
 import org.tmatesoft.hg.core.HgRemoteConnectionException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgChangelog;
-import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.repo.HgRemoteRepository;
 import org.tmatesoft.hg.repo.HgRemoteRepository.Range;
 import org.tmatesoft.hg.repo.HgRemoteRepository.RemoteBranch;
-import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.ProgressSupport;
@@ -54,6 +53,7 @@
 	private final HgParentChildMap<HgChangelog> localRepo;
 	private final HgRemoteRepository remoteRepo;
 	private List<Nodeid> common;
+	private List<Nodeid> remoteHeads;
 
 	public RepositoryComparator(HgParentChildMap<HgChangelog> pwLocal, HgRemoteRepository hgRemote) {
 		localRepo = pwLocal;
@@ -81,54 +81,43 @@
 		return common;
 	}
 	
+	public List<Nodeid> getRemoteHeads() {
+		assert remoteHeads != null;
+		return remoteHeads;
+	}
+	
 	/**
 	 * @return revisions that are children of common entries, i.e. revisions that are present on the local server and not on remote.
 	 */
 	public List<Nodeid> getLocalOnlyRevisions() {
-		return localRepo.childrenOf(getCommon());
+		final List<Nodeid> c = getCommon();
+		if (c.isEmpty()) {
+			return localRepo.all();
+		} else {
+			final RevisionSet rsCommon = new RevisionSet(c);
+			final RevisionSet localHeads = new RevisionSet(localRepo.heads());
+			final List<Nodeid> commonChildren = localRepo.childrenOf(c);
+			final RevisionSet rsCommonChildren = new RevisionSet(commonChildren);
+			// check if there's any revision in the repository that doesn't trace to common
+			// e.g. branches from one of common ancestors
+			RevisionSet headsNotFromCommon = localHeads.subtract(rsCommonChildren).subtract(rsCommon);
+			if (headsNotFromCommon.isEmpty()) {
+				return commonChildren;
+			}
+			RevisionSet all = new RevisionSet(localRepo.all());
+			// need outgoing := ancestors(missing) - ancestors(common):
+			RevisionSet rsAncestors = all.ancestors(headsNotFromCommon, localRepo);
+			// #ancestors gives only parents, we need terminating children as well
+			rsAncestors = rsAncestors.union(headsNotFromCommon);
+			final RevisionSet rsAncestorsCommon = all.ancestors(rsCommon, localRepo);
+			RevisionSet outgoing = rsAncestors.subtract(rsAncestorsCommon).subtract(rsCommon);
+			// outgoing keeps children that spined off prior to common revisions
+			return outgoing.union(rsCommonChildren).asList();
+		}
 	}
 	
-	/**
-	 * Similar to @link {@link #getLocalOnlyRevisions()}, use this one if you need access to changelog entry content, not 
-	 * only its revision number. 
-	 * @param inspector delegate to analyze changesets, shall not be <code>null</code>
-	 */
-	public void visitLocalOnlyRevisions(HgChangelog.Inspector inspector) throws HgInvalidControlFileException {
-		if (inspector == null) {
-			throw new IllegalArgumentException();
-		}
-		// one can use localRepo.childrenOf(getCommon()) and then iterate over nodeids, but there seems to be
-		// another approach to get all changes after common:
-		// find index of earliest revision, and report all that were later
-		final HgChangelog changelog = localRepo.getRepo().getChangelog();
-		int earliestRevision = Integer.MAX_VALUE;
-		List<Nodeid> commonKnown = getCommon();
-		for (Nodeid n : commonKnown) {
-			if (!localRepo.hasChildren(n)) {
-				// there might be (old) nodes, known both locally and remotely, with no children
-				// hence, we don't need to consider their local revision number
-				continue;
-			}
-			int lr = changelog.getRevisionIndex(n);
-			if (lr < earliestRevision) {
-				earliestRevision = lr;
-			}
-		}
-		if (earliestRevision == Integer.MAX_VALUE) {
-			// either there are no common nodes (known locally and at remote)
-			// or no local children found (local is up to date). In former case, perhaps I shall bit return silently,
-			// but check for possible wrong repo comparison (hs says 'repository is unrelated' if I try to 
-			// check in/out for a repo that has no common nodes.
-			return;
-		}
-		if (earliestRevision < 0 || earliestRevision >= changelog.getLastRevision()) {
-			throw new HgInvalidStateException(String.format("Invalid index of common known revision: %d in total of %d", earliestRevision, 1+changelog.getLastRevision()));
-		}
-		changelog.range(earliestRevision+1, changelog.getLastRevision(), inspector);
-	}
-
 	private List<Nodeid> findCommonWithRemote() throws HgRemoteConnectionException {
-		List<Nodeid> remoteHeads = remoteRepo.heads();
+		remoteHeads = remoteRepo.heads();
 		LinkedList<Nodeid> resultCommon = new LinkedList<Nodeid>(); // these remotes are known in local
 		LinkedList<Nodeid> toQuery = new LinkedList<Nodeid>(); // these need further queries to find common
 		for (Nodeid rh : remoteHeads) {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/ReverseIterator.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.ListIterator;
+
+/**
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class ReverseIterator<E> implements Iterator<E> {
+	private final ListIterator<E> listIterator;
+	
+	public ReverseIterator(List<E> list) {
+		listIterator = list.listIterator(list.size());
+	}
+
+	public boolean hasNext() {
+		return listIterator.hasPrevious();
+	}
+	public E next() {
+		return listIterator.previous();
+	}
+	public void remove() {
+		listIterator.remove();
+	}
+
+	public static <T> Iterable<T> reversed(final List<T> list) {
+		return new Iterable<T>() {
+
+			public Iterator<T> iterator() {
+				return new ReverseIterator<T>(list);
+			}
+		};
+	}
+}
\ No newline at end of file
--- a/src/org/tmatesoft/hg/internal/RevisionDescendants.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevisionDescendants.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -16,13 +16,14 @@
  */
 package org.tmatesoft.hg.internal;
 
+import java.util.ArrayList;
 import java.util.BitSet;
 
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgChangelog;
-import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 
 /**
  * Represent indicators which revisions are descendants of the supplied root revision
@@ -37,9 +38,10 @@
 	private final int rootRevIndex;
 	private final int tipRevIndex; // this is the last revision we cache to
 	private final BitSet descendants;
+	private RevisionSet revset;
 
 	// in fact, may be refactored to deal not only with changelog, but any revlog (not sure what would be the usecase, though)
-	public RevisionDescendants(HgRepository hgRepo, int revisionIndex) {
+	public RevisionDescendants(HgRepository hgRepo, int revisionIndex) throws HgRuntimeException {
 		repo = hgRepo;
 		rootRevIndex = revisionIndex;
 		// even if tip moves, we still answer correctly for those isCandidate()
@@ -51,7 +53,7 @@
 		descendants = new BitSet(tipRevIndex - rootRevIndex + 1);
 	}
 	
-	public void build() throws HgInvalidControlFileException {
+	public void build() throws HgRuntimeException {
 		final BitSet result = descendants;
 		result.set(0);
 		if (rootRevIndex == tipRevIndex) {
@@ -108,4 +110,21 @@
 		assert ix < descendants.size();
 		return descendants.get(ix);
 	}
+
+	public RevisionSet asRevisionSet() {
+		if (revset == null) {
+			final ArrayList<Nodeid> revisions = new ArrayList<Nodeid>(descendants.cardinality());
+			repo.getChangelog().indexWalk(rootRevIndex, tipRevIndex, new HgChangelog.RevisionInspector() {
+
+				public void next(int revisionIndex, Nodeid revision, int linkedRevisionIndex) throws HgRuntimeException {
+					if (isDescendant(revisionIndex)) {
+						revisions.add(revision);
+					}
+				}
+			});
+			assert revisions.size() == descendants.cardinality();
+			revset = new RevisionSet(revisions);
+		}
+		return revset;
+	}
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/RevisionLookup.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION;
+
+import java.util.Arrays;
+
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgInvalidRevisionException;
+import org.tmatesoft.hg.repo.HgRevisionMap;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+
+/**
+ * Lite alternative to {@link HgRevisionMap}, to speed up nodeid to index conversion without consuming too much memory.
+ * E.g. for a 100k revisions, {@link HgRevisionMap} consumes 3 * (N * sizeof(int)) for indexes plus 48 bytes per 
+ * Nodeid instance, total (12+48)*N = 6 mb of memory. {@link RevisionLookup} instead keeps only Nodeid hashes, (N * sizeof(int) = 400 kb),
+ * but is slower in lookup, O(N/2) to find potential match plus disk read operation (or few, in an unlikely case of hash collisions).
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class RevisionLookup implements RevlogStream.Inspector {
+	
+	private final RevlogStream content;
+	private int[] nodeidHashes;
+
+	public RevisionLookup(RevlogStream stream) {
+		assert stream != null;
+		content = stream;
+	}
+	
+	public static RevisionLookup createFor(RevlogStream stream) throws HgRuntimeException {
+		RevisionLookup rv = new RevisionLookup(stream);
+		int revCount = stream.revisionCount();
+		rv.prepare(revCount);
+		if (revCount > 0) {
+			stream.iterate(0, revCount - 1, false, rv);
+		}
+		return rv;
+	}
+
+	public void prepare(int count) {
+		nodeidHashes = new int[count];
+		Arrays.fill(nodeidHashes, BAD_REVISION);
+	}
+	public void next(int index, byte[] nodeid) {
+		nodeidHashes[index] = Nodeid.hashCode(nodeid);
+	}
+	public void next(int index, Nodeid nodeid) {
+		nodeidHashes[index] = nodeid.hashCode();
+	}
+	public int findIndex(Nodeid nodeid) throws HgInvalidControlFileException, HgInvalidRevisionException {
+		final int hash = nodeid.hashCode();
+		for (int i = 0; i < nodeidHashes.length; i++) {
+			if (nodeidHashes[i] == hash) {
+				byte[] nodeidAtI = content.nodeid(i);
+				if (nodeid.equalsTo(nodeidAtI)) {
+					return i;
+				}
+			}
+			// else: false match (only 4 head bytes matched, continue loop)
+		}
+		return BAD_REVISION;
+	}
+
+	public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
+		next(revisionIndex, nodeid);
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/RevisionSet.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,271 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgParentChildMap;
+import org.tmatesoft.hg.repo.HgRepository;
+
+/**
+ * Unmodifiable collection of revisions with handy set operations
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class RevisionSet implements Iterable<Nodeid> {
+	
+	private final Set<Nodeid> elements;
+	
+	public RevisionSet(Nodeid... revisions) {
+		this(revisions == null ? null : Arrays.asList(revisions));
+	}
+	
+	public RevisionSet(Collection<Nodeid> revisions) {
+		this(revisions == null ? new HashSet<Nodeid>() : new HashSet<Nodeid>(revisions));
+	}
+	
+	private RevisionSet(HashSet<Nodeid> revisions) {
+		if (revisions.isEmpty()) {
+			elements = Collections.<Nodeid>emptySet();
+		} else {
+			elements = revisions;
+		}
+	}
+
+	/**
+	 * elements of the set with no parents or parents not from the same set 
+	 */
+	public RevisionSet roots(HgParentChildMap<HgChangelog> ph) {
+		HashSet<Nodeid> copy = new HashSet<Nodeid>(elements);
+		for (Nodeid n : elements) {
+			assert ph.knownNode(n);
+			Nodeid p1 = ph.firstParent(n);
+			if (p1 != null && elements.contains(p1)) {
+				copy.remove(n);
+				continue;
+			}
+			Nodeid p2 = ph.secondParent(n);
+			if (p2 != null && elements.contains(p2)) {
+				copy.remove(n);
+				continue;
+			}
+		}
+		return copy.size() == elements.size() ? this : new RevisionSet(copy);
+	}
+	
+	/**
+	 * Same as {@link #roots(HgParentChildMap)}, but doesn't require a parent-child map
+	 */
+	public RevisionSet roots(HgRepository repo) {
+		// TODO introduce parent access interface, use it here, provide implementations 
+		// that delegate to HgParentChildMap or HgRepository
+		HashSet<Nodeid> copy = new HashSet<Nodeid>(elements);
+		final HgChangelog clog = repo.getChangelog();
+		byte[] parent1 = new byte[Nodeid.SIZE], parent2 = new byte[Nodeid.SIZE];
+		int[] parentRevs = new int[2];
+		for (Nodeid n : elements) {
+			assert clog.isKnown(n);
+			clog.parents(clog.getRevisionIndex(n), parentRevs, parent1, parent2);
+			if (parentRevs[0] != NO_REVISION && elements.contains(new Nodeid(parent1, false))) {
+				copy.remove(n);
+				continue;
+			}
+			if (parentRevs[1] != NO_REVISION && elements.contains(new Nodeid(parent2, false))) {
+				copy.remove(n);
+				continue;
+			}
+		}
+		return copy.size() == elements.size() ? this : new RevisionSet(copy);
+	}
+	
+	/**
+	 * elements of the set that has no children in this set 
+	 */
+	public RevisionSet heads(HgParentChildMap<HgChangelog> ph) {
+		HashSet<Nodeid> copy = new HashSet<Nodeid>(elements);
+		// can't do copy.removeAll(ph.childrenOf(asList())); as actual heads are indeed children of some other node
+		for (Nodeid n : elements) {
+			assert ph.knownNode(n);
+			Nodeid p1 = ph.firstParent(n);
+			Nodeid p2 = ph.secondParent(n);
+			if (p1 != null && elements.contains(p1)) {
+				copy.remove(p1);
+			}
+			if (p2 != null && elements.contains(p2)) {
+				copy.remove(p2);
+			}
+		}
+		return copy.size() == elements.size() ? this : new RevisionSet(copy);
+	}
+
+	/**
+	 * Any ancestor of an element from the supplied child set found in this one. 
+	 * Elements of the supplied child set are not part of return value.  
+	 */
+	public RevisionSet ancestors(RevisionSet children, HgParentChildMap<HgChangelog> parentHelper) {
+		if (isEmpty()) {
+			return this;
+		}
+		if (children.isEmpty()) {
+			return children;
+		}
+		RevisionSet chRoots = children.roots(parentHelper);
+		HashSet<Nodeid> ancestors = new HashSet<Nodeid>();
+		Set<Nodeid> childrenToCheck = chRoots.elements;
+		while (!childrenToCheck.isEmpty()) {
+			HashSet<Nodeid> nextRound = new HashSet<Nodeid>();
+			for (Nodeid n : childrenToCheck) {
+				Nodeid p1 = parentHelper.firstParent(n);
+				Nodeid p2 = parentHelper.secondParent(n);
+				if (p1 != null && elements.contains(p1)) {
+					nextRound.add(p1);
+				}
+				if (p2 != null && elements.contains(p2)) {
+					nextRound.add(p2);
+				}
+			}
+			ancestors.addAll(nextRound);
+			childrenToCheck = nextRound;
+		} 
+		return new RevisionSet(ancestors);
+	}
+	
+	/**
+	 * Revisions that are both direct and indirect children of elements of this revision set
+	 * as known in supplied parent-child map
+	 */
+	public RevisionSet children(HgParentChildMap<HgChangelog> parentHelper) {
+		if (isEmpty()) {
+			return this;
+		}
+		List<Nodeid> children = parentHelper.childrenOf(elements);
+		return new RevisionSet(new HashSet<Nodeid>(children));
+	}
+
+	public RevisionSet intersect(RevisionSet other) {
+		if (isEmpty()) {
+			return this;
+		}
+		if (other.isEmpty()) {
+			return other;
+		}
+		HashSet<Nodeid> copy = new HashSet<Nodeid>(elements);
+		copy.retainAll(other.elements);
+		return copy.size() == elements.size() ? this : new RevisionSet(copy);
+	}
+	
+	public RevisionSet subtract(RevisionSet other) {
+		if (isEmpty() || other.isEmpty()) {
+			return this;
+		}
+		HashSet<Nodeid> copy = new HashSet<Nodeid>(elements);
+		copy.removeAll(other.elements);
+		return copy.size() == elements.size() ? this : new RevisionSet(copy);
+	}
+
+	public RevisionSet union(RevisionSet other) {
+		if (isEmpty()) {
+			return other;
+		}
+		if (other.isEmpty()) {
+			return this;
+		}
+		HashSet<Nodeid> copy = new HashSet<Nodeid>(elements);
+		copy.addAll(other.elements);
+		return copy.size() == elements.size() ? this : new RevisionSet(copy);
+	}
+
+	/**
+	 * A ^ B := (A\B).union(B\A)
+	 * A ^ B := A.union(B) \ A.intersect(B)
+	 */
+	public RevisionSet symmetricDifference(RevisionSet other) {
+		if (isEmpty()) {
+			return this;
+		}
+		if (other.isEmpty()) {
+			return other;
+		}
+		HashSet<Nodeid> copyA = new HashSet<Nodeid>(elements);
+		HashSet<Nodeid> copyB = new HashSet<Nodeid>(other.elements);
+		copyA.removeAll(other.elements);
+		copyB.removeAll(elements);
+		copyA.addAll(copyB);
+		return new RevisionSet(copyA);
+	}
+
+	public boolean isEmpty() {
+		return elements.isEmpty();
+	}
+
+	public int size() {
+		return elements.size();
+	}
+
+	public List<Nodeid> asList() {
+		return new ArrayList<Nodeid>(elements);
+	}
+	
+	public Iterator<Nodeid> iterator() {
+		return elements.iterator();
+	}
+	
+	@Override
+	public String toString() {
+		StringBuilder sb = new StringBuilder();
+		sb.append('<');
+		if (!isEmpty()) {
+			sb.append(elements.size());
+			sb.append(':');
+		}
+		for (Nodeid n : elements) {
+			sb.append(n.shortNotation());
+			sb.append(',');
+		}
+		if (sb.length() > 1) {
+			sb.setCharAt(sb.length() - 1, '>');
+		} else {
+			sb.append('>');
+		}
+		return sb.toString();
+	}
+	
+	@Override
+	public boolean equals(Object obj) {
+		if (false == obj instanceof RevisionSet) {
+			return false;
+		}
+		return elements.equals(((RevisionSet) obj).elements);
+	}
+	
+	@Override
+	public int hashCode() {
+		return elements.hashCode();
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/RevlogChangeMonitor.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.util.WeakHashMap;
+
+/**
+ * Detect changes to revlog files. Not a general file change monitoring as we utilize the fact revlogs are append-only (and even in case
+ * of stripped-off tail revisions, with e.g. mq, detection approach is still valid).
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class RevlogChangeMonitor {
+	
+	private final WeakHashMap<File, Long> lastKnownSize;
+	private final WeakHashMap<File, Long> lastKnownTime;
+	private final File soleFile;
+	private long soleFileSize = -1;
+	private long soleFileTime = -1;
+	
+	// use a single instance for multiple files. TODO [1.2] repository/session context shall provide
+	// alternative (configurable) implementations, so that Java7 users may supply better one
+	public RevlogChangeMonitor() {
+		lastKnownSize = new WeakHashMap<File, Long>();
+		lastKnownTime= new WeakHashMap<File, Long>();
+		soleFile = null;
+	}
+	
+	public RevlogChangeMonitor(File f) {
+		assert f != null;
+		lastKnownSize = lastKnownTime = null;
+		soleFile = f;
+	}
+	
+	public void touch(File f) {
+		assert f != null;
+		if (lastKnownSize == null) {
+			assert f == soleFile;
+			soleFileSize = f.length();
+			soleFileTime = f.lastModified();
+		} else {
+			lastKnownSize.put(f, f.length());
+			lastKnownTime.put(f, f.lastModified());
+		}
+	}
+	
+	public boolean hasChanged(File f) {
+		assert f != null;
+		if (lastKnownSize == null) {
+			assert f == soleFile;
+			return soleFileSize != f.length() || soleFileTime != f.lastModified();
+		} else {
+			Long lastSize = lastKnownSize.get(f);
+			Long lastTime = lastKnownTime.get(f);
+			if (lastSize == null || lastTime == null) {
+				return true;
+			}
+			return f.length() != lastSize || f.lastModified() != lastTime;
+		}
+	}
+}
--- a/src/org/tmatesoft/hg/internal/RevlogCompressor.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevlogCompressor.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,10 +16,11 @@
  */
 package org.tmatesoft.hg.internal;
 
-import java.io.IOException;
 import java.util.zip.Deflater;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.LogFacility.Severity;
 
 /**
@@ -44,7 +45,7 @@
 	}
 	
 	// out stream is not closed!
-	public int writeCompressedData(DataSerializer out) throws IOException {
+	public int writeCompressedData(DataSerializer out) throws HgIOException, HgRuntimeException {
 		zip.reset();
 		DeflaterDataSerializer dds = new DeflaterDataSerializer(out, zip, sourceData.serializeLength());
 		sourceData.serialize(dds);
@@ -52,7 +53,7 @@
 		return zip.getTotalOut();
 	}
 
-	public int getCompressedLength() {
+	public int getCompressedLength() throws HgRuntimeException {
 		if (compressedLen != -1) {
 			return compressedLen;
 		}
@@ -61,7 +62,7 @@
 			compressedLen = writeCompressedData(counter);
 			assert counter.totalWritten == compressedLen;
 	        return compressedLen;
-		} catch (IOException ex) {
+		} catch (HgIOException ex) {
 			// can't happen provided we write to our stream that does nothing but byte counting
 			ctx.getLog().dump(getClass(), Severity.Error, ex, "Failed estimating compressed length of revlog data");
 			return counter.totalWritten; // use best known value so far
@@ -71,15 +72,15 @@
 	private static class Counter extends DataSerializer {
 		public int totalWritten = 0;
 
-		public void writeByte(byte... values) throws IOException {
+		public void writeByte(byte... values) throws HgIOException {
 			totalWritten += values.length;
 		}
 
-		public void writeInt(int... values) throws IOException {
+		public void writeInt(int... values) throws HgIOException {
 			totalWritten += 4 * values.length;
 		}
 
-		public void write(byte[] data, int offset, int length) throws IOException {
+		public void write(byte[] data, int offset, int length) throws HgIOException {
 			totalWritten += length;
 		}
 	}
--- a/src/org/tmatesoft/hg/internal/RevlogStream.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevlogStream.java	Wed Jul 10 11:48:55 2013 +0200
@@ -17,19 +17,28 @@
 package org.tmatesoft.hg.internal;
 
 import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION;
+import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
 import static org.tmatesoft.hg.repo.HgRepository.TIP;
 import static org.tmatesoft.hg.internal.Internals.REVLOGV1_RECORD_SIZE;
 
 import java.io.File;
 import java.io.IOException;
+import java.lang.ref.Reference;
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.SoftReference;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.zip.Inflater;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgInternals;
 import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgInvalidRevisionException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Adaptable;
 
 
@@ -45,6 +54,8 @@
  */
 public class RevlogStream {
 
+	static final int INLINEDATA = 1 << 16;
+
 	/*
 	 * makes sense for index with inline data only - actual offset of the record in the .i file (record entry + revision * record size))
 	 * 
@@ -58,30 +69,54 @@
 	private boolean inline = false;
 	private final File indexFile;
 	private File dataFile;
-	private final DataAccessProvider dataAccess;
+	private final Internals repo;
+	// keeps last complete revision we've read. Note, this cached revision doesn't help
+	// for subsequent #iterate() calls with the same revision (Inspector needs more data than 
+	// we currently cache here, perhaps, we shall cache everything it wants to cover same 
+	// revision case as well). Now this helps when second #iterate() call is for a revision greater
+	// than one from the first call, and both revisions got same base rev. It's often the case when
+	// parents/children are analyzed.
+	private SoftReference<CachedRevision> lastRevisionRead;
+	private final ReferenceQueue<CachedRevision> lastRevisionQueue = new ReferenceQueue<CachedRevision>();
+	//
+	private final RevlogChangeMonitor changeTracker;
+	private List<Observer> observers;
+	private boolean shallDropDerivedCaches = false;
 
-	// if we need anything else from HgRepo, might replace DAP parameter with HgRepo and query it for DAP.
-	public RevlogStream(DataAccessProvider dap, File indexFile) {
-		this.dataAccess = dap;
+	public RevlogStream(Internals hgRepo, File indexFile) {
+		repo = hgRepo;
 		this.indexFile = indexFile;
+		changeTracker = repo.getRevlogTracker(indexFile);
+	}
+	
+	public boolean exists() {
+		return indexFile.exists();
 	}
 
-	/*package*/ DataAccess getIndexStream() {
-		// FIXME [1.1] must supply a hint that I'll need really few bytes of data (perhaps, at some offset) 
-		// to avoid mmap files when only few bytes are to be read (i.e. #dataLength())
-		return dataAccess.createReader(indexFile);
+	/**
+	 * @param shortRead pass <code>true</code> to indicate intention to read few revisions only (as opposed to reading most of/complete revlog)
+	 * @return never <code>null</code>, empty {@link DataAccess} if no stream is available
+	 */
+	/*package*/ DataAccess getIndexStream(boolean shortRead) {
+		// shortRead hint helps  to avoid mmap files when only 
+		// few bytes are to be read (i.e. #dataLength())
+		DataAccessProvider dataAccess = repo.getDataAccess();
+		return dataAccess.createReader(indexFile, shortRead);
 	}
 
 	/*package*/ DataAccess getDataStream() {
-		return dataAccess.createReader(getDataFile());
+		DataAccessProvider dataAccess = repo.getDataAccess();
+		return dataAccess.createReader(getDataFile(), false);
 	}
 	
-	/*package*/ DataSerializer getIndexStreamWriter() {
-		return dataAccess.createWriter(indexFile, true);
+	/*package*/ DataSerializer getIndexStreamWriter(Transaction tr) throws HgIOException {
+		DataAccessProvider dataAccess = repo.getDataAccess();
+		return dataAccess.createWriter(tr, indexFile, true);
 	}
 	
-	/*package*/ DataSerializer getDataStreamWriter() {
-		return dataAccess.createWriter(getDataFile(), true);
+	/*package*/ DataSerializer getDataStreamWriter(Transaction tr) throws HgIOException {
+		DataAccessProvider dataAccess = repo.getDataAccess();
+		return dataAccess.createWriter(tr, getDataFile(), true);
 	}
 	
 	/**
@@ -118,12 +153,12 @@
 		return inline ? indexFile.getPath() : getDataFile().getPath();
 	}
 
-	public boolean isInlineData() {
+	public boolean isInlineData() throws HgInvalidControlFileException {
 		initOutline();
 		return inline;
 	}
 	
-	public int revisionCount() {
+	public int revisionCount() throws HgInvalidControlFileException {
 		initOutline();
 		return baseRevisions.length;
 	}
@@ -136,7 +171,7 @@
 		// XXX in fact, use of iterate() instead of this implementation may be quite reasonable.
 		//
 		revisionIndex = checkRevisionIndex(revisionIndex);
-		DataAccess daIndex = getIndexStream();
+		DataAccess daIndex = getIndexStream(true);
 		try {
 			int recordOffset = getIndexOffsetInt(revisionIndex);
 			daIndex.seek(recordOffset + 12); // 6+2+4
@@ -157,7 +192,7 @@
 	 */
 	public byte[] nodeid(int revisionIndex) throws HgInvalidControlFileException, HgInvalidRevisionException {
 		revisionIndex = checkRevisionIndex(revisionIndex);
-		DataAccess daIndex = getIndexStream();
+		DataAccess daIndex = getIndexStream(true);
 		try {
 			int recordOffset = getIndexOffsetInt(revisionIndex);
 			daIndex.seek(recordOffset + 32);
@@ -179,7 +214,7 @@
 	 */
 	public int linkRevision(int revisionIndex) throws HgInvalidControlFileException, HgInvalidRevisionException {
 		revisionIndex = checkRevisionIndex(revisionIndex);
-		DataAccess daIndex = getIndexStream();
+		DataAccess daIndex = getIndexStream(true);
 		try {
 			int recordOffset = getIndexOffsetInt(revisionIndex);
 			daIndex.seek(recordOffset + 20);
@@ -199,11 +234,38 @@
 	 * @throws HgInvalidRevisionException if revisionIndex argument doesn't represent a valid record in the revlog
 	 */
 	public int baseRevision(int revisionIndex) throws HgInvalidControlFileException, HgInvalidRevisionException {
-		initOutline();
 		revisionIndex = checkRevisionIndex(revisionIndex);
 		return getBaseRevision(revisionIndex);
 	}
 	
+	/**
+	 * Read indexes of parent revisions
+	 * @param revisionIndex index of child revision
+	 * @param parents array to hold return value, length >= 2
+	 * @return value of <code>parents</code> parameter for convenience
+	 * @throws HgInvalidControlFileException if attempt to read index file failed
+	 * @throws HgInvalidRevisionException if revisionIndex argument doesn't represent a valid record in the revlog
+	 */
+	public int[] parents(int revisionIndex, int[] parents) throws HgInvalidControlFileException, HgInvalidRevisionException {
+		assert parents.length > 1;
+		revisionIndex = checkRevisionIndex(revisionIndex);
+		DataAccess daIndex = getIndexStream(true);
+		try {
+			int recordOffset = getIndexOffsetInt(revisionIndex);
+			daIndex.seek(recordOffset + 24);
+			int p1 = daIndex.readInt();
+			int p2 = daIndex.readInt();
+			// although NO_REVISION == -1, it doesn't hurt to ensure this
+			parents[0] = p1 == -1 ? NO_REVISION : p1;
+			parents[1] = p2 == -1 ? NO_REVISION : p2;
+			return parents;
+		} catch (IOException ex) {
+			throw new HgInvalidControlFileException("Parents lookup failed", ex, indexFile).setRevisionIndex(revisionIndex);
+		} finally {
+			daIndex.done();
+		}
+	}
+	
 	// Perhaps, RevlogStream should be limited to use of plain int revisions for access,
 	// while Nodeids should be kept on the level up, in Revlog. Guess, Revlog better keep
 	// map of nodeids, and once this comes true, we may get rid of this method.
@@ -215,7 +277,7 @@
 	public int findRevisionIndex(Nodeid nodeid) throws HgInvalidControlFileException {
 		// XXX this one may be implemented with iterate() once there's mechanism to stop iterations
 		final int indexSize = revisionCount();
-		DataAccess daIndex = getIndexStream();
+		DataAccess daIndex = getIndexStream(false);
 		try {
 			byte[] nodeidBuf = new byte[20];
 			for (int i = 0; i < indexSize; i++) {
@@ -240,11 +302,11 @@
 	 * @return value suitable for the corresponding field in the new revision's header, not physical offset in the file 
 	 * (which is different in case of inline revlogs)
 	 */
-	public long newEntryOffset() {
+	public long newEntryOffset() throws HgInvalidControlFileException {
 		if (revisionCount() == 0) {
 			return 0;
 		}
-		DataAccess daIndex = getIndexStream();
+		DataAccess daIndex = getIndexStream(true);
 		int lastRev = revisionCount() - 1;
 		try {
 			int recordOffset = getIndexOffsetInt(lastRev);
@@ -260,11 +322,12 @@
 		}
 	}
 
-
-
-	// should be possible to use TIP, ALL, or -1, -2, -n notation of Hg
-	// ? boolean needsNodeid
-	public void iterate(int start, int end, boolean needData, Inspector inspector) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	/**
+	 * should be possible to use TIP, ALL, or -1, -2, -n notation of Hg
+	 * ? boolean needsNodeid
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 */
+	public void iterate(int start, int end, boolean needData, Inspector inspector) throws HgRuntimeException {
 		initOutline();
 		final int indexSize = revisionCount();
 		if (indexSize == 0) {
@@ -279,16 +342,15 @@
 		HgInternals.checkRevlogRange(start, end, indexSize-1);
 		// XXX may cache [start .. end] from index with a single read (pre-read)
 		
-		ReaderN1 r = new ReaderN1(needData, inspector, dataAccess.shallMergePatches());
+		ReaderN1 r = new ReaderN1(needData, inspector, repo.shallMergePatches());
 		try {
-			r.start(end - start + 1);
+			r.start(end - start + 1, getLastRevisionRead());
 			r.range(start, end);
 		} catch (IOException ex) {
 			throw new HgInvalidControlFileException(String.format("Failed reading [%d..%d]", start, end), ex, indexFile);
-		} catch (HgInvalidControlFileException ex) {
-			throw ex;
 		} finally {
-			r.finish();
+			CachedRevision cr = r.finish();
+			setLastRevisionRead(cr);
 		}
 	}
 	
@@ -298,8 +360,9 @@
 	 * @param sortedRevisions revisions to walk, in ascending order.
 	 * @param needData whether inspector needs access to header only
 	 * @param inspector callback to process entries
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
-	public void iterate(int[] sortedRevisions, boolean needData, Inspector inspector) throws HgInvalidRevisionException, HgInvalidControlFileException /*REVISIT - too general exception*/ {
+	public void iterate(int[] sortedRevisions, boolean needData, Inspector inspector) throws HgRuntimeException {
 		final int indexSize = revisionCount();
 		if (indexSize == 0 || sortedRevisions.length == 0) {
 			return;
@@ -311,9 +374,9 @@
 			throw new HgInvalidRevisionException(String.format("Can't iterate [%d, %d] in range [0..%d]", sortedRevisions[0], sortedRevisions[sortedRevisions.length - 1], indexSize), null, sortedRevisions[sortedRevisions.length - 1]);
 		}
 
-		ReaderN1 r = new ReaderN1(needData, inspector, dataAccess.shallMergePatches());
+		ReaderN1 r = new ReaderN1(needData, inspector, repo.shallMergePatches());
 		try {
-			r.start(sortedRevisions.length);
+			r.start(sortedRevisions.length, getLastRevisionRead());
 			for (int i = 0; i < sortedRevisions.length; ) {
 				int x = i;
 				i++;
@@ -331,16 +394,47 @@
 			}
 		} catch (IOException ex) {
 			final int c = sortedRevisions.length;
-			throw new HgInvalidControlFileException(String.format("Failed reading %d revisions in [%d; %d]",c, sortedRevisions[0], sortedRevisions[c-1]), ex, indexFile);
-		} catch (HgInvalidControlFileException ex) {
-			// TODO post-1.0 fill HgRuntimeException with appropriate file (either index or data, depending on error source)
-			throw ex;
+			throw new HgInvalidControlFileException(String.format("Failed reading %d revisions in [%d; %d]", c, sortedRevisions[0], sortedRevisions[c-1]), ex, indexFile);
 		} finally {
-			r.finish();
+			CachedRevision cr = r.finish();
+			setLastRevisionRead(cr);
 		}
 	}
+	
+	public void attach(Observer listener) {
+		assert listener != null;
+		if (observers == null) {
+			observers = new ArrayList<Observer>(3);
+		}
+		observers.add(listener);
+	}
+	
+	public void detach(Observer listener) {
+		assert listener != null;
+		if (observers != null) {
+			observers.remove(listener);
+		}
+	}
+	
+	/*
+	 * Note, this method IS NOT a replacement for Observer. It has to be invoked when the validity of any
+	 * cache built using revision information is in doubt, but it provides reasonable value only till the
+	 * first initOutline() to be invoked, i.e. in [change..revlog read operation] time frame. If your code
+	 * accesses cached information without any prior explicit read operation, you shall consult this method
+	 * if next read operation would in fact bring changed content.
+	 * Observer is needed in addition to this method because any revlog read operation (e.g. Revlog#getLastRevision)
+	 * would clear shallDropDerivedCaches(), and if code relies only on this method to clear its derived caches,
+	 * it would miss the update.
+	 */
+	public boolean shallDropDerivedCaches() {
+		if (shallDropDerivedCaches) {
+			return shallDropDerivedCaches;
+		}
+		return shallDropDerivedCaches = changeTracker.hasChanged(indexFile);
+	}
 
 	void revisionAdded(int revisionIndex, Nodeid revision, int baseRevisionIndex, long revisionOffset) throws HgInvalidControlFileException {
+		shallDropDerivedCaches = true;
 		if (!outlineCached()) {
 			return;
 		}
@@ -353,6 +447,10 @@
 		}
 		assert revision != null;
 		assert !revision.isNull();
+		// next effort doesn't seem to be of any value at least in case of regular commit
+		// as the next call to #initOutline would recognize the file change and reload complete revlog anyway
+		// OTOH, there might be transaction strategy that doesn't update the file until its completion,
+		// while it's handy to know new revisions meanwhile.
 		int[] baseRevisionsCopy = new int[baseRevisions.length + 1];
 		System.arraycopy(baseRevisions, 0, baseRevisionsCopy, 0, baseRevisions.length);
 		baseRevisionsCopy[baseRevisions.length] = baseRevisionIndex;
@@ -379,7 +477,7 @@
 		return inline ? indexRecordOffset[revisionIndex] : revisionIndex * REVLOGV1_RECORD_SIZE;
 	}
 	
-	private int checkRevisionIndex(int revisionIndex) throws HgInvalidRevisionException {
+	private int checkRevisionIndex(int revisionIndex) throws HgInvalidControlFileException, HgInvalidRevisionException {
 		final int last = revisionCount() - 1;
 		if (revisionIndex == TIP) {
 			revisionIndex = last;
@@ -406,11 +504,21 @@
 		return o + REVLOGV1_RECORD_SIZE * recordIndex;
 	}
 
+	// every access to the index revlog shall go through this method only.
 	private void initOutline() throws HgInvalidControlFileException {
+		// true to send out 'drop-your-caches' event after outline has been built
+		final boolean notifyReload;
 		if (outlineCached()) {
-			return;
+			if (!changeTracker.hasChanged(indexFile)) {
+				return;
+			}
+			notifyReload = true;
+		} else {
+			// no cached outline - initial read, do not send any reload/invalidate notifications
+			notifyReload = false;
 		}
-		DataAccess da = getIndexStream();
+		changeTracker.touch(indexFile);
+		DataAccess da = getIndexStream(false);
 		try {
 			if (da.isEmpty()) {
 				// do not fail with exception if stream is empty, it's likely intentional
@@ -421,7 +529,6 @@
 			}
 			int versionField = da.readInt();
 			da.readInt(); // just to skip next 4 bytes of offset + flags
-			final int INLINEDATA = 1 << 16;
 			inline = (versionField & INLINEDATA) != 0;
 			IntVector resBases, resOffsets = null;
 			int entryCountGuess = Internals.ltoi(da.longLength() / REVLOGV1_RECORD_SIZE);
@@ -468,9 +575,46 @@
 			throw new HgInvalidControlFileException("Failed to analyze revlog index", ex, indexFile);
 		} finally {
 			da.done();
+			if (notifyReload && observers != null) {
+				for (Observer l : observers) {
+					l.reloaded(this);
+				}
+				shallDropDerivedCaches = false;
+			}
 		}
 	}
 	
+	private CachedRevision getLastRevisionRead() {
+		return lastRevisionRead == null ? null : lastRevisionRead.get();
+	}
+	
+	private void setLastRevisionRead(CachedRevision cr) {
+		// done() for lastRevisionRead.userData has been called by ReaderN1 once
+		// it noticed unsuitable DataAccess.
+		// Now, done() for any CachedRevision cleared by GC:
+		for (Reference<? extends CachedRevision> r; (r = lastRevisionQueue.poll()) != null;) {
+			CachedRevision toClean = r.get();
+			if (toClean != null && toClean.userData != null) {
+				toClean.userData.done();
+			}
+		}
+		if (cr != null) {
+			lastRevisionRead = new SoftReference<CachedRevision>(cr, lastRevisionQueue);
+		} else {
+			lastRevisionRead = null;
+		}
+	}
+	
+	final static class CachedRevision {
+		final int revision;
+		final DataAccess userData;
+		
+		public CachedRevision(int lastRevisionRead, DataAccess lastUserData) {
+			revision = lastRevisionRead;
+			userData = lastUserData;
+		}
+	}
+
 	/**
 	 * operation with single file open/close and multiple diverse reads.
 	 * XXX initOutline might need similar extraction to keep N1 format knowledge  
@@ -488,7 +632,8 @@
 		// next are transient values, for range() use only
 		private final Inflater inflater = new Inflater();
 		// can share buffer between instances of InflaterDataAccess as I never read any two of them in parallel
-		private final byte[] inflaterBuffer = new byte[10 * 1024]; // TODO consider using DAP.DEFAULT_FILE_BUFFER
+		private final byte[] inflaterBuffer = new byte[10 * 1024]; // TODO [post-1.1] consider using DAP.DEFAULT_FILE_BUFFER
+		private final ByteBuffer inflaterOutBuffer = ByteBuffer.allocate(inflaterBuffer.length * 2);
 		private final byte[] nodeidBuf = new byte[20];
 		// revlog record fields
 		private long offset;
@@ -500,8 +645,6 @@
 		private int linkRevision;
 		private int parent1Revision;
 		private int parent2Revision;
-		// next are to track two major bottlenecks - patch application and actual time spent in inspector 
-//		private long applyTime, inspectorTime; // TIMING
 		
 		public ReaderN1(boolean dataRequested, Inspector insp, boolean usePatchMerge) {
 			assert insp != null;
@@ -510,8 +653,8 @@
 			mergePatches = usePatchMerge;
 		}
 		
-		public void start(int totalWork) {
-			daIndex = getIndexStream();
+		public void start(int totalWork, CachedRevision cachedRevision) {
+			daIndex = getIndexStream(totalWork <= 10);
 			if (needData && !inline) {
 				daData = getDataStream();
 			}
@@ -520,13 +663,27 @@
 				cb = new Lifecycle.BasicCallback();
 				lifecycleListener.start(totalWork, cb, cb);
 			}
-//			applyTime = inspectorTime = 0; // TIMING
+			if (needData && cachedRevision != null) {
+				lastUserData = cachedRevision.userData;
+				lastRevisionRead = cachedRevision.revision;
+				assert lastUserData != null;
+			}
 		}
 
 		// invoked only once per instance
-		public void finish() {
+		public CachedRevision finish() {
+			CachedRevision rv = null;
 			if (lastUserData != null) {
-				lastUserData.done();
+				if (lastUserData instanceof ByteArrayDataAccess) {
+					// it's safe to cache only in-memory revision texts,
+					// if lastUserData is merely a filter over file stream,
+					// we'd need to keep file open, and this is bad.
+					// XXX perhaps, wrap any DataAccess.byteArray into
+					// ByteArrayDataAccess?
+					rv = new CachedRevision(lastRevisionRead, lastUserData);
+				} else {
+					lastUserData.done();
+				}
 				lastUserData = null;
 			}
 			if (lifecycleListener != null) {
@@ -540,7 +697,7 @@
 				daData.done();
 				daData = null;
 			}
-//			System.out.printf("applyTime:%d ms, inspectorTime: %d ms\n", applyTime, inspectorTime); // TIMING
+			return rv;
 		}
 		
 		private void readHeaderRecord(int i) throws IOException {
@@ -586,7 +743,7 @@
 				final byte firstByte = streamDataAccess.readByte();
 				if (firstByte == 0x78 /* 'x' */) {
 					inflater.reset();
-					userDataAccess = new InflaterDataAccess(streamDataAccess, streamOffset, compressedLen, isPatch(i) ? -1 : actualLen, inflater, inflaterBuffer);
+					userDataAccess = new InflaterDataAccess(streamDataAccess, streamOffset, compressedLen, isPatch(i) ? -1 : actualLen, inflater, inflaterBuffer, inflaterOutBuffer);
 				} else if (firstByte == 0x75 /* 'u' */) {
 					userDataAccess = new FilterDataAccess(streamDataAccess, streamOffset+1, compressedLen-1);
 				} else {
@@ -601,7 +758,7 @@
 		}
 
 		// may be invoked few times per instance life
-		public boolean range(int start, int end) throws IOException {
+		public boolean range(int start, int end) throws IOException, HgRuntimeException {
 			int i;
 			// it (i.e. replace with i >= start)
 			if (needData && (i = getBaseRevision(start)) < start) {
@@ -713,10 +870,27 @@
 
 	
 	public interface Inspector {
-		// XXX boolean retVal to indicate whether to continue?
-		// TODO specify nodeid and data length, and reuse policy (i.e. if revlog stream doesn't reuse nodeid[] for each call)
-		// implementers shall not invoke DataAccess.done(), it's accomplished by #iterate at appropraite moment
-		void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[/*20*/] nodeid, DataAccess data);
+		/**
+		 * XXX boolean retVal to indicate whether to continue?
+		 * 
+		 * Implementers shall not invoke DataAccess.done(), it's accomplished by #iterate at appropriate moment
+		 * 
+		 * @param revisionIndex absolute index of revision in revlog being iterated
+		 * @param actualLen length of the user data at this revision
+		 * @param baseRevision last revision known to hold complete revision (others hold patches). 
+		 *        if baseRevision != revisionIndex, data for this revision is a result of a sequence of patches
+		 * @param linkRevision index of corresponding changeset revision
+		 * @param parent1Revision index of first parent revision in this revlog, or {@link HgRepository#NO_REVISION}
+		 * @param parent2Revision index of second parent revision in this revlog, or {@link HgRepository#NO_REVISION}
+		 * @param nodeid 20-byte buffer, shared between invocations 
+		 * @param data access to revision content of actualLen size, or <code>null</code> if no data has been requested with 
+		 *        {@link RevlogStream#iterate(int[], boolean, Inspector)}
+		 */
+		void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[/*20*/] nodeid, DataAccess data) throws HgRuntimeException;
 	}
 
+	public interface Observer {
+		// notify observer of invalidate/reload event in the stream
+		public void reloaded(RevlogStream src);
+	}
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/RevlogStreamFactory.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.lang.ref.SoftReference;
+import java.util.HashMap;
+
+import org.tmatesoft.hg.util.Path;
+
+/**
+ * Factory to create {@link RevlogStream RevlogStreams}, cache-capable.
+ *   
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class RevlogStreamFactory {
+	
+	private final Internals repo;
+	private final HashMap<Path, SoftReference<RevlogStream>> streamsCache;
+
+
+	public RevlogStreamFactory(Internals hgRepo, boolean shallCacheRevlogs) {
+		repo = hgRepo;
+		if (shallCacheRevlogs) {
+			streamsCache = new HashMap<Path, SoftReference<RevlogStream>>();
+		} else {
+			streamsCache = null;
+		}
+	}
+	
+	/**
+	 * Creates a stream for specified file, doesn't cache stream
+	 */
+	/*package-local*/ RevlogStream create(File f) {
+		return new RevlogStream(repo, f);
+	}
+
+	/**
+	 * Perhaps, should be separate interface, like ContentLookup
+	 * @param path - normalized file name
+ * @return <code>null</code> if path doesn't resolve to an existing file
+	 */
+	/*package-local*/ RevlogStream getStoreFile(Path path, boolean onlyIfExists) {
+		final SoftReference<RevlogStream> ref = shallCacheRevlogs() ? streamsCache.get(path) : null;
+		RevlogStream cached = ref == null ? null : ref.get();
+		if (cached != null) {
+			return cached;
+		}
+		File f = repo.getFileFromDataDir(path);
+		if (!onlyIfExists || f.exists()) {
+			RevlogStream s = create(f);
+			if (shallCacheRevlogs()) {
+				streamsCache.put(path, new SoftReference<RevlogStream>(s));
+			}
+			return s;
+		}
+		return null;
+	}
+	
+	private boolean shallCacheRevlogs() {
+		return streamsCache != null;
+	}
+}
--- a/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Wed Jul 10 11:48:55 2013 +0200
@@ -22,14 +22,23 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.internal.DataSerializer.ByteArrayDataSource;
+import org.tmatesoft.hg.internal.DataSerializer.ByteArraySerializer;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
+import org.tmatesoft.hg.repo.HgBundle.GroupElement;
 import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.repo.HgInvalidRevisionException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.Pair;
 
 /**
  * 
- * TODO separate operation to check if index is too big and split into index+data
+ * TODO [post-1.1] separate operation to check if index is too big and split into index+data
  * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
@@ -38,34 +47,117 @@
 
 	private final DigestHelper dh = new DigestHelper();
 	private final RevlogCompressor revlogDataZip;
-	private int lastEntryBase, lastEntryIndex;
-	private byte[] lastEntryContent;
+	private final Transaction transaction;
+	private int lastEntryBase, lastEntryIndex, lastEntryActualLen;
+	// record revision and its full content
+	// the name might be misleading, it does not necessarily match lastEntryIndex
+	private Pair<Integer, byte[]> lastFullContent;
 	private Nodeid lastEntryRevision;
 	private IntMap<Nodeid> revisionCache = new IntMap<Nodeid>(32);
 	private RevlogStream revlogStream;
 	
-	public RevlogStreamWriter(SessionContext ctx, RevlogStream stream) {
-		assert ctx != null;
+	public RevlogStreamWriter(SessionContext.Source ctxSource, RevlogStream stream, Transaction tr) {
+		assert ctxSource != null;
 		assert stream != null;
+		assert tr != null;
 				
-		revlogDataZip = new RevlogCompressor(ctx);
+		revlogDataZip = new RevlogCompressor(ctxSource.getSessionContext());
 		revlogStream = stream;
+		transaction = tr;
+	}
+	
+	public Pair<Integer,Nodeid> addPatchRevision(GroupElement ge, RevisionToIndexMap clogRevs, RevisionToIndexMap revlogRevs) throws HgIOException, HgRuntimeException {
+		populateLastEntryIndex();
+		//
+		final Nodeid nodeRev = ge.node();
+		final Nodeid csetRev = ge.cset();
+		int linkRev;
+		if (nodeRev.equals(csetRev)) {
+			linkRev = lastEntryIndex+1;
+		} else {
+			linkRev = clogRevs.revisionIndex(csetRev);
+		}
+		assert linkRev >= 0;
+		final Nodeid p1Rev = ge.firstParent();
+		int p1 = p1Rev.isNull() ? NO_REVISION : revlogRevs.revisionIndex(p1Rev);
+		final Nodeid p2Rev = ge.secondParent();
+		int p2 = p2Rev.isNull() ? NO_REVISION : revlogRevs.revisionIndex(p2Rev);
+		Patch p = new Patch();
+		final byte[] patchBytes;
+		try {
+			// XXX there's ge.rawData(), to avoid extra array wrap
+			patchBytes = ge.rawDataByteArray();
+			p.read(new ByteArrayDataAccess(patchBytes));
+		} catch (IOException ex) {
+			throw new HgIOException("Failed to read patch information", ex, null);
+		}
+		//
+		final Nodeid patchBase = ge.patchBase();
+		int patchBaseRev = patchBase.isNull() ? NO_REVISION : revlogRevs.revisionIndex(patchBase);
+		int baseRev = lastEntryIndex == NO_REVISION ? 0 : revlogStream.baseRevision(patchBaseRev);
+		int revLen;
+		DataSource ds;
+		byte[] complete = null;
+		if (patchBaseRev == lastEntryIndex && lastEntryIndex != NO_REVISION) {
+			// we may write patch from GroupElement as is
+			int patchBaseLen = dataLength(patchBaseRev);
+			revLen = patchBaseLen + p.patchSizeDelta();
+			ds = new ByteArrayDataSource(patchBytes);
+		} else {
+			// read baseRev, unless it's the pull to empty repository
+			try {
+				if (lastEntryIndex == NO_REVISION) {
+					complete = p.apply(new ByteArrayDataAccess(new byte[0]), -1);
+					baseRev = 0; // it's done above, but doesn't hurt
+				} else {
+					ReadContentInspector insp = new ReadContentInspector().read(revlogStream, baseRev);
+					complete = p.apply(new ByteArrayDataAccess(insp.content), -1);
+					baseRev = lastEntryIndex + 1;
+				}
+				ds = new ByteArrayDataSource(complete);
+				revLen = complete.length;
+			} catch (IOException ex) {
+				// unlikely to happen, as ByteArrayDataSource doesn't throw IOException
+				throw new HgIOException("Failed to reconstruct revision", ex, null);
+			}
+		}
+		doAdd(nodeRev, p1, p2, linkRev, baseRev, revLen, ds);
+		if (complete != null) {
+			lastFullContent = new Pair<Integer, byte[]>(lastEntryIndex, complete);
+		}
+		return new Pair<Integer, Nodeid>(lastEntryIndex, lastEntryRevision);
 	}
 	
 	/**
 	 * @return nodeid of added revision
+	 * @throws HgRuntimeException 
 	 */
-	public Nodeid addRevision(byte[] content, int linkRevision, int p1, int p2) {
-		lastEntryRevision = Nodeid.NULL;
-		int revCount = revlogStream.revisionCount();
-		lastEntryIndex = revCount == 0 ? NO_REVISION : revCount - 1;
-		populateLastEntry();
+	public Pair<Integer,Nodeid> addRevision(DataSource content, int linkRevision, int p1, int p2) throws HgIOException, HgRuntimeException {
+		populateLastEntryIndex();
+		populateLastEntryContent();
 		//
-		Patch patch = GeneratePatchInspector.delta(lastEntryContent, content);
+		byte[] contentByteArray = toByteArray(content);
+		Patch patch = GeneratePatchInspector.delta(lastFullContent.second(), contentByteArray);
 		int patchSerializedLength = patch.serializedLength();
 		
-		final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, content.length);
-		DataSerializer.DataSource dataSource = writeComplete ? new DataSerializer.ByteArrayDataSource(content) : patch.new PatchDataSource();
+		final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, contentByteArray.length);
+		DataSerializer.DataSource dataSource = writeComplete ? new ByteArrayDataSource(contentByteArray) : patch.new PatchDataSource();
+		//
+		Nodeid p1Rev = revision(p1);
+		Nodeid p2Rev = revision(p2);
+		Nodeid newRev = Nodeid.fromBinary(dh.sha1(p1Rev, p2Rev, contentByteArray).asBinary(), 0);
+		doAdd(newRev, p1, p2, linkRevision, writeComplete ? lastEntryIndex+1 : lastEntryBase, contentByteArray.length, dataSource);
+		lastFullContent = new Pair<Integer, byte[]>(lastEntryIndex, contentByteArray);
+		return new Pair<Integer, Nodeid>(lastEntryIndex, lastEntryRevision);
+	}
+
+	private Nodeid doAdd(Nodeid rev, int p1, int p2, int linkRevision, int baseRevision, int revLen, DataSerializer.DataSource dataSource) throws HgIOException, HgRuntimeException  {
+		assert linkRevision >= 0;
+		assert baseRevision >= 0;
+		assert p1 == NO_REVISION || p1 >= 0;
+		assert p2 == NO_REVISION || p2 >= 0;
+		assert !rev.isNull();
+		assert revLen >= 0;
 		revlogDataZip.reset(dataSource);
 		final int compressedLen;
 		final boolean useCompressedData = preferCompressedOverComplete(revlogDataZip.getCompressedLength(), dataSource.serializeLength());
@@ -76,23 +168,18 @@
 			compressedLen = dataSource.serializeLength() + 1 /*1 byte for 'u' - uncompressed prefix byte*/;
 		}
 		//
-		Nodeid p1Rev = revision(p1);
-		Nodeid p2Rev = revision(p2);
-		byte[] revisionNodeidBytes = dh.sha1(p1Rev, p2Rev, content).asBinary();
-		//
-
-		DataSerializer indexFile, dataFile, activeFile;
-		indexFile = dataFile = activeFile = null;
+		DataSerializer indexFile, dataFile;
+		indexFile = dataFile = null;
 		try {
 			//
-			activeFile = indexFile = revlogStream.getIndexStreamWriter();
+			indexFile = revlogStream.getIndexStreamWriter(transaction);
 			final boolean isInlineData = revlogStream.isInlineData();
 			HeaderWriter revlogHeader = new HeaderWriter(isInlineData);
-			revlogHeader.length(content.length, compressedLen);
-			revlogHeader.nodeid(revisionNodeidBytes);
+			revlogHeader.length(revLen, compressedLen);
+			revlogHeader.nodeid(rev.toByteArray());
 			revlogHeader.linkRevision(linkRevision);
 			revlogHeader.parents(p1, p2);
-			revlogHeader.baseRevision(writeComplete ? lastEntryIndex+1 : lastEntryBase);
+			revlogHeader.baseRevision(baseRevision);
 			long lastEntryOffset = revlogStream.newEntryOffset();
 			revlogHeader.offset(lastEntryOffset);
 			//
@@ -101,9 +188,8 @@
 			if (isInlineData) {
 				dataFile = indexFile;
 			} else {
-				dataFile = revlogStream.getDataStreamWriter();
+				dataFile = revlogStream.getDataStreamWriter(transaction);
 			}
-			activeFile = dataFile;
 			if (useCompressedData) {
 				int actualCompressedLenWritten = revlogDataZip.writeCompressedData(dataFile);
 				if (actualCompressedLenWritten != compressedLen) {
@@ -114,22 +200,13 @@
 				dataSource.serialize(dataFile);
 			}
 			
-			
-			lastEntryContent = content;
 			lastEntryBase = revlogHeader.baseRevision();
 			lastEntryIndex++;
-			lastEntryRevision = Nodeid.fromBinary(revisionNodeidBytes, 0);
+			lastEntryActualLen = revLen;
+			lastEntryRevision = rev;
 			revisionCache.put(lastEntryIndex, lastEntryRevision);
 
 			revlogStream.revisionAdded(lastEntryIndex, lastEntryRevision, lastEntryBase, lastEntryOffset);
-		} catch (IOException ex) {
-			String m = String.format("Failed to write revision %d", lastEntryIndex+1, null);
-			HgInvalidControlFileException t = new HgInvalidControlFileException(m, ex, null);
-			if (activeFile == dataFile) {
-				throw revlogStream.initWithDataFile(t);
-			} else {
-				throw revlogStream.initWithIndexFile(t);
-			}
 		} finally {
 			indexFile.done();
 			if (dataFile != null && dataFile != indexFile) {
@@ -139,7 +216,13 @@
 		return lastEntryRevision;
 	}
 	
-	private Nodeid revision(int revisionIndex) {
+	private byte[] toByteArray(DataSource content) throws HgIOException, HgRuntimeException {
+		ByteArraySerializer ba = new ByteArraySerializer();
+		content.serialize(ba);
+		return ba.toByteArray();
+	}
+
+	private Nodeid revision(int revisionIndex) throws HgInvalidControlFileException, HgInvalidRevisionException {
 		if (revisionIndex == NO_REVISION) {
 			return Nodeid.NULL;
 		}
@@ -151,32 +234,38 @@
 		return n;
 	}
 	
-	private void populateLastEntry() throws HgInvalidControlFileException {
-		if (lastEntryContent != null) {
+	private int dataLength(int revisionIndex) throws HgInvalidControlFileException, HgInvalidRevisionException {
+		assert revisionIndex >= 0;
+		if (revisionIndex == lastEntryIndex) {
+			return lastEntryActualLen;
+		}
+		if (lastFullContent != null && lastFullContent.first() == revisionIndex) {
+			return lastFullContent.second().length;
+		}
+		return revlogStream.dataLength(revisionIndex);
+	}
+	
+	private void populateLastEntryIndex() throws HgRuntimeException {
+		int revCount = revlogStream.revisionCount();
+		lastEntryIndex = revCount == 0 ? NO_REVISION : revCount - 1;
+	}
+	
+	private void populateLastEntryContent() throws HgRuntimeException {
+		if (lastFullContent != null && lastFullContent.first() == lastEntryIndex) {
+			// we have last entry cached
 			return;
 		}
+		lastEntryRevision = Nodeid.NULL;
 		if (lastEntryIndex != NO_REVISION) {
-			assert lastEntryIndex >= 0;
-			final IOException[] failure = new IOException[1];
-			revlogStream.iterate(lastEntryIndex, lastEntryIndex, true, new RevlogStream.Inspector() {
-				
-				public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
-					try {
-						lastEntryBase = baseRevision;
-						lastEntryRevision = Nodeid.fromBinary(nodeid, 0);
-						lastEntryContent = data.byteArray();
-					} catch (IOException ex) {
-						failure[0] = ex;
-					}
-				}
-			});
-			if (failure[0] != null) {
-				String m = String.format("Failed to get content of most recent revision %d", lastEntryIndex);
-				throw revlogStream.initWithDataFile(new HgInvalidControlFileException(m, failure[0], null));
-			}
+			ReadContentInspector insp = new ReadContentInspector().read(revlogStream, lastEntryIndex);
+			lastEntryBase = insp.baseRev;
+			lastEntryRevision = insp.rev;
+			lastFullContent = new Pair<Integer, byte[]>(lastEntryIndex, insp.content);
 		} else {
-			lastEntryContent = new byte[0];
+			lastFullContent = new Pair<Integer, byte[]>(lastEntryIndex, new byte[0]);
 		}
+		assert lastFullContent.first() == lastEntryIndex;
+		assert lastFullContent.second() != null;
 	}
 	
 	public static boolean preferCompleteOverPatch(int patchLength, int fullContentLength) {
@@ -250,13 +339,12 @@
 			return this;
 		}
 		
-		public void serialize(DataSerializer out) throws IOException {
+		public void serialize(DataSerializer out) throws HgIOException {
 			header.clear();
 			if (offset == 0) {
 				int version = 1 /* RevlogNG */;
 				if (isInline) {
-					final int INLINEDATA = 1 << 16; // FIXME extract constant
-					version |= INLINEDATA;
+					version |= RevlogStream.INLINEDATA;
 				}
 				header.putInt(version);
 				header.putInt(0);
@@ -283,4 +371,40 @@
 			return header.capacity();
 		}
 	}
-}
+	
+	// XXX part of HgRevisionMap contract, need public counterparts (along with IndexToRevisionMap)
+	public interface RevisionToIndexMap {
+		
+		/**
+		 * @return {@link HgRepository#NO_REVISION} if unknown revision
+		 */
+		int revisionIndex(Nodeid revision);
+	}
+
+	private static class ReadContentInspector implements RevlogStream.Inspector {
+		public int baseRev;
+		public Nodeid rev;
+		public byte[] content;
+		private IOException failure;
+		
+		public ReadContentInspector read(RevlogStream rs, int revIndex) throws HgInvalidControlFileException {
+			assert revIndex >= 0;
+			rs.iterate(revIndex, revIndex, true, this);
+			if (failure != null) {
+				String m = String.format("Failed to get content of revision %d", revIndex);
+				throw rs.initWithDataFile(new HgInvalidControlFileException(m, failure, null));
+			}
+			return this;
+		}
+		
+		public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
+			try {
+				baseRev = baseRevision;
+				rev = Nodeid.fromBinary(nodeid, 0);
+				content = data.byteArray();
+			} catch (IOException ex) {
+				failure = ex;
+			}
+		}
+	}
+}
\ No newline at end of file
--- a/src/org/tmatesoft/hg/internal/StoragePathHelper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/StoragePathHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -22,8 +22,6 @@
 import java.nio.charset.CharsetEncoder;
 import java.util.Arrays;
 import java.util.TreeSet;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 import org.tmatesoft.hg.util.PathRewrite;
 
@@ -36,11 +34,15 @@
  * @author TMate Software Ltd.
  */
 class StoragePathHelper implements PathRewrite {
-	
+
+	static final String STR_STORE = "store/";
+	static final String STR_DATA = "data/";
+	static final String STR_DH = "dh/";
+
 	private final boolean store;
 	private final boolean fncache;
 	private final boolean dotencode;
-	private final Pattern suffix2replace;
+	private final EncodeDirPathHelper dirPathRewrite;
 	private final CharsetEncoder csEncoder;
 	private final char[] hexEncodedByte = new char[] {'~', '0', '0'};
 	private final ByteBuffer byteEncodingBuf;
@@ -55,7 +57,7 @@
 		store = isStore;
 		fncache = isFncache;
 		dotencode = isDotencode;
-		suffix2replace = Pattern.compile("\\.([id]|hg)/");
+		dirPathRewrite = new EncodeDirPathHelper();
 		csEncoder = fsEncoding.newEncoder();
 		byteEncodingBuf = ByteBuffer.allocate(Math.round(csEncoder.maxBytesPerChar()) + 1/*in fact, need ceil, hence +1*/);
 		charEncodingBuf = CharBuffer.allocate(1);
@@ -66,25 +68,9 @@
 	 * It has to be normalized (slashes) and shall not include extension .i or .d.
 	 */
 	public CharSequence rewrite(CharSequence p) {
-		final String STR_STORE = "store/";
-		final String STR_DATA = "data/";
-		final String STR_DH = "dh/";
 		final String reservedChars = "\\:*?\"<>|";
 		
-		Matcher suffixMatcher = suffix2replace.matcher(p);
-		CharSequence path;
-		// Matcher.replaceAll, but without extra toString
-		boolean found = suffixMatcher.find();
-		if (found) {
-			StringBuffer sb = new StringBuffer(p.length()  + 20);
-			do {
-				suffixMatcher.appendReplacement(sb, ".$1.hg/");
-			} while (found = suffixMatcher.find());
-			suffixMatcher.appendTail(sb);
-			path = sb;
-		} else {
-			path = p;
-		}
+		CharSequence path = dirPathRewrite.rewrite(p);
 		
 		StringBuilder sb = new StringBuilder(path.length() << 1);
 		if (store || fncache) {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/Transaction.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.repo.HgInvalidStateException;
+
+/**
+ * Implementation strategies possible:<ul>
+ * <li> Get a copy, write changes to origin, keep copy as backup till #commit
+ *   <p>(-) doesn't break hard links 
+ * <li> Get a copy, write changes to a copy, on commit rename copy to origin. 
+ *   <p>(-) What if we read newly written data (won't find it);
+ *   <p>(-) complex #commit
+ *   <p>(+) simple rollback
+ * <li> Get a copy, rename origin to backup (breaks hard links), rename copy to origin, write changes 
+ *   <p>(+) Modified file is in place right away;
+ *   <p>(+) easy #commit
+ * <li> Do not copy, just record file size, truncate to that size on rollback
+ * <li> ...?
+ * </ul> 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public abstract class Transaction {
+	/**
+	 * Record the file is going to be modified during this transaction, obtain actual
+	 * destination to write to.
+	 * The file to be modified not necessarily exists, might be just a name of an added file  
+	 */
+	public abstract File prepare(File f) throws HgIOException;
+	/**
+	 * overwrites backup if exists, backup is kept after successful {@link #commit()}
+	 */
+	public abstract File prepare(File origin, File backup) throws HgIOException;
+	/**
+	 * Tell that file was successfully processed
+	 */
+	public abstract void done(File f) throws HgIOException;
+	/**
+	 * optional?
+	 */
+	public abstract void failure(File f, IOException ex);
+	/**
+	 * Complete the transaction
+	 */
+	public abstract void commit() throws HgIOException;
+	/**
+	 * Undo all the changes
+	 */
+	public abstract void rollback() throws HgIOException;
+
+	public interface Factory {
+		public Transaction create(SessionContext.Source ctxSource);
+	}
+
+	public static class NoRollback extends Transaction {
+
+		@Override
+		public File prepare(File f) throws HgIOException {
+			return f;
+		}
+
+		@Override
+		public File prepare(File origin, File backup) throws HgIOException {
+			return origin;
+		}
+
+		@Override
+		public void done(File f) throws HgIOException {
+			// no-op
+		}
+
+		@Override
+		public void failure(File f, IOException ex) {
+			// no-op
+		}
+
+		@Override
+		public void commit() throws HgIOException {
+			// no-op
+		}
+
+		@Override
+		public void rollback() throws HgIOException {
+			throw new HgInvalidStateException("This transaction doesn't support rollback");
+		}
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/WorkingCopyContent.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgInvalidFileException;
+import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.ByteChannel;
+import org.tmatesoft.hg.util.CancelledException;
+
+/**
+ * Access content of the working copy. The difference with {@link FileContentSupplier} is that this one doesn't need {@link File}
+ * in the working directory. However, provided this class is used from {@link HgCommitCommand} when "modified" file was detected,
+ * it's odd to expect no file in the working dir.
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class WorkingCopyContent implements DataSerializer.DataSource {
+	private final HgDataFile file;
+
+	public WorkingCopyContent(HgDataFile dataFile) {
+		file = dataFile;
+		if (!dataFile.exists()) {
+			throw new IllegalArgumentException();
+		}
+	}
+
+	public void serialize(final DataSerializer out) throws HgIOException, HgRuntimeException {
+		final HgIOException failure[] = new HgIOException[1];
+		try {
+			// TODO #workingCopy API is very limiting, CancelledException is inconvenient, 
+			// and absence of HgIOException is very uncomfortable
+			file.workingCopy(new ByteChannel() {
+				
+				public int write(ByteBuffer buffer) throws IOException {
+					try {
+						if (buffer.hasArray()) {
+							out.write(buffer.array(), buffer.position(), buffer.remaining());
+						}
+						int rv = buffer.remaining();
+						buffer.position(buffer.limit()); // pretend we've consumed the data
+						return rv;
+					} catch (HgIOException ex) {
+						failure[0] = ex;
+						IOException e = new IOException();
+						ex.initCause(ex); // XXX Java 1.5
+						throw e;
+					}
+				}
+			});
+		} catch (HgInvalidFileException ex) {
+			if (failure[0] != null) {
+				throw failure[0];
+			}
+			throw new HgIOException("Write failure", ex, new File(file.getRepo().getWorkingDir(), file.getPath().toString()));
+		} catch (CancelledException ex) {
+			throw new HgInvalidStateException("Our channel doesn't cancel here");
+		}
+	}
+
+	public int serializeLength() throws HgRuntimeException {
+		return file.getLength(HgRepository.WORKING_COPY);
+	}
+}
\ No newline at end of file
--- a/src/org/tmatesoft/hg/internal/WorkingDirFileWriter.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/WorkingDirFileWriter.java	Wed Jul 10 11:48:55 2013 +0200
@@ -26,6 +26,7 @@
 
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgManifest;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.ByteChannel;
 import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.LogFacility.Severity;
@@ -62,8 +63,9 @@
 	/**
 	 * Writes content of specified file revision into local filesystem, or create a symlink according to flags. 
 	 * Executable bit is set if specified and filesystem supports it. 
+	 * @throws HgRuntimeException 
 	 */
-	public void processFile(HgDataFile df, int fileRevIndex, HgManifest.Flags flags) throws IOException {
+	public void processFile(HgDataFile df, int fileRevIndex, HgManifest.Flags flags) throws IOException, HgRuntimeException {
 		try {
 			prepare(df.getPath());
 			if (flags != HgManifest.Flags.Link) {
--- a/src/org/tmatesoft/hg/repo/CommitFacility.java	Thu Jun 06 14:21:11 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,250 +0,0 @@
-/*
- * Copyright (c) 2013 TMate Software Ltd
- *  
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; version 2 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * For information on how to redistribute this software under
- * the terms of a license other than GNU General Public License
- * contact TMate Software at support@hg4j.com
- */
-package org.tmatesoft.hg.repo;
-
-import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.tmatesoft.hg.core.HgCommitCommand;
-import org.tmatesoft.hg.core.HgIOException;
-import org.tmatesoft.hg.core.HgRepositoryLockException;
-import org.tmatesoft.hg.core.Nodeid;
-import org.tmatesoft.hg.internal.ByteArrayChannel;
-import org.tmatesoft.hg.internal.ChangelogEntryBuilder;
-import org.tmatesoft.hg.internal.DirstateBuilder;
-import org.tmatesoft.hg.internal.DirstateReader;
-import org.tmatesoft.hg.internal.Experimental;
-import org.tmatesoft.hg.internal.FNCacheFile;
-import org.tmatesoft.hg.internal.Internals;
-import org.tmatesoft.hg.internal.ManifestEntryBuilder;
-import org.tmatesoft.hg.internal.ManifestRevision;
-import org.tmatesoft.hg.internal.RevlogStream;
-import org.tmatesoft.hg.internal.RevlogStreamWriter;
-import org.tmatesoft.hg.util.Pair;
-import org.tmatesoft.hg.util.Path;
-import org.tmatesoft.hg.util.LogFacility.Severity;
-
-/**
- * WORK IN PROGRESS
- * Name: CommitObject, FutureCommit or PendingCommit
- * Only public API now: {@link HgCommitCommand}. TEMPORARILY lives in the oth.repo public packages, until code interdependencies are resolved
- * 
- * @author Artem Tikhomirov
- * @author TMate Software Ltd.
- */
-@Experimental(reason="Work in progress")
-public final class CommitFacility {
-	private final HgRepository repo;
-	private final int p1Commit, p2Commit;
-	private Map<Path, Pair<HgDataFile, ByteDataSupplier>> files = new LinkedHashMap<Path, Pair<HgDataFile, ByteDataSupplier>>();
-	private Set<Path> removals = new TreeSet<Path>();
-	private String branch, user;
-
-	public CommitFacility(HgRepository hgRepo, int parentCommit) {
-		this(hgRepo, parentCommit, NO_REVISION);
-	}
-	
-	public CommitFacility(HgRepository hgRepo, int parent1Commit, int parent2Commit) {
-		repo = hgRepo;
-		p1Commit = parent1Commit;
-		p2Commit = parent2Commit;
-		if (parent1Commit != NO_REVISION && parent1Commit == parent2Commit) {
-			throw new IllegalArgumentException("Merging same revision is dubious");
-		}
-	}
-
-	public boolean isMerge() {
-		return p1Commit != NO_REVISION && p2Commit != NO_REVISION;
-	}
-
-	public void add(HgDataFile dataFile, ByteDataSupplier content) {
-		if (content == null) {
-			throw new IllegalArgumentException();
-		}
-		removals.remove(dataFile.getPath());
-		files.put(dataFile.getPath(), new Pair<HgDataFile, ByteDataSupplier>(dataFile, content));
-	}
-
-	public void forget(HgDataFile dataFile) {
-		files.remove(dataFile.getPath());
-		removals.add(dataFile.getPath());
-	}
-	
-	public void branch(String branchName) {
-		branch = branchName;
-	}
-	
-	public void user(String userName) {
-		user = userName;
-	}
-	
-	public Nodeid commit(String message) throws HgIOException, HgRepositoryLockException {
-		
-		final HgChangelog clog = repo.getChangelog();
-		final int clogRevisionIndex = clog.getRevisionCount();
-		ManifestRevision c1Manifest = new ManifestRevision(null, null);
-		ManifestRevision c2Manifest = new ManifestRevision(null, null);
-		if (p1Commit != NO_REVISION) {
-			repo.getManifest().walk(p1Commit, p1Commit, c1Manifest);
-		}
-		if (p2Commit != NO_REVISION) {
-			repo.getManifest().walk(p2Commit, p2Commit, c2Manifest);
-		}
-//		Pair<Integer, Integer> manifestParents = getManifestParents();
-		Pair<Integer, Integer> manifestParents = new Pair<Integer, Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
-		TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
-		HashMap<Path, Pair<Integer, Integer>> fileParents = new HashMap<Path, Pair<Integer,Integer>>();
-		for (Path f : c1Manifest.files()) {
-			HgDataFile df = repo.getFileNode(f);
-			Nodeid fileKnownRev1 = c1Manifest.nodeid(f), fileKnownRev2;
-			final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev1);
-			final int fileRevIndex2;
-			if ((fileKnownRev2 = c2Manifest.nodeid(f)) != null) {
-				// merged files
-				fileRevIndex2 = df.getRevisionIndex(fileKnownRev2);
-			} else {
-				fileRevIndex2 = NO_REVISION;
-			}
-				
-			fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex1, fileRevIndex2));
-			newManifestRevision.put(f, fileKnownRev1);
-		}
-		//
-		// Forget removed
-		for (Path p : removals) {
-			newManifestRevision.remove(p);
-		}
-		//
-		// Register new/changed
-		ArrayList<Path> newlyAddedFiles = new ArrayList<Path>();
-		ArrayList<Path> touchInDirstate = new ArrayList<Path>();
-		for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
-			HgDataFile df = e.first();
-			Pair<Integer, Integer> fp = fileParents.get(df.getPath());
-			if (fp == null) {
-				// NEW FILE
-				fp = new Pair<Integer, Integer>(NO_REVISION, NO_REVISION);
-			}
-			ByteDataSupplier bds = e.second();
-			// FIXME quickfix, instead, pass ByteDataSupplier directly to RevlogStreamWriter
-			ByteBuffer bb = ByteBuffer.allocate(2048);
-			ByteArrayChannel bac = new ByteArrayChannel();
-			while (bds.read(bb) != -1) {
-				bb.flip();
-				bac.write(bb);
-				bb.clear();
-			}
-			RevlogStream contentStream;
-			if (df.exists()) {
-				contentStream = df.content;
-			} else {
-				contentStream = repo.createStoreFile(df.getPath());
-				newlyAddedFiles.add(df.getPath());
-				// FIXME df doesn't get df.content updated, and clients
-				// that would attempt to access newly added file after commit would fail
-				// (despite the fact the file is in there)
-			}
-			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo.getSessionContext(), contentStream);
-			Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second());
-			newManifestRevision.put(df.getPath(), fileRev);
-			touchInDirstate.add(df.getPath());
-		}
-		//
-		// Manifest
-		final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder();
-		for (Map.Entry<Path, Nodeid> me : newManifestRevision.entrySet()) {
-			manifestBuilder.add(me.getKey().toString(), me.getValue());
-		}
-		RevlogStreamWriter manifestWriter = new RevlogStreamWriter(repo.getSessionContext(), repo.getManifest().content);
-		Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder.build(), clogRevisionIndex, manifestParents.first(), manifestParents.second());
-		//
-		// Changelog
-		final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder();
-		changelogBuilder.setModified(files.keySet());
-		changelogBuilder.branch(branch == null ? HgRepository.DEFAULT_BRANCH_NAME : branch);
-		changelogBuilder.user(String.valueOf(user));
-		byte[] clogContent = changelogBuilder.build(manifestRev, message);
-		RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo.getSessionContext(), clog.content);
-		Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit);
-		// FIXME move fncache update to an external facility, along with dirstate update
-		if (!newlyAddedFiles.isEmpty() && repo.getImplHelper().fncacheInUse()) {
-			FNCacheFile fncache = new FNCacheFile(repo.getImplHelper());
-			for (Path p : newlyAddedFiles) {
-				fncache.add(p);
-			}
-			try {
-				fncache.write();
-			} catch (IOException ex) {
-				// see comment above for fnchache.read()
-				repo.getSessionContext().getLog().dump(getClass(), Severity.Error, ex, "Failed to write fncache, error ignored");
-			}
-		}
-		// bring dirstate up to commit state
-		Internals implRepo = Internals.getInstance(repo);
-		final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo);
-		dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource()));
-		for (Path p : removals) {
-			dirstateBuilder.recordRemoved(p);
-		}
-		for (Path p : touchInDirstate) {
-			dirstateBuilder.recordUncertain(p);
-		}
-		dirstateBuilder.parents(changesetRev, Nodeid.NULL);
-		dirstateBuilder.serialize();
-		return changesetRev;
-	}
-/*
-	private Pair<Integer, Integer> getManifestParents() {
-		return new Pair<Integer, Integer>(extractManifestRevisionIndex(p1Commit), extractManifestRevisionIndex(p2Commit));
-	}
-
-	private int extractManifestRevisionIndex(int clogRevIndex) {
-		if (clogRevIndex == NO_REVISION) {
-			return NO_REVISION;
-		}
-		RawChangeset commitObject = repo.getChangelog().range(clogRevIndex, clogRevIndex).get(0);
-		Nodeid manifestRev = commitObject.manifest();
-		if (manifestRev.isNull()) {
-			return NO_REVISION;
-		}
-		return repo.getManifest().getRevisionIndex(manifestRev);
-	}
-*/
-
-	// unlike DataAccess (which provides structured access), this one 
-	// deals with a sequence of bytes, when there's no need in structure of the data
-	// FIXME java.nio.ReadableByteChannel or ByteStream/ByteSequence(read, length, reset)
-	// SHALL be inline with util.ByteChannel, reading bytes from HgDataFile, preferably DataAccess#readBytes(BB) to match API,
-	// and a wrap for ByteVector
-	public interface ByteDataSupplier { // TODO look if can resolve DataAccess in HgCloneCommand visibility issue
-		// FIXME needs lifecycle, e.g. for supplier that reads from WC
-		int read(ByteBuffer buf);
-	}
-	
-	public interface ByteDataConsumer {
-		void write(ByteBuffer buf);
-	}
-}
--- a/src/org/tmatesoft/hg/repo/HgBlameFacility.java	Thu Jun 06 14:21:11 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,487 +0,0 @@
-/*
- * Copyright (c) 2013 TMate Software Ltd
- *  
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; version 2 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * For information on how to redistribute this software under
- * the terms of a license other than GNU General Public License
- * contact TMate Software at support@hg4j.com
- */
-package org.tmatesoft.hg.repo;
-
-import static org.tmatesoft.hg.core.HgIterateDirection.NewToOld;
-import static org.tmatesoft.hg.core.HgIterateDirection.OldToNew;
-import static org.tmatesoft.hg.repo.HgInternals.wrongRevisionIndex;
-import static org.tmatesoft.hg.repo.HgRepository.*;
-
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.Collections;
-import java.util.LinkedList;
-
-import org.tmatesoft.hg.core.HgCallbackTargetException;
-import org.tmatesoft.hg.core.HgIterateDirection;
-import org.tmatesoft.hg.core.Nodeid;
-import org.tmatesoft.hg.internal.BlameHelper;
-import org.tmatesoft.hg.internal.Callback;
-import org.tmatesoft.hg.internal.Experimental;
-import org.tmatesoft.hg.internal.IntVector;
-import org.tmatesoft.hg.util.Adaptable;
-
-/**
- * Facility with diff/annotate functionality.
- * 
- * @author Artem Tikhomirov
- * @author TMate Software Ltd.
- */
-@Experimental(reason="Unstable API")
-public final class HgBlameFacility {
-	private final HgDataFile df;
-	
-	public HgBlameFacility(HgDataFile file) {
-		if (file == null) {
-			throw new IllegalArgumentException();
-		}
-		df = file;
-	}
-	
-	/**
-	 * mimic 'hg diff -r clogRevIndex1 -r clogRevIndex2'
-	 */
-	public void diff(int clogRevIndex1, int clogRevIndex2, Inspector insp) throws HgCallbackTargetException {
-		// FIXME clogRevIndex1 and clogRevIndex2 may point to different files, need to decide whether to throw an exception
-		// or to attempt to look up correct file node (tricky)
-		int fileRevIndex1 = fileRevIndex(df, clogRevIndex1);
-		int fileRevIndex2 = fileRevIndex(df, clogRevIndex2);
-		BlameHelper bh = new BlameHelper(insp, 5);
-		bh.useFileUpTo(df, clogRevIndex2);
-		bh.diff(fileRevIndex1, clogRevIndex1, fileRevIndex2, clogRevIndex2);
-	}
-	
-	/**
-	 * Walk file history up/down to revision at given changeset and report changes for each revision
-	 */
-	public void annotate(int changelogRevisionIndex, Inspector insp, HgIterateDirection iterateOrder) throws HgCallbackTargetException {
-		annotate(0, changelogRevisionIndex, insp, iterateOrder);
-	}
-
-	/**
-	 * Walk file history range and report changes for each revision
-	 */
-	public void annotate(int changelogRevIndexStart, int changelogRevIndexEnd, Inspector insp, HgIterateDirection iterateOrder) throws HgCallbackTargetException {
-		if (wrongRevisionIndex(changelogRevIndexStart) || wrongRevisionIndex(changelogRevIndexEnd)) {
-			throw new IllegalArgumentException();
-		}
-		// Note, changelogRevIndexEnd may be TIP, while the code below doesn't tolerate constants
-		//
-		int lastRevision = df.getRepo().getChangelog().getLastRevision();
-		if (changelogRevIndexEnd == TIP) {
-			changelogRevIndexEnd = lastRevision;
-		}
-		HgInternals.checkRevlogRange(changelogRevIndexStart, changelogRevIndexEnd, lastRevision);
-		if (!df.exists()) {
-			return;
-		}
-		BlameHelper bh = new BlameHelper(insp, 10);
-		HgDataFile currentFile = df;
-		int fileLastClogRevIndex = changelogRevIndexEnd;
-		FileRevisionHistoryChunk nextChunk = null;
-		LinkedList<FileRevisionHistoryChunk> fileCompleteHistory = new LinkedList<FileRevisionHistoryChunk>();
-		do {
-			FileRevisionHistoryChunk fileHistory = new FileRevisionHistoryChunk(currentFile);
-			fileHistory.init(fileLastClogRevIndex);
-			fileHistory.linkTo(nextChunk);
-			fileCompleteHistory.addFirst(fileHistory); // to get the list in old-to-new order
-			nextChunk = fileHistory;
-			bh.useFileUpTo(currentFile, fileLastClogRevIndex);
-			if (fileHistory.changeset(0) > changelogRevIndexStart && currentFile.isCopy()) {
-				// fileHistory.changeset(0) is the earliest revision we know about so far,
-				// once we get to revisions earlier than the requested start, stop digging.
-				// The reason there's NO == (i.e. not >=) because:
-				// (easy): once it's equal, we've reached our intended start
-				// (hard): if changelogRevIndexStart happens to be exact start of one of renames in the 
-				// chain of renames (test-annotate2 repository, file1->file1a->file1b, i.e. points 
-				// to the very start of file1a or file1 history), presence of == would get us to the next 
-				// chunk and hence changed parents of present chunk's first element. Our annotate alg 
-				// relies on parents only (i.e. knows nothing about 'last iteration element') to find out 
-				// what to compare, and hence won't report all lines of 'last iteration element' (which is the
-				// first revision of the renamed file) as "added in this revision", leaving gaps in annotate
-				HgRepository repo = currentFile.getRepo();
-				Nodeid originLastRev = currentFile.getCopySourceRevision();
-				currentFile = repo.getFileNode(currentFile.getCopySourceName());
-				fileLastClogRevIndex = currentFile.getChangesetRevisionIndex(currentFile.getRevisionIndex(originLastRev));
-				// XXX perhaps, shall fail with meaningful exception if new file doesn't exist (.i/.d not found for whatever reason)
-				// or source revision is missing?
-			} else {
-				fileHistory.chopAtChangeset(changelogRevIndexStart);
-				currentFile = null; // stop iterating
-			}
-		} while (currentFile != null && fileLastClogRevIndex > changelogRevIndexStart);
-		// fileCompleteHistory is in (origin, intermediate target, ultimate target) order
-
-		int[] fileClogParentRevs = new int[2];
-		int[] fileParentRevs = new int[2];
-		if (iterateOrder == NewToOld) {
-			Collections.reverse(fileCompleteHistory);
-		}
-		boolean shallFilterStart = changelogRevIndexStart != 0; // no reason if complete history is walked
-		for (FileRevisionHistoryChunk fileHistory : fileCompleteHistory) {
-			for (int fri : fileHistory.fileRevisions(iterateOrder)) {
-				int clogRevIndex = fileHistory.changeset(fri);
-				if (shallFilterStart) {
-					if (iterateOrder == NewToOld) {
-						// clogRevIndex decreases
-						if (clogRevIndex < changelogRevIndexStart) {
-							break;
-						}
-						// fall-through, clogRevIndex is in the [start..end] range
-					} else { // old to new
-						// the way we built fileHistory ensures we won't walk past changelogRevIndexEnd
-						// here we ensure we start from the right one, the one indicated with changelogRevIndexStart
-						if (clogRevIndex < changelogRevIndexStart) {
-							continue;
-						} else {
-							shallFilterStart = false; // once boundary is crossed, no need to check
-							// fall-through
-						}
-					}
-				}
-				fileHistory.fillFileParents(fri, fileParentRevs);
-				fileHistory.fillCsetParents(fri, fileClogParentRevs);
-				bh.annotateChange(fri, clogRevIndex, fileParentRevs, fileClogParentRevs);
-			}
-		}
-	}
-
-	/**
-	 * Annotates changes of the file against its parent(s). 
-	 * Unlike {@link #annotate(HgDataFile, int, Inspector, HgIterateDirection)}, doesn't
-	 * walk file history, looks at the specified revision only. Handles both parents (if merge revision).
-	 */
-	public void annotateSingleRevision(int changelogRevisionIndex, Inspector insp) throws HgCallbackTargetException {
-		// TODO detect if file is text/binary (e.g. looking for chars < ' ' and not \t\r\n\f
-		int fileRevIndex = fileRevIndex(df, changelogRevisionIndex);
-		int[] fileRevParents = new int[2];
-		df.parents(fileRevIndex, fileRevParents, null, null);
-		if (changelogRevisionIndex == TIP) {
-			changelogRevisionIndex = df.getChangesetRevisionIndex(fileRevIndex);
-		}
-		BlameHelper bh = new BlameHelper(insp, 5);
-		bh.useFileUpTo(df, changelogRevisionIndex);
-		int[] fileClogParentRevs = new int[2];
-		fileClogParentRevs[0] = fileRevParents[0] == NO_REVISION ? NO_REVISION : df.getChangesetRevisionIndex(fileRevParents[0]);
-		fileClogParentRevs[1] = fileRevParents[1] == NO_REVISION ? NO_REVISION : df.getChangesetRevisionIndex(fileRevParents[1]);
-		bh.annotateChange(fileRevIndex, changelogRevisionIndex, fileRevParents, fileClogParentRevs);
-	}
-
-	/**
-	 * Client's sink for revision differences.
-	 * 
-	 * When implemented, clients shall not expect new {@link Block blocks} instances in each call.
-	 * 
-	 * In case more information about annotated revision is needed, inspector instances may supply 
-	 * {@link RevisionDescriptor.Recipient} through {@link Adaptable}.  
-	 */
-	@Callback
-	public interface Inspector {
-		void same(EqualBlock block) throws HgCallbackTargetException;
-		void added(AddBlock block) throws HgCallbackTargetException;
-		void changed(ChangeBlock block) throws HgCallbackTargetException;
-		void deleted(DeleteBlock block) throws HgCallbackTargetException;
-	}
-	
-	/**
-	 * No need to keep "Block" prefix as long as there's only one {@link Inspector}
-	 */
-	@Deprecated
-	public interface BlockInspector extends Inspector {
-	}
-	
-	/**
-	 * Represents content of a block, either as a sequence of bytes or a 
-	 * sequence of smaller blocks (lines), if appropriate (according to usage context).
-	 * 
-	 * This approach allows line-by-line access to content data along with complete byte sequence for the whole block, i.e.
-	 * <pre>
-	 *    BlockData bd = addBlock.addedLines()
-	 *    // bd describes data from the addition completely.
-	 *    // elements of the BlockData are lines
-	 *    bd.elementCount() == addBlock.totalAddedLines();
-	 *    // one cat obtain complete addition with
-	 *    byte[] everythingAdded = bd.asArray();
-	 *    // or iterate line by line
-	 *    for (int i = 0; i < bd.elementCount(); i++) {
-	 *    	 byte[] lineContent = bd.elementAt(i);
-	 *       String line = new String(lineContent, fileEncodingCharset);
-	 *    }
-	 *    where bd.elementAt(0) is the line at index addBlock.firstAddedLine() 
-	 * </pre> 
-	 * 
-	 * LineData or ChunkData? 
-	 */
-	public interface BlockData {
-		BlockData elementAt(int index);
-		int elementCount();
-		byte[] asArray();
-	}
-	
-	/**
-	 * {@link Inspector} may optionally request extra information about revisions
-	 * being inspected, denoting itself as a {@link RevisionDescriptor.Recipient}. This class 
-	 * provides complete information about file revision under annotation now. 
-	 */
-	public interface RevisionDescriptor {
-		/**
-		 * @return complete source of the diff origin, never <code>null</code>
-		 */
-		BlockData origin();
-		/**
-		 * @return complete source of the diff target, never <code>null</code>
-		 */
-		BlockData target();
-		/**
-		 * @return changeset revision index of original file, or {@link HgRepository#NO_REVISION} if it's the very first revision
-		 */
-		int originChangesetIndex();
-		/**
-		 * @return changeset revision index of the target file
-		 */
-		int targetChangesetIndex();
-		/**
-		 * @return <code>true</code> if this revision is merge
-		 */
-		boolean isMerge();
-		/**
-		 * @return changeset revision index of the second, merged parent
-		 */
-		int mergeChangesetIndex();
-		/**
-		 * @return revision index of the change in target file's revlog
-		 */
-		int fileRevisionIndex();
-
-		/**
-		 * @return file object under blame (target file)
-		 */
-		HgDataFile file();
-
-		/**
-		 * Implement to indicate interest in {@link RevisionDescriptor}.
-		 * 
-		 * Note, instance of {@link RevisionDescriptor} is the same for 
-		 * {@link #start(RevisionDescriptor)} and {@link #done(RevisionDescriptor)} 
-		 * methods, and not necessarily a new one (i.e. <code>==</code>) for the next
-		 * revision announced.
-		 */
-		@Callback
-		public interface Recipient {
-			/**
-			 * Comes prior to any change {@link Block blocks}
-			 */
-			void start(RevisionDescriptor revisionDescription) throws HgCallbackTargetException;
-			/**
-			 * Comes after all change {@link Block blocks} were dispatched
-			 */
-			void done(RevisionDescriptor revisionDescription) throws HgCallbackTargetException;
-		}
-	}
-	
-	/**
-	 * Each change block comes from a single origin, blocks that are result of a merge
-	 * have {@link #originChangesetIndex()} equal to {@link RevisionDescriptor#mergeChangesetIndex()}.
-	 */
-	public interface Block {
-		int originChangesetIndex();
-		int targetChangesetIndex();
-	}
-	
-	public interface EqualBlock extends Block {
-		int originStart();
-		int targetStart();
-		int length();
-		BlockData content();
-	}
-	
-	public interface AddBlock extends Block {
-		/**
-		 * @return line index in the origin where this block is inserted
-		 */
-		int insertedAt();  
-		/**
-		 * @return line index of the first added line in the target revision
-		 */
-		int firstAddedLine();
-		/**
-		 * @return number of added lines in this block
-		 */
-		int totalAddedLines();
-		/**
-		 * @return content of added lines
-		 */
-		BlockData addedLines();
-	}
-	public interface DeleteBlock extends Block {
-		/**
-		 * @return line index in the target revision were this deleted block would be
-		 */
-		int removedAt();
-		/**
-		 * @return line index of the first removed line in the original revision
-		 */
-		int firstRemovedLine();
-		/**
-		 * @return number of deleted lines in this block
-		 */
-		int totalRemovedLines();
-		/**
-		 * @return content of deleted lines
-		 */
-		BlockData removedLines();
-	}
-	public interface ChangeBlock extends AddBlock, DeleteBlock {
-	}
-
-
-	private static int fileRevIndex(HgDataFile df, int csetRevIndex) {
-		Nodeid fileRev = df.getRepo().getManifest().getFileRevision(csetRevIndex, df.getPath());
-		return df.getRevisionIndex(fileRev);
-	}
-	
-	private static class FileRevisionHistoryChunk {
-		private final HgDataFile df;
-		// change ancestry, sequence of file revisions
-		private IntVector fileRevsToVisit;
-		// parent pairs of complete file history
-		private IntVector fileParentRevs;
-		// map file revision to changelog revision (sparse array, only file revisions to visit are set)
-		private int[] file2changelog;
-		private int originChangelogRev = BAD_REVISION, originFileRev = BAD_REVISION;
-
-		public FileRevisionHistoryChunk(HgDataFile file) {
-			df = file;
-		}
-		
-		public void init(int changelogRevisionIndex) {
-			// XXX df.indexWalk(0, fileRevIndex, ) might be more effective
-			int fileRevIndex = fileRevIndex(df, changelogRevisionIndex);
-			int[] fileRevParents = new int[2];
-			fileParentRevs = new IntVector((fileRevIndex+1) * 2, 0);
-			fileParentRevs.add(NO_REVISION, NO_REVISION); // parents of fileRevIndex == 0
-			for (int i = 1; i <= fileRevIndex; i++) {
-				df.parents(i, fileRevParents, null, null);
-				fileParentRevs.add(fileRevParents[0], fileRevParents[1]);
-			}
-			// fileRevsToVisit keep file change ancestry from new to old
-			fileRevsToVisit = new IntVector(fileRevIndex + 1, 0);
-			// keep map of file revision to changelog revision
-			file2changelog = new int[fileRevIndex+1];
-			// only elements worth visit would get mapped, so there would be unfilled areas in the file2changelog,
-			// prevent from error (make it explicit) by bad value
-			Arrays.fill(file2changelog, BAD_REVISION);
-			LinkedList<Integer> queue = new LinkedList<Integer>();
-			BitSet seen = new BitSet(fileRevIndex + 1);
-			queue.add(fileRevIndex);
-			do {
-				int x = queue.removeFirst();
-				if (seen.get(x)) {
-					continue;
-				}
-				seen.set(x);
-				fileRevsToVisit.add(x);
-				file2changelog[x] = df.getChangesetRevisionIndex(x);
-				int p1 = fileParentRevs.get(2*x);
-				int p2 = fileParentRevs.get(2*x + 1);
-				if (p1 != NO_REVISION) {
-					queue.addLast(p1);
-				}
-				if (p2 != NO_REVISION) {
-					queue.addLast(p2);
-				}
-			} while (!queue.isEmpty());
-			// make sure no child is processed before we handled all (grand-)parents of the element
-			fileRevsToVisit.sort(false);
-		}
-		
-		public void linkTo(FileRevisionHistoryChunk target) {
-			// assume that target.init() has been called already 
-			if (target == null) {
-				return;
-			}
-			target.originFileRev = fileRevsToVisit.get(0); // files to visit are new to old
-			target.originChangelogRev = changeset(target.originFileRev);
-		}
-
-		/**
-		 * Mark revision closest(ceil) to specified as the very first one (no parents) 
-		 */
-		public void chopAtChangeset(int firstChangelogRevOfInterest) {
-			if (firstChangelogRevOfInterest == 0) {
-				return; // nothing to do
-			}
-			int i = 0, x = fileRevsToVisit.size(), fileRev = BAD_REVISION;
-			// fileRevsToVisit is new to old, greater numbers to smaller
-			while (i < x && changeset(fileRev = fileRevsToVisit.get(i)) >= firstChangelogRevOfInterest) {
-				i++;
-			}
-			assert fileRev != BAD_REVISION; // there's at least 1 revision in fileRevsToVisit
-			if (i == x && changeset(fileRev) != firstChangelogRevOfInterest) {
-				assert false : "Requested changeset shall belong to the chunk";
-				return;
-			}
-			fileRevsToVisit.trimTo(i); // no need to iterate more
-			// pretend fileRev got no parents
-			fileParentRevs.set(fileRev * 2, NO_REVISION);
-			fileParentRevs.set(fileRev, NO_REVISION);
-		}
-
-		public int[] fileRevisions(HgIterateDirection iterateOrder) {
-			// fileRevsToVisit is { r10, r7, r6, r5, r0 }, new to old
-			int[] rv = fileRevsToVisit.toArray();
-			if (iterateOrder == OldToNew) {
-				// reverse return value
-				for (int a = 0, b = rv.length-1; a < b; a++, b--) {
-					int t = rv[b];
-					rv[b] = rv[a];
-					rv[a] = t;
-				}
-			}
-			return rv;
-		}
-		
-		public int changeset(int fileRevIndex) {
-			return file2changelog[fileRevIndex];
-		}
-		
-		public void fillFileParents(int fileRevIndex, int[] fileParents) {
-			if (fileRevIndex == 0 && originFileRev != BAD_REVISION) {
-				// this chunk continues another file
-				assert originFileRev != NO_REVISION;
-				fileParents[0] = originFileRev;
-				fileParents[1] = NO_REVISION;
-				return;
-			}
-			fileParents[0] = fileParentRevs.get(fileRevIndex * 2);
-			fileParents[1] = fileParentRevs.get(fileRevIndex * 2 + 1);
-		}
-		
-		public void fillCsetParents(int fileRevIndex, int[] csetParents) {
-			if (fileRevIndex == 0 && originFileRev != BAD_REVISION) {
-				assert originFileRev != NO_REVISION;
-				csetParents[0] = originChangelogRev;
-				csetParents[1] = NO_REVISION; // I wonder if possible to start a copy with two parents?
-				return;
-			}
-			int fp1 = fileParentRevs.get(fileRevIndex * 2);
-			int fp2 = fileParentRevs.get(fileRevIndex * 2 + 1);
-			csetParents[0] = fp1 == NO_REVISION ? NO_REVISION : changeset(fp1);
-			csetParents[1] = fp2 == NO_REVISION ? NO_REVISION : changeset(fp2);
-		}
-	}
-}
--- a/src/org/tmatesoft/hg/repo/HgBookmarks.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgBookmarks.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -16,76 +16,124 @@
  */
 package org.tmatesoft.hg.repo;
 
+import static org.tmatesoft.hg.util.LogFacility.Severity.Error;
+
 import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.HgRepositoryLockException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.FileChangeMonitor;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.LineReader;
 import org.tmatesoft.hg.util.LogFacility;
 
 /**
+ * Access to bookmarks state
  * 
+ * @see http://mercurial.selenic.com/wiki/Bookmarks
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
 public final class HgBookmarks {
-	private final Internals internalRepo;
+	private final Internals repo;
 	private Map<String, Nodeid> bookmarks = Collections.emptyMap();
-	private String activeBookmark; 
+	private String activeBookmark;
+	private FileChangeMonitor activeTracker, bmFileTracker;
 
 	HgBookmarks(Internals internals) {
-		internalRepo = internals;
+		repo = internals;
 	}
 	
-	/*package-local*/ void read() throws HgInvalidControlFileException {
-		final LogFacility log = internalRepo.getSessionContext().getLog();
-		final HgRepository repo = internalRepo.getRepo();
-		File all = internalRepo.getFileFromRepoDir(HgRepositoryFiles.Bookmarks.getName());
-		LinkedHashMap<String, Nodeid> bm = new LinkedHashMap<String, Nodeid>();
-		if (all.canRead()) {
-			LineReader lr1 = new LineReader(all, log);
-			ArrayList<String> c = new ArrayList<String>();
-			lr1.read(new LineReader.SimpleLineCollector(), c);
-			for (String s : c) {
-				int x = s.indexOf(' ');
-				try {
-					if (x > 0) {
-						Nodeid nid = Nodeid.fromAscii(s.substring(0, x));
-						String name = new String(s.substring(x+1));
-						if (repo.getChangelog().isKnown(nid)) {
-							// copy name part not to drag complete line
-							bm.put(name, nid);
+	/*package-local*/ void read() throws HgRuntimeException {
+		readBookmarks();
+		readActiveBookmark();
+	}
+	
+	private void readBookmarks() throws HgRuntimeException {
+		final LogFacility log = repo.getLog();
+		File all = repo.getRepositoryFile(HgRepositoryFiles.Bookmarks);
+		try {
+			LinkedHashMap<String, Nodeid> bm = new LinkedHashMap<String, Nodeid>();
+			if (all.canRead() && all.isFile()) {
+				LineReader lr1 = new LineReader(all, log);
+				ArrayList<String> c = new ArrayList<String>();
+				lr1.read(new LineReader.SimpleLineCollector(), c);
+				for (String s : c) {
+					int x = s.indexOf(' ');
+					try {
+						if (x > 0) {
+							Nodeid nid = Nodeid.fromAscii(s.substring(0, x));
+							String name = new String(s.substring(x+1));
+							if (repo.getRepo().getChangelog().isKnown(nid)) {
+								// copy name part not to drag complete line
+								bm.put(name, nid);
+							} else {
+								log.dump(getClass(), LogFacility.Severity.Info, "Bookmark %s points to non-existent revision %s, ignored.", name, nid);
+							}
 						} else {
-							log.dump(getClass(), LogFacility.Severity.Info, "Bookmark %s points to non-existent revision %s, ignored.", name, nid);
+							log.dump(getClass(), LogFacility.Severity.Warn, "Can't parse bookmark entry: %s", s);
 						}
-					} else {
-						log.dump(getClass(), LogFacility.Severity.Warn, "Can't parse bookmark entry: %s", s);
+					} catch (IllegalArgumentException ex) {
+						log.dump(getClass(), LogFacility.Severity.Warn, ex, String.format("Can't parse bookmark entry: %s", s));
 					}
-				} catch (IllegalArgumentException ex) {
-					log.dump(getClass(), LogFacility.Severity.Warn, ex, String.format("Can't parse bookmark entry: %s", s));
+				}
+				bookmarks = bm;
+			} else {
+				bookmarks = Collections.emptyMap();
+			}
+			if (bmFileTracker == null) {
+				bmFileTracker = new FileChangeMonitor(all);
+			}
+			bmFileTracker.touch(this);
+		} catch (HgInvalidControlFileException ex) {
+			// do not translate HgInvalidControlFileException into another HgInvalidControlFileException
+			// but only HgInvalidFileException
+			throw ex;
+		} catch (HgIOException ex) {
+			throw new HgInvalidControlFileException(ex, true);
+		}
+	}
+
+	private void readActiveBookmark() throws HgInvalidControlFileException { 
+		activeBookmark = null;
+		File active = repo.getRepositoryFile(HgRepositoryFiles.BookmarksCurrent);
+		try {
+			if (active.canRead() && active.isFile()) {
+				LineReader lr2 = new LineReader(active, repo.getLog());
+				ArrayList<String> c = new ArrayList<String>(2);
+				lr2.read(new LineReader.SimpleLineCollector(), c);
+				if (c.size() > 0) {
+					activeBookmark = c.get(0);
 				}
 			}
-			bookmarks = bm;
-		} else {
-			bookmarks = Collections.emptyMap();
-		}
-		
-		activeBookmark = null;
-		File active = internalRepo.getFileFromRepoDir(HgRepositoryFiles.BookmarksCurrent.getName());
-		if (active.canRead()) {
-			LineReader lr2 = new LineReader(active, log);
-			ArrayList<String> c = new ArrayList<String>(2);
-			lr2.read(new LineReader.SimpleLineCollector(), c);
-			if (c.size() > 0) {
-				activeBookmark = c.get(0);
+			if (activeTracker == null) {
+				activeTracker = new FileChangeMonitor(active);
 			}
+			activeTracker.touch(this);
+		} catch (HgIOException ex) {
+			throw new HgInvalidControlFileException(ex, true);
 		}
 	}
+	
+	/*package-local*/void reloadIfChanged() throws HgRuntimeException {
+		assert activeTracker != null;
+		assert bmFileTracker != null;
+		if (bmFileTracker.changed(this)) {
+			readBookmarks();
+		}
+		if (activeTracker.changed(this)) {
+			readActiveBookmark();
+		}
+	}
+
 
 	/**
 	 * Tell name of the active bookmark 
@@ -114,4 +162,57 @@
 		// hence can use view (not a synchronized copy) here
 		return Collections.unmodifiableSet(bookmarks.keySet());
 	}
+
+	/**
+	 * Update currently bookmark with new commit.
+	 * Note, child has to be descendant of a p1 or p2
+	 * 
+	 * @param p1 first parent, or <code>null</code>
+	 * @param p2 second parent, or <code>null</code>
+	 * @param child new commit, descendant of one of the parents, not <code>null</code>
+	 * @throws HgIOException if failed to write updated bookmark information 
+	 * @throws HgRepositoryLockException  if failed to lock repository for modifications
+	 */
+	public void updateActive(Nodeid p1, Nodeid p2, Nodeid child) throws HgIOException, HgRepositoryLockException {
+		if (activeBookmark == null) {
+			return;
+		}
+		Nodeid activeRev = getRevision(activeBookmark);
+		if (!activeRev.equals(p1) && !activeRev.equals(p2)) {
+			return; // TestCommit#testNoBookmarkUpdate
+		}
+		if (child.equals(activeRev)) {
+			return;
+		}
+		LinkedHashMap<String, Nodeid> copy = new LinkedHashMap<String, Nodeid>(bookmarks);
+		copy.put(activeBookmark, child);
+		bookmarks = copy;
+		write();
+	}
+	
+	private void write() throws HgIOException, HgRepositoryLockException {
+		File bookmarksFile = repo.getRepositoryFile(HgRepositoryFiles.Bookmarks);
+		HgRepositoryLock workingDirLock = repo.getRepo().getWorkingDirLock();
+		FileWriter fileWriter = null;
+		workingDirLock.acquire();
+		try {
+			fileWriter = new FileWriter(bookmarksFile);
+			for (String bm : bookmarks.keySet()) {
+				Nodeid nid = bookmarks.get(bm);
+				fileWriter.write(String.format("%s %s\n", nid.toString(), bm));
+			}
+			fileWriter.flush();
+		} catch (IOException ex) {
+			throw new HgIOException("Failed to serialize bookmarks", ex, bookmarksFile);
+		} finally {
+			try {
+				if (fileWriter != null) {
+					fileWriter.close();
+				}
+			} catch (IOException ex) {
+				repo.getLog().dump(getClass(), Error, ex, null);
+			}
+			workingDirLock.release();
+		}
+	}
 }
--- a/src/org/tmatesoft/hg/repo/HgBranches.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgBranches.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -27,8 +27,8 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.LinkedList;
@@ -38,35 +38,39 @@
 import java.util.regex.Pattern;
 
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.ChangelogMonitor;
 import org.tmatesoft.hg.internal.Experimental;
+import org.tmatesoft.hg.internal.FileUtils;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.util.ProgressSupport;
 
 /**
- *
+ * Access information about branches in the repository
+ * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
 public class HgBranches {
 	
+	private final Internals internalRepo;
+	private final ChangelogMonitor repoChangeTracker;
 	private final Map<String, BranchInfo> branches = new TreeMap<String, BranchInfo>();
-	private final HgRepository repo;
-	private final Internals internalRepo;
 	private boolean isCacheActual = false;
 
 	HgBranches(Internals internals) {
 		internalRepo = internals;
-		repo = internals.getRepo(); // merely a cached value
+		repoChangeTracker = new ChangelogMonitor(internals.getRepo());
 	}
 
 	private int readCache() {
+		final HgRepository repo = internalRepo.getRepo();
 		File branchheadsCache = getCacheFile();
 		int lastInCache = -1;
 		if (!branchheadsCache.canRead()) {
 			return lastInCache;
 		}
-		BufferedReader br = null;
+		BufferedReader br = null; // TODO replace with LineReader
 		final Pattern spacePattern = Pattern.compile(" ");
 		try {
 			final LinkedHashMap<String, List<Nodeid>> branchHeads = new LinkedHashMap<String, List<Nodeid>>();
@@ -109,133 +113,106 @@
 		} catch (NumberFormatException ex) {
 			repo.getSessionContext().getLog().dump(getClass(), Warn, ex, null);
 			// FALL THROUGH
-		} catch (HgInvalidControlFileException ex) {
-			// shall not happen, thus log as error
-			repo.getSessionContext().getLog().dump(getClass(), Error, ex, null);
-			// FALL THROUGH
-		} catch (HgInvalidRevisionException ex) {
+		} catch (HgRuntimeException ex) {
+			// if happens, log error and pretend there's no cache
 			repo.getSessionContext().getLog().dump(getClass(), Error, ex, null);
 			// FALL THROUGH
 		} finally {
-			if (br != null) {
-				try {
-					br.close();
-				} catch (IOException ex) {
-					repo.getSessionContext().getLog().dump(getClass(), Warn, ex, null); // ignore
-				}
-			}
+			new FileUtils(repo.getSessionContext().getLog(), this).closeQuietly(br);
 		}
 		return -1; // deliberately not lastInCache, to avoid anything but -1 when 1st line was read and there's error is in lines 2..end
 	}
-
-	void collect(final ProgressSupport ps) throws HgInvalidControlFileException {
+	
+	void collect(final ProgressSupport ps) throws HgRuntimeException {
 		branches.clear();
-		ps.start(1 + repo.getChangelog().getRevisionCount() * 2);
+		final HgRepository repo = internalRepo.getRepo();
+		final HgChangelog clog = repo.getChangelog();
+		final HgRevisionMap<HgChangelog> rmap;
+		ps.start(1 + clog.getRevisionCount() * 2);
 		//
 		int lastCached = readCache();
-		isCacheActual = lastCached == repo.getChangelog().getLastRevision();
+		isCacheActual = lastCached == clog.getLastRevision();
 		if (!isCacheActual) {
-			final HgParentChildMap<HgChangelog> pw = new HgParentChildMap<HgChangelog>(repo.getChangelog());
+			// XXX need a way to share HgParentChildMap<HgChangelog>
+			final HgParentChildMap<HgChangelog> pw = new HgParentChildMap<HgChangelog>(clog);
 			pw.init();
-			ps.worked(repo.getChangelog().getRevisionCount());
+			ps.worked(clog.getRevisionCount());
+			//
 			// first revision branch found at
 			final HashMap<String, Nodeid> branchStart = new HashMap<String, Nodeid>();
-			// last revision seen for the branch
-			final HashMap<String, Nodeid> branchLastSeen = new HashMap<String, Nodeid>();
 			// revisions from the branch that have no children at all
 			final HashMap<String, List<Nodeid>> branchHeads = new HashMap<String, List<Nodeid>>();
-			// revisions that are immediate children of a node from a given branch
-			// after iteration, there are some revisions left in this map (children of a branch last revision
-			// that doesn't belong to the branch. No use of this now, perhaps can deduce isInactive (e.g.those 
-			// branches that have non-empty candidates are inactive if all their heads are roots for those left)
-			final HashMap<String, List<Nodeid>> branchHeadCandidates = new HashMap<String, List<Nodeid>>();
 			HgChangelog.Inspector insp = new HgChangelog.Inspector() {
 				
+				private final ArrayList<Nodeid> parents = new ArrayList<Nodeid>(3);
+				
 				public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
 					String branchName = cset.branch();
+					List<Nodeid> _branchHeads;
+					// there are chances (with --force key) branch can get more than one start
+					// revision. Neither BranchInfo nor this code support this scenario at the moment. 
 					if (!branchStart.containsKey(branchName)) {
 						branchStart.put(branchName, nodeid);
-						branchHeads.put(branchName, new LinkedList<Nodeid>());
-						branchHeadCandidates.put(branchName, new LinkedList<Nodeid>());
+						branchHeads.put(branchName, _branchHeads = new LinkedList<Nodeid>());
 					} else {
-						final List<Nodeid> headCandidates = branchHeadCandidates.get(branchName);
-						if (headCandidates.remove(nodeid)) {
-							// likely we don't need to keep parent anymore, as we found at least 1 child thereof to be at the same branch
-							// however, it's possible the child we found is a result of an earlier fork, and revision in the 
-							// branchLastSeen is 'parallel' head, which needs to be kept
-							Nodeid lastSeenInBranch = branchLastSeen.get(branchName);
-							// check if current revision is on descendant line. Seems direct parents check is enough
-							if (pw.safeFirstParent(nodeid).equals(lastSeenInBranch) || pw.safeSecondParent(nodeid).equals(lastSeenInBranch)) {
-								branchLastSeen.remove(branchName);
+						_branchHeads = branchHeads.get(branchName);
+						if (_branchHeads == null) {
+							branchHeads.put(branchName, _branchHeads = new LinkedList<Nodeid>());
+						}
+					}
+					// so far present node is the best candidate for head
+					_branchHeads.add(nodeid);
+					parents.clear();
+					// parents of this node, however, cease to be heads (if they are from this branch)
+					pw.appendParentsOf(nodeid, parents);
+					_branchHeads.removeAll(parents);
+					ps.worked(1);
+				}
+			};
+			// XXX alternatively may iterate with pw.all().subList(lastCached)
+			// but need an effective way to find out branch of particular changeset
+			clog.range(lastCached == -1 ? 0 : lastCached+1, HgRepository.TIP, insp);
+			//
+			// build BranchInfo, based on found and cached 
+			for (String bn : branchStart.keySet()) {
+				BranchInfo bi = branches.get(bn);
+				if (bi != null) {
+					// combine heads found so far with those cached 
+					LinkedHashSet<Nodeid> oldHeads = new LinkedHashSet<Nodeid>(bi.getHeads());
+					// expect size of both oldHeads and newHeads sets to be small, and for x for hence acceptable.
+					for (Nodeid newHead : branchHeads.get(bn)) {
+						for (Iterator<Nodeid> it = oldHeads.iterator(); it.hasNext();) {
+							if (pw.isChild(it.next(), newHead)) {
+								it.remove();
 							}
 						}
 					}
-					List<Nodeid> immediateChildren = pw.directChildren(nodeid);
-					if (immediateChildren.size() > 0) {
-						// 1) children may be in another branch
-						// and unless we later came across another element from this branch,
-						// we need to record all these as potential heads
-						//
-						// 2) head1 with children in different branch, and head2 in this branch without children
-						branchLastSeen.put(branchName, nodeid);
-						branchHeadCandidates.get(branchName).addAll(immediateChildren);
-					} else {
-						// no more children known for this node, it's (one of the) head of the branch
-						branchHeads.get(branchName).add(nodeid);
-					}
-					ps.worked(1);
-				}
-			}; 
-			repo.getChangelog().range(lastCached == -1 ? 0 : lastCached+1, HgRepository.TIP, insp);
-			// those last seen revisions from the branch that had no children from the same branch are heads.
-			for (String bn : branchLastSeen.keySet()) {
-				// these are inactive branches? - there were children, but not from the same branch?
-				branchHeads.get(bn).add(branchLastSeen.get(bn));
-			}
-			for (String bn : branchStart.keySet()) {
-				BranchInfo bi = branches.get(bn);
-				if (bi != null) {
-					// although heads from cache shall not intersect with heads after lastCached,
-					// use of LHS doesn't hurt (and makes sense e.g. if cache is not completely correct in my tests) 
-					LinkedHashSet<Nodeid> heads = new LinkedHashSet<Nodeid>(bi.getHeads());
-					for (Nodeid oldHead : bi.getHeads()) {
-						// XXX perhaps, need pw.canReach(Nodeid from, Collection<Nodeid> to)
-						List<Nodeid> newChildren = pw.childrenOf(Collections.singletonList(oldHead));
-						if (!newChildren.isEmpty()) {
-							// likely not a head any longer,
-							// check if any new head can be reached from old one, and, if yes,
-							// do not consider that old head as head.
-							for (Nodeid newHead : branchHeads.get(bn)) {
-								if (newChildren.contains(newHead)) {
-									heads.remove(oldHead);
-									break;
-								}
-							}
-						} // else - oldHead still head for the branch
-					}
-					heads.addAll(branchHeads.get(bn));
-					bi = new BranchInfo(bn, bi.getStart(), heads.toArray(new Nodeid[0]));
+					oldHeads.addAll(branchHeads.get(bn));
+					assert oldHeads.size() > 0;
+					bi = new BranchInfo(bn, bi.getStart(), oldHeads.toArray(new Nodeid[oldHeads.size()]));
 				} else {
 					Nodeid[] heads = branchHeads.get(bn).toArray(new Nodeid[0]);
 					bi = new BranchInfo(bn, branchStart.get(bn), heads);
 				}
 				branches.put(bn, bi);
 			}
+			rmap = pw.getRevisionMap();
+		} else { // !cacheActual
+			rmap = new HgRevisionMap<HgChangelog>(clog).init(); 
 		}
-		final HgChangelog clog = repo.getChangelog();
-		final HgRevisionMap<HgChangelog> rmap = new HgRevisionMap<HgChangelog>(clog).init();
 		for (BranchInfo bi : branches.values()) {
 			bi.validate(clog, rmap);
 		}
+		repoChangeTracker.touch();
 		ps.done();
 	}
 
-	public List<BranchInfo> getAllBranches() {
+	public List<BranchInfo> getAllBranches() throws HgInvalidControlFileException {
 		return new LinkedList<BranchInfo>(branches.values());
 				
 	}
 
-	public BranchInfo getBranch(String name) {
+	public BranchInfo getBranch(String name) throws HgInvalidControlFileException {
 		return branches.get(name);
 	}
 
@@ -258,6 +235,7 @@
 		if (!branchheadsCache.canWrite()) {
 			return;
 		}
+		final HgRepository repo = internalRepo.getRepo();
 		final int lastRev = repo.getChangelog().getLastRevision();
 		final Nodeid lastNid = repo.getChangelog().getRevision(lastRev);
 		BufferedWriter bw = new BufferedWriter(new FileWriter(branchheadsCache));
@@ -280,6 +258,12 @@
 		// prior to 1.8 used to be .hg/branchheads.cache
 		return internalRepo.getFileFromRepoDir("cache/branchheads");
 	}
+	
+	/*package-local*/ void reloadIfChanged(ProgressSupport ps) throws HgRuntimeException {
+		if (repoChangeTracker.isChanged()) {
+			collect(ps);
+		}
+	}
 
 	public static class BranchInfo {
 		private final String name;
@@ -302,7 +286,7 @@
 			this(branchName, Nodeid.NULL, branchHeads);
 		}
 		
-		void validate(HgChangelog clog, HgRevisionMap<HgChangelog> rmap) throws HgInvalidControlFileException {
+		void validate(HgChangelog clog, HgRevisionMap<HgChangelog> rmap) throws HgRuntimeException {
 			int[] localCset = new int[heads.size()];
 			int i = 0;
 			for (Nodeid h : heads) {
@@ -385,6 +369,7 @@
 		}
 //		public Nodeid getTip() {
 //		}
+		// XXX Not public as there are chances for few possible branch starts, and I need to decide how to handle that
 		/*public*/ Nodeid getStart() {
 			// first node where branch appears
 			return start;
--- a/src/org/tmatesoft/hg/repo/HgBundle.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgBundle.java	Wed Jul 10 11:48:55 2013 +0200
@@ -17,9 +17,11 @@
 package org.tmatesoft.hg.repo;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.ConcurrentModificationException;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.internal.ByteArrayChannel;
@@ -27,8 +29,10 @@
 import org.tmatesoft.hg.internal.Callback;
 import org.tmatesoft.hg.internal.DataAccess;
 import org.tmatesoft.hg.internal.DataAccessProvider;
+import org.tmatesoft.hg.internal.DataSerializer;
 import org.tmatesoft.hg.internal.DigestHelper;
 import org.tmatesoft.hg.internal.Experimental;
+import org.tmatesoft.hg.internal.FileUtils;
 import org.tmatesoft.hg.internal.InflaterDataAccess;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.Lifecycle;
@@ -50,17 +54,17 @@
 
 	private final File bundleFile;
 	private final DataAccessProvider accessProvider;
-//	private final SessionContext sessionContext;
+	private final SessionContext ctx;
 	private Lifecycle.BasicCallback flowControl;
 
-	HgBundle(SessionContext ctx, DataAccessProvider dap, File bundle) {
-//		sessionContext = ctx;
+	HgBundle(SessionContext sessionContext, DataAccessProvider dap, File bundle) {
+		ctx = sessionContext;
 		accessProvider = dap;
 		bundleFile = bundle;
 	}
 
 	private DataAccess getDataStream() throws IOException {
-		DataAccess da = accessProvider.createReader(bundleFile);
+		DataAccess da = accessProvider.createReader(bundleFile, false);
 		byte[] signature = new byte[6];
 		if (da.length() > 6) {
 			da.readBytes(signature, 0, 6);
@@ -143,7 +147,7 @@
 
 To recreate 30bd..e5, one have to take content of 9429..e0, not its p1 f1db..5e
  */
-			public boolean element(GroupElement ge) {
+			public boolean element(GroupElement ge) throws HgRuntimeException {
 				emptyChangelog = false;
 				HgChangelog changelog = hgRepo.getChangelog();
 				try {
@@ -194,24 +198,24 @@
 	// callback to minimize amount of Strings and Nodeids instantiated
 	@Callback
 	public interface Inspector {
-		void changelogStart();
-
-		void changelogEnd();
+		void changelogStart() throws HgRuntimeException;
 
-		void manifestStart();
+		void changelogEnd() throws HgRuntimeException;
 
-		void manifestEnd();
+		void manifestStart() throws HgRuntimeException;
 
-		void fileStart(String name);
+		void manifestEnd() throws HgRuntimeException;
 
-		void fileEnd(String name);
+		void fileStart(String name) throws HgRuntimeException;
+
+		void fileEnd(String name) throws HgRuntimeException;
 
 		/**
 		 * XXX desperately need exceptions here
 		 * @param element data element, instance might be reused, don't keep a reference to it or its raw data
 		 * @return <code>true</code> to continue
 		 */
-		boolean element(GroupElement element);
+		boolean element(GroupElement element) throws HgRuntimeException;
 	}
 
 	/**
@@ -355,7 +359,7 @@
 		flowControl = null;
 	}
 
-	private void internalInspectChangelog(DataAccess da, Inspector inspector) throws IOException {
+	private void internalInspectChangelog(DataAccess da, Inspector inspector) throws IOException, HgRuntimeException {
 		if (da.isEmpty()) {
 			return;
 		}
@@ -370,7 +374,7 @@
 		inspector.changelogEnd();
 	}
 
-	private void internalInspectManifest(DataAccess da, Inspector inspector) throws IOException {
+	private void internalInspectManifest(DataAccess da, Inspector inspector) throws IOException, HgRuntimeException {
 		if (da.isEmpty()) {
 			return;
 		}
@@ -385,7 +389,7 @@
 		inspector.manifestEnd();
 	}
 
-	private void internalInspectFiles(DataAccess da, Inspector inspector) throws IOException {
+	private void internalInspectFiles(DataAccess da, Inspector inspector) throws IOException, HgRuntimeException {
 		while (!da.isEmpty()) {
 			int fnameLen = da.readInt();
 			if (fnameLen <= 4) {
@@ -406,7 +410,7 @@
 		}
 	}
 
-	private static void readGroup(DataAccess da, Inspector inspector) throws IOException {
+	private static void readGroup(DataAccess da, Inspector inspector) throws IOException, HgRuntimeException {
 		int len = da.readInt();
 		boolean good2go = true;
 		Nodeid prevNodeid = Nodeid.NULL;
@@ -533,4 +537,29 @@
 			return String.format("%s %s %s %s; patches:%d\n", node().shortNotation(), firstParent().shortNotation(), secondParent().shortNotation(), cset().shortNotation(), patchCount);
 		}
 	}
+
+	@Experimental(reason="Work in progress, not an API")
+	public class BundleSerializer implements DataSerializer.DataSource {
+
+		public void serialize(DataSerializer out) throws HgIOException, HgRuntimeException {
+			FileInputStream fis = null;
+			try {
+				fis = new FileInputStream(HgBundle.this.bundleFile);
+				byte[] buffer = new byte[8*1024];
+				int r;
+				while ((r = fis.read(buffer, 0, buffer.length)) > 0) {
+					out.write(buffer, 0, r);
+				}
+				
+			} catch (IOException ex) {
+				throw new HgIOException("Failed to serialize bundle", HgBundle.this.bundleFile);
+			} finally {
+				new FileUtils(HgBundle.this.ctx.getLog(), this).closeQuietly(fis, HgBundle.this.bundleFile);
+			}
+		}
+
+		public int serializeLength() throws HgRuntimeException {
+			return Internals.ltoi(HgBundle.this.bundleFile.length());
+		}
+	}
 }
--- a/src/org/tmatesoft/hg/repo/HgChangelog.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgChangelog.java	Wed Jul 10 11:48:55 2013 +0200
@@ -50,21 +50,43 @@
 public final class HgChangelog extends Revlog {
 
 	/* package-local */HgChangelog(HgRepository hgRepo, RevlogStream content) {
-		super(hgRepo, content);
+		super(hgRepo, content, true);
 	}
 
-	public void all(final HgChangelog.Inspector inspector) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	/**
+	 * Iterate over whole changelog
+	 * @param inspector callback to process entries
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
+	 */
+	public void all(final HgChangelog.Inspector inspector) throws HgRuntimeException {
 		range(0, getLastRevision(), inspector);
 	}
 
-	public void range(int start, int end, final HgChangelog.Inspector inspector) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	/**
+	 * Iterate over changelog part
+	 * @param start first changelog entry to process
+	 * @param end last changelog entry to process
+	 * @param inspector callback to process entries
+	 * @throws HgInvalidRevisionException if any supplied revision doesn't identify revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
+	 */
+	public void range(int start, int end, final HgChangelog.Inspector inspector) throws HgRuntimeException {
 		if (inspector == null) {
 			throw new IllegalArgumentException();
 		}
 		content.iterate(start, end, true, new RawCsetParser(inspector));
 	}
 
-	public List<RawChangeset> range(int start, int end) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	/**
+	 * @see #range(int, int, Inspector)
+	 * @return changeset entry objects, never <code>null</code>
+	 * @throws HgInvalidRevisionException if any supplied revision doesn't identify revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
+	 */
+	public List<RawChangeset> range(int start, int end) throws HgRuntimeException {
 		final RawCsetCollector c = new RawCsetCollector(end - start + 1);
 		range(start, end, c);
 		return c.result;
@@ -75,8 +97,11 @@
 	 * changesets strictly in the order they are in the changelog.
 	 * @param inspector callback to get changesets
 	 * @param revisions revisions to read, unrestricted ordering.
+	 * @throws HgInvalidRevisionException if any supplied revision doesn't identify revision from this revlog <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	public void range(final HgChangelog.Inspector inspector, final int... revisions) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	public void range(final HgChangelog.Inspector inspector, final int... revisions) throws HgRuntimeException {
 		Arrays.sort(revisions);
 		rangeInternal(inspector, revisions);
 	}
@@ -84,7 +109,7 @@
 	/**
 	 * Friends-only version of {@link #range(Inspector, int...)}, when callers know array is sorted
 	 */
-	/*package-local*/ void rangeInternal(HgChangelog.Inspector inspector, int[] sortedRevisions) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	/*package-local*/ void rangeInternal(HgChangelog.Inspector inspector, int[] sortedRevisions) throws HgRuntimeException {
 		if (sortedRevisions == null || sortedRevisions.length == 0) {
 			return;
 		}
@@ -95,10 +120,12 @@
 	}
 
 	/**
-	 * @throws HgInvalidRevisionException if supplied nodeid doesn't identify any revision from this revlog  
-	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
+	 * Get changeset entry object
+	 * @throws HgInvalidRevisionException if supplied nodeid doesn't identify any revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	public RawChangeset changeset(Nodeid nid)  throws HgInvalidControlFileException, HgInvalidRevisionException {
+	public RawChangeset changeset(Nodeid nid)  throws HgRuntimeException {
 		int x = getRevisionIndex(nid);
 		return range(x, x).get(0);
 	}
@@ -113,7 +140,7 @@
 		 * @param nodeid revision being inspected
 		 * @param cset changeset raw data
 		 */
-		void next(int revisionIndex, Nodeid nodeid, RawChangeset cset);
+		void next(int revisionIndex, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException;
 	}
 
 	/**
@@ -278,23 +305,7 @@
 			// unixTime is local time, and timezone records difference of the local time to UTC.
 			Date _time = new Date(unixTime * 1000);
 			String _extras = space2 < _timeString.length() ? _timeString.substring(space2 + 1) : null;
-			Map<String, String> _extrasMap;
-			final String extras_branch_key = "branch";
-			if (_extras == null || _extras.trim().length() == 0) {
-				_extrasMap = Collections.singletonMap(extras_branch_key, HgRepository.DEFAULT_BRANCH_NAME);
-			} else {
-				_extrasMap = new HashMap<String, String>();
-				for (String pair : _extras.split("\00")) {
-					pair = decode(pair);
-					int eq = pair.indexOf(':');
-					_extrasMap.put(pair.substring(0, eq), pair.substring(eq + 1));
-				}
-				if (!_extrasMap.containsKey(extras_branch_key)) {
-					_extrasMap.put(extras_branch_key, HgRepository.DEFAULT_BRANCH_NAME);
-				}
-				_extrasMap = Collections.unmodifiableMap(_extrasMap);
-			}
-
+			Map<String, String> _extrasMap = parseExtras(_extras);
 			//
 			int lastStart = breakIndex3 + 1;
 			int breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex);
@@ -302,6 +313,8 @@
 			if (breakIndex4 > lastStart) {
 				// if breakIndex4 == lastStart, we already found \n\n and hence there are no files (e.g. merge revision)
 				_files = new ArrayList<String>(5);
+				// TODO pool file names
+				// TODO encoding of filenames?
 				while (breakIndex4 != -1 && breakIndex4 + 1 < bufferEndIndex) {
 					_files.add(new String(data, lastStart, breakIndex4 - lastStart));
 					lastStart = breakIndex4 + 1;
@@ -337,6 +350,34 @@
 			this.extras = _extrasMap;
 		}
 
+		private Map<String, String> parseExtras(String _extras) {
+			final String extras_branch_key = "branch";
+			_extras = _extras == null ? null : _extras.trim();
+			if (_extras == null || _extras.length() == 0) {
+				return Collections.singletonMap(extras_branch_key, HgRepository.DEFAULT_BRANCH_NAME);
+			}
+			Map<String, String> _extrasMap = new HashMap<String, String>();
+			int lastIndex = 0;
+			do {
+				String pair;
+				int sp = _extras.indexOf('\0', lastIndex);
+				if (sp == -1) {
+					sp = _extras.length();
+				}
+				if (sp > lastIndex) {
+					pair = _extras.substring(lastIndex, sp);
+					pair = decode(pair);
+					int eq = pair.indexOf(':');
+					_extrasMap.put(pair.substring(0, eq), pair.substring(eq + 1));
+					lastIndex = sp + 1;
+				}
+			} while (lastIndex < _extras.length());
+			if (!_extrasMap.containsKey(extras_branch_key)) {
+				_extrasMap.put(extras_branch_key, HgRepository.DEFAULT_BRANCH_NAME);
+			}
+			return Collections.unmodifiableMap(_extrasMap);
+		}
+
 		private static int indexOf(byte[] src, byte what, int startOffset, int endIndex) {
 			for (int i = startOffset; i < endIndex; i++) {
 				if (src[i] == what) {
@@ -396,7 +437,7 @@
 			}
 		}
 
-		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) {
+		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) throws HgRuntimeException {
 			try {
 				byte[] data = da.byteArray();
 				cset.init(data, 0, data.length, usersPool);
--- a/src/org/tmatesoft/hg/repo/HgDataFile.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgDataFile.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2010-2012 TMate Software Ltd
+ * Copyright (c) 2010-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -18,25 +18,23 @@
 
 import static org.tmatesoft.hg.repo.HgInternals.wrongRevisionIndex;
 import static org.tmatesoft.hg.repo.HgRepository.*;
-import static org.tmatesoft.hg.util.LogFacility.Severity.*;
+import static org.tmatesoft.hg.util.LogFacility.Severity.Info;
 
-import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 
 import org.tmatesoft.hg.core.HgChangesetFileSneaker;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.DataAccess;
+import org.tmatesoft.hg.internal.FileUtils;
 import org.tmatesoft.hg.internal.FilterByteChannel;
 import org.tmatesoft.hg.internal.FilterDataAccess;
-import org.tmatesoft.hg.internal.IntMap;
 import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.Metadata;
 import org.tmatesoft.hg.internal.RevlogStream;
 import org.tmatesoft.hg.util.ByteChannel;
 import org.tmatesoft.hg.util.CancelSupport;
@@ -67,12 +65,7 @@
 	private Metadata metadata; // get initialized on first access to file content.
 	
 	/*package-local*/HgDataFile(HgRepository hgRepo, Path filePath, RevlogStream content) {
-		super(hgRepo, content);
-		path = filePath;
-	}
-
-	/*package-local*/HgDataFile(HgRepository hgRepo, Path filePath) {
-		super(hgRepo);
+		super(hgRepo, content, false);
 		path = filePath;
 	}
 
@@ -80,7 +73,7 @@
 	// it might be confused with files existed before but lately removed. TODO HgFileNode.exists makes more sense.
 	// or HgDataFile.known()
 	public boolean exists() {
-		return content != null; // XXX need better impl
+		return content.exists();
 	}
 
 	/**
@@ -163,9 +156,10 @@
 			final int bsize = (int) Math.min(flength, 32*1024);
 			progress.start((int) (flength > Integer.MAX_VALUE ? flength >>> 15 /*32 kb buf size*/ : flength));
 			ByteBuffer buf = ByteBuffer.allocate(bsize);
-			FileChannel fc = null;
+			FileInputStream fis = null;
 			try {
-				fc = new FileInputStream(f).getChannel();
+				fis = new FileInputStream(f);
+				FileChannel fc = fis.getChannel();
 				while (fc.read(buf) != -1) {
 					cs.checkCancelled();
 					buf.flip();
@@ -177,12 +171,8 @@
 				throw new HgInvalidFileException("Working copy read failed", ex, f);
 			} finally {
 				progress.done();
-				if (fc != null) {
-					try {
-						fc.close();
-					} catch (IOException ex) {
-						getRepo().getSessionContext().getLog().dump(getClass(), Warn, ex, null);
-					}
+				if (fis != null) {
+					new FileUtils(getRepo().getSessionContext().getLog(), this).closeQuietly(fis);
 				}
 			}
 		} else {
@@ -199,8 +189,10 @@
 	
 	/**
 	 * @return file revision as recorded in repository manifest for dirstate parent, or <code>null</code> if no file revision can be found 
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	private Nodeid getWorkingCopyRevision() throws HgInvalidControlFileException {
+	private Nodeid getWorkingCopyRevision() throws HgRuntimeException {
 		final Pair<Nodeid, Nodeid> wcParents = getRepo().getWorkingCopyParents();
 		Nodeid p = wcParents.first().isNull() ? wcParents.second() : wcParents.first();
 		final HgChangelog clog = getRepo().getChangelog();
@@ -222,7 +214,7 @@
 				csetRevIndex = clog.getLastRevision();
 			} else {
 				// bad luck, need to search honestly
-				csetRevIndex = getRepo().getChangelog().getRevisionIndex(p);
+				csetRevIndex = clog.getRevisionIndex(p);
 			}
 		}
 		Nodeid fileRev = getRepo().getManifest().getFileRevision(csetRevIndex, getPath());
@@ -279,7 +271,7 @@
 			throw new IllegalArgumentException();
 		}
 		if (metadata == null) {
-			metadata = new Metadata();
+			metadata = new Metadata(getRepo());
 		}
 		ErrorHandlingInspector insp;
 		final LogFacility lf = getRepo().getSessionContext().getLog();
@@ -289,7 +281,7 @@
 			insp = new ContentPipe(sink, metadata.dataOffset(fileRevisionIndex), lf);
 		} else {
 			// do not know if there's metadata
-			insp = new MetadataInspector(metadata, lf, new ContentPipe(sink, 0, lf));
+			insp = new MetadataInspector(metadata, new ContentPipe(sink, 0, lf));
 		}
 		insp.checkCancelled();
 		super.content.iterate(fileRevisionIndex, fileRevisionIndex, true, insp);
@@ -429,7 +421,7 @@
 		int changesetRevIndex = getChangesetRevisionIndex(fileRevisionIndex);
 		return getRepo().getManifest().getFileFlags(changesetRevIndex, getPath());
 	}
-	
+
 	@Override
 	public String toString() {
 		StringBuilder sb = new StringBuilder(getClass().getSimpleName());
@@ -439,136 +431,40 @@
 		return sb.toString();
 	}
 	
-	private void checkAndRecordMetadata(int localRev) throws HgInvalidControlFileException {
+	private void checkAndRecordMetadata(int localRev) throws HgRuntimeException {
 		if (metadata == null) {
-			metadata = new Metadata();
+			metadata = new Metadata(getRepo());
 		}
 		// use MetadataInspector without delegate to process metadata only
-		RevlogStream.Inspector insp = new MetadataInspector(metadata, getRepo().getSessionContext().getLog(), null);
+		RevlogStream.Inspector insp = new MetadataInspector(metadata, null);
 		super.content.iterate(localRev, localRev, true, insp);
 	}
 
-	private static final class MetadataEntry {
-		private final String entry;
-		private final int valueStart;
-
-		// key may be null
-		/*package-local*/MetadataEntry(String key, String value) {
-			if (key == null) {
-				entry = value;
-				valueStart = -1; // not 0 to tell between key == null and key == ""
-			} else {
-				entry = key + value;
-				valueStart = key.length();
-			}
-		}
-		/*package-local*/boolean matchKey(String key) {
-			return key == null ? valueStart == -1 : key.length() == valueStart && entry.startsWith(key);
-		}
-//		uncomment once/if needed
-//		public String key() {
-//			return entry.substring(0, valueStart);
-//		}
-		public String value() {
-			return valueStart == -1 ? entry : entry.substring(valueStart);
-		}
-	}
-
-	private static class Metadata {
-		private static class Record {
-			public final int offset;
-			public final MetadataEntry[] entries;
-			
-			public Record(int off, MetadataEntry[] entr) {
-				offset = off;
-				entries = entr;
-			}
-		}
-		// XXX sparse array needed
-		private final IntMap<Record> entries = new IntMap<Record>(5);
-		
-		private final Record NONE = new Record(-1, null); // don't want statics
-
-		// true when there's metadata for given revision
-		boolean known(int revision) {
-			Record i = entries.get(revision);
-			return i != null && NONE != i;
-		}
-
-		// true when revision has been checked for metadata presence.
-		public boolean checked(int revision) {
-			return entries.containsKey(revision);
-		}
-
-		// true when revision has been checked and found not having any metadata
-		boolean none(int revision) {
-			Record i = entries.get(revision);
-			return i == NONE;
-		}
-
-		// mark revision as having no metadata.
-		void recordNone(int revision) {
-			Record i = entries.get(revision);
-			if (i == NONE) {
-				return; // already there
-			} 
-			if (i != null) {
-				throw new IllegalStateException(String.format("Trying to override Metadata state for revision %d (known offset: %d)", revision, i));
-			}
-			entries.put(revision, NONE);
-		}
-
-		// since this is internal class, callers are supposed to ensure arg correctness (i.e. ask known() before)
-		int dataOffset(int revision) {
-			return entries.get(revision).offset;
-		}
-		void add(int revision, int dataOffset, Collection<MetadataEntry> e) {
-			assert !entries.containsKey(revision);
-			entries.put(revision, new Record(dataOffset, e.toArray(new MetadataEntry[e.size()])));
-		}
-
-		String find(int revision, String key) {
-			for (MetadataEntry me : entries.get(revision).entries) {
-				if (me.matchKey(key)) {
-					return me.value();
-				}
-			}
-			return null;
-		}
-	}
-
 	private static class MetadataInspector extends ErrorHandlingInspector implements RevlogStream.Inspector {
 		private final Metadata metadata;
 		private final RevlogStream.Inspector delegate;
-		private final LogFacility log;
 
 		/**
 		 * @param _metadata never <code>null</code>
-		 * @param logFacility log facility from the session context
 		 * @param chain <code>null</code> if no further data processing other than metadata is desired
 		 */
-		public MetadataInspector(Metadata _metadata, LogFacility logFacility, RevlogStream.Inspector chain) {
+		public MetadataInspector(Metadata _metadata, RevlogStream.Inspector chain) {
 			metadata = _metadata;
-			log = logFacility;
 			delegate = chain;
 			setCancelSupport(CancelSupport.Factory.get(chain));
 		}
 
-		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
+		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
 			try {
-				final int daLength = data.length();
-				if (daLength < 4 || data.readByte() != 1 || data.readByte() != 10) {
-					metadata.recordNone(revisionNumber);
-					data.reset();
-				} else {
-					ArrayList<MetadataEntry> _metadata = new ArrayList<MetadataEntry>();
-					int offset = parseMetadata(data, daLength, _metadata);
-					metadata.add(revisionNumber, offset, _metadata);
+				if (metadata.tryRead(revisionNumber, data)) {
 					// da is in prepared state (i.e. we consumed all bytes up to metadata end).
 					// However, it's not safe to assume delegate won't call da.reset() for some reason,
 					// and we need to ensure predictable result.
 					data.reset();
-					data = new FilterDataAccess(data, offset, daLength - offset);
+					int offset = metadata.dataOffset(revisionNumber);
+					data = new FilterDataAccess(data, offset, data.length() - offset);
+				} else {
+					data.reset();
 				}
 				if (delegate != null) {
 					delegate.next(revisionNumber, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, nodeid, data);
@@ -576,68 +472,10 @@
 			} catch (IOException ex) {
 				recordFailure(ex);
 			} catch (HgInvalidControlFileException ex) {
-				// TODO RevlogStream, where this RevlogStream.Inspector goes, shall set File (as it's the only one having access to it)
 				recordFailure(ex.isRevisionIndexSet() ? ex : ex.setRevisionIndex(revisionNumber));
 			}
 		}
 
-		private int parseMetadata(DataAccess data, final int daLength, ArrayList<MetadataEntry> _metadata) throws IOException, HgInvalidControlFileException {
-			int lastEntryStart = 2;
-			int lastColon = -1;
-			// XXX in fact, need smth like ByteArrayBuilder, similar to StringBuilder,
-			// which can't be used here because we can't convert bytes to chars as we read them
-			// (there might be multi-byte encoding), and we need to collect all bytes before converting to string 
-			ByteArrayOutputStream bos = new ByteArrayOutputStream();
-			String key = null, value = null;
-			boolean byteOne = false;
-			boolean metadataIsComplete = false;
-			for (int i = 2; i < daLength; i++) {
-				byte b = data.readByte();
-				if (b == '\n') {
-					if (byteOne) { // i.e. \n follows 1
-						lastEntryStart = i+1;
-						metadataIsComplete = true;
-						// XXX is it possible to have here incomplete key/value (i.e. if last pair didn't end with \n)
-						// if yes, need to set metadataIsComplete to true in that case as well
-						break;
-					}
-					if (key == null || lastColon == -1 || i <= lastColon) {
-						log.dump(getClass(), Error, "Missing key in file revision metadata at index %d", i);
-					}
-					value = new String(bos.toByteArray()).trim();
-					bos.reset();
-					_metadata.add(new MetadataEntry(key, value));
-					key = value = null;
-					lastColon = -1;
-					lastEntryStart = i+1;
-					continue;
-				} 
-				// byteOne has to be consumed up to this line, if not yet, consume it
-				if (byteOne) {
-					// insert 1 we've read on previous step into the byte builder
-					bos.write(1);
-					byteOne = false;
-					// fall-through to consume current byte
-				}
-				if (b == (int) ':') {
-					assert value == null;
-					key = new String(bos.toByteArray());
-					bos.reset();
-					lastColon = i;
-				} else if (b == 1) {
-					byteOne = true;
-				} else {
-					bos.write(b);
-				}
-			}
-			// data.isEmpty is not reliable, renamed files of size==0 keep only metadata
-			if (!metadataIsComplete) {
-				// XXX perhaps, worth a testcase (empty file, renamed, read or ask ifCopy
-				throw new HgInvalidControlFileException("Metadata is not closed properly", null, null);
-			}
-			return lastEntryStart;
-		}
-
 		@Override
 		public void checkFailed() throws HgRuntimeException, IOException, CancelledException {
 			super.checkFailed();
--- a/src/org/tmatesoft/hg/repo/HgIgnore.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgIgnore.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,6 +16,9 @@
  */
 package org.tmatesoft.hg.repo;
 
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.HgIgnore;
+import static org.tmatesoft.hg.util.LogFacility.Severity.Warn;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileReader;
@@ -26,6 +29,8 @@
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
+import org.tmatesoft.hg.internal.FileChangeMonitor;
+import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.PathRewrite;
 
@@ -39,21 +44,47 @@
 
 	private List<Pattern> entries;
 	private final PathRewrite globPathHelper;
+	private FileChangeMonitor ignoreFileTracker; 
 
 	HgIgnore(PathRewrite globPathRewrite) {
 		entries = Collections.emptyList();
 		globPathHelper = globPathRewrite;
 	}
 
-	/* package-local */ List<String> read(File hgignoreFile) throws IOException {
-		if (!hgignoreFile.exists()) {
-			return null;
+	/* package-local */ void read(Internals repo) throws HgInvalidControlFileException {
+		File ignoreFile = repo.getRepositoryFile(HgIgnore);
+		BufferedReader fr = null;
+		try {
+			if (ignoreFile.canRead() && ignoreFile.isFile()) {
+				fr = new BufferedReader(new FileReader(ignoreFile));
+				final List<String> errors = read(fr);
+				if (errors != null) {
+					repo.getLog().dump(getClass(), Warn, "Syntax errors parsing %s:\n%s", ignoreFile.getName(), Internals.join(errors, ",\n"));
+				}
+			}
+			if (ignoreFileTracker == null) {
+				ignoreFileTracker = new FileChangeMonitor(ignoreFile);
+			}
+			ignoreFileTracker.touch(this);
+		} catch (IOException ex) {
+			final String m = String.format("Error reading %s file", ignoreFile);
+			throw new HgInvalidControlFileException(m, ex, ignoreFile);
+		} finally {
+			try {
+				if (fr != null) {
+					fr.close();
+				}
+			} catch (IOException ex) {
+				repo.getLog().dump(getClass(), Warn, ex, null); // it's read, don't treat as error
+			}
 		}
-		BufferedReader fr = new BufferedReader(new FileReader(hgignoreFile));
-		try {
-			return read(fr);
-		} finally {
-			fr.close();
+	}
+	
+	/*package-local*/ void reloadIfChanged(Internals repo) throws HgInvalidControlFileException {
+		assert ignoreFileTracker != null;
+		if (ignoreFileTracker.changed(this)) {
+			entries = Collections.emptyList();
+			read(repo);
 		}
 	}
 
--- a/src/org/tmatesoft/hg/repo/HgInternals.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgInternals.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -144,7 +144,7 @@
 		// Impl note: simple source is enough as files in the working dir are all unique
 		// even if they might get reused (i.e. after FileIterator#reset() and walking once again),
 		// path caching is better to be done in the code which knows that path are being reused 
-		return new FileWalker(repo.getSessionContext(), repoRoot, pathSrc, workindDirScope);
+		return new FileWalker(repo, repoRoot, pathSrc, workindDirScope);
 	}
 	
 	// Convenient check of revision index for validity (not all negative values are wrong as long as we use negative constants)
--- a/src/org/tmatesoft/hg/repo/HgInvalidControlFileException.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgInvalidControlFileException.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -18,6 +18,7 @@
 
 import java.io.File;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.util.Path;
 
@@ -35,6 +36,13 @@
 	public HgInvalidControlFileException(String message, Throwable th, File file) {
 		super(message, th, file);
 	}
+	
+	public HgInvalidControlFileException(HgIOException ex, boolean replaceStackTrace) {
+		super(ex.getMessage(), ex.getCause(), ex.getFile());
+		if (replaceStackTrace) {
+			setStackTrace(ex.getStackTrace());
+		}
+	}
 
 	@Override
 	public HgInvalidControlFileException setFile(File file) {
--- a/src/org/tmatesoft/hg/repo/HgInvalidFileException.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgInvalidFileException.java	Wed Jul 10 11:48:55 2013 +0200
@@ -19,6 +19,8 @@
 import java.io.File;
 import java.io.IOException;
 
+import org.tmatesoft.hg.util.Path;
+
 /**
  * Thrown when there are troubles working with local file. Most likely (but not necessarily) wraps IOException. Might be 
  * perceived as specialized IOException with optional File and other repository information.
@@ -59,6 +61,12 @@
 		details.setFile(file);
 		return this;
 	}
+	
+	@Override
+	public HgInvalidFileException setFileName(Path name) {
+		super.setFileName(name);
+		return this;
+	}
 
 	/**
 	 * @return file object that causes troubles, or <code>null</code> if specific file is unknown
--- a/src/org/tmatesoft/hg/repo/HgLookup.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgLookup.java	Wed Jul 10 11:48:55 2013 +0200
@@ -23,6 +23,7 @@
 import java.net.URL;
 
 import org.tmatesoft.hg.core.HgBadArgumentException;
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.HgRepositoryNotFoundException;
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.internal.BasicSessionContext;
@@ -38,7 +39,7 @@
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class HgLookup {
+public class HgLookup implements SessionContext.Source {
 
 	private ConfigFile globalCfg;
 	private SessionContext sessionContext;
@@ -80,20 +81,26 @@
 		if (repository == null) {
 			throw new HgRepositoryNotFoundException(String.format("Can't locate .hg/ directory of Mercurial repository in %s nor in parent dirs", location)).setLocation(location.getPath());
 		}
-		String repoPath = repository.getParentFile().getAbsolutePath();
-		HgRepository rv = new HgRepository(getContext(), repoPath, repository);
-		int requiresFlags = rv.getImplHelper().getRequiresFlags();
-		if ((requiresFlags & RequiresFile.REVLOGV1) == 0) {
-			throw new HgRepositoryNotFoundException(String.format("%s: repository version is not supported (Mercurial <0.9?)", repoPath)).setLocation(location.getPath());
+		try {
+			String repoPath = repository.getParentFile().getAbsolutePath();
+			HgRepository rv = new HgRepository(getSessionContext(), repoPath, repository);
+			int requiresFlags = rv.getImplHelper().getRequiresFlags();
+			if ((requiresFlags & RequiresFile.REVLOGV1) == 0) {
+				throw new HgRepositoryNotFoundException(String.format("%s: repository version is not supported (Mercurial <0.9?)", repoPath)).setLocation(location.getPath());
+			}
+			return rv;
+		} catch (HgRuntimeException ex) {
+			HgRepositoryNotFoundException e = new HgRepositoryNotFoundException("Failed to initialize Hg4J library").setLocation(location.getPath());
+			e.initCause(ex);
+			throw e;
 		}
-		return rv;
 	}
 	
 	public HgBundle loadBundle(File location) throws HgRepositoryNotFoundException {
 		if (location == null || !location.canRead()) {
 			throw new HgRepositoryNotFoundException(String.format("Can't read file %s", location)).setLocation(String.valueOf(location));
 		}
-		return new HgBundle(getContext(), new DataAccessProvider(getContext()), location).link();
+		return new HgBundle(getSessionContext(), new DataAccessProvider(getSessionContext()), location).link();
 	}
 	
 	/**
@@ -118,7 +125,7 @@
 			String server = null;
 			if (hgRepo != null && !hgRepo.isInvalid()) {
 				PathsSection ps = hgRepo.getConfiguration().getPaths();
-				server = key == null || key.trim().isEmpty() ? ps.getDefault() : ps.getString(key, null);
+				server = key == null || key.trim().length() == 0 ? ps.getDefault() : ps.getString(key, null); // XXX Java 1.5 isEmpty() 
 			} else if (key == null || key.trim().length() == 0) {
 				throw new HgBadArgumentException("Can't look up empty key in a global configuration", null);
 			}
@@ -134,7 +141,7 @@
 				throw new HgBadArgumentException(String.format("Found %s server spec in the config, but failed to initialize with it", key), ex);
 			}
 		}
-		return new HgRemoteRepository(getContext(), url);
+		return new HgRemoteRepository(getSessionContext(), url);
 	}
 	
 	public HgRemoteRepository detect(URL url) throws HgBadArgumentException {
@@ -144,23 +151,23 @@
 		if (Boolean.FALSE.booleanValue()) {
 			throw Internals.notImplemented();
 		}
-		return new HgRemoteRepository(getContext(), url);
+		return new HgRemoteRepository(getSessionContext(), url);
 	}
 
 	private ConfigFile getGlobalConfig() {
 		if (globalCfg == null) {
-			globalCfg = new ConfigFile(getContext());
+			globalCfg = new ConfigFile(getSessionContext());
 			try {
 				globalCfg.addLocation(new File(System.getProperty("user.home"), ".hgrc"));
-			} catch (HgInvalidFileException ex) {
+			} catch (HgIOException ex) {
 				// XXX perhaps, makes sense to let caller/client know that we've failed to read global config? 
-				getContext().getLog().dump(getClass(), Warn, ex, null);
+				getSessionContext().getLog().dump(getClass(), Warn, ex, null);
 			}
 		}
 		return globalCfg;
 	}
 
-	private SessionContext getContext() {
+	public SessionContext getSessionContext() {
 		if (sessionContext == null) {
 			sessionContext = new BasicSessionContext(null);
 		}
--- a/src/org/tmatesoft/hg/repo/HgManifest.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgManifest.java	Wed Jul 10 11:48:55 2013 +0200
@@ -21,7 +21,6 @@
 import static org.tmatesoft.hg.util.LogFacility.Severity.Info;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Arrays;
 
 import org.tmatesoft.hg.core.HgChangesetFileSneaker;
@@ -33,9 +32,12 @@
 import org.tmatesoft.hg.internal.EncodingHelper;
 import org.tmatesoft.hg.internal.IdentityPool;
 import org.tmatesoft.hg.internal.IntMap;
+import org.tmatesoft.hg.internal.IntVector;
 import org.tmatesoft.hg.internal.IterateControlMediator;
 import org.tmatesoft.hg.internal.Lifecycle;
+import org.tmatesoft.hg.internal.RevisionLookup;
 import org.tmatesoft.hg.internal.RevlogStream;
+import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.LogFacility.Severity;
 import org.tmatesoft.hg.util.Path;
@@ -53,6 +55,14 @@
 	private RevisionMapper revisionMap;
 	private final EncodingHelper encodingHelper;
 	private final Path.Source pathFactory; 
+	private final RevlogStream.Observer revisionMapCleaner = new RevlogStream.Observer() {
+		public void reloaded(RevlogStream src) {
+			revisionMap = null;
+			// TODO RevlogDerivedCache<T> class, to wrap revisionMap and super.revisionLookup
+			// and their respective cleanup observers, or any other all-in-one alternative
+			// not to keep both field and it's cleaner
+		}
+	};
 	
 	/**
 	 * File flags recorded in manifest
@@ -99,7 +109,19 @@
 			}
 			throw new IllegalStateException(new String(data, start, length));
 		}
-
+		
+		static Flags parse(int dirstateFileMode) {
+			// source/include/linux/stat.h
+			final int S_IFLNK = 0120000, S_IXUSR = 00100;
+			if ((dirstateFileMode & S_IFLNK) == S_IFLNK) {
+				return Link;
+			}
+			if ((dirstateFileMode & S_IXUSR) == S_IXUSR) {
+				return Exec;
+			}
+			return RegularFile;
+		}
+		
 		String nativeString() {
 			if (this == Exec) {
 				return "x";
@@ -122,7 +144,7 @@
 	}
 
 	/*package-local*/ HgManifest(HgRepository hgRepo, RevlogStream content, EncodingHelper eh) {
-		super(hgRepo, content);
+		super(hgRepo, content, true);
 		encodingHelper = eh;
 		pathFactory = hgRepo.getSessionContext().getPathFactory();
 	}
@@ -215,7 +237,9 @@
 	 * 
 	 * @param inspector manifest revision visitor, can't be <code>null</code>
 	 * @param revisionIndexes local indexes of changesets to visit, non-<code>null</code>
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidRevisionException if method argument specifies non-existent revision index. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 * @throws InvalidArgumentException if supplied arguments are <code>null</code>s
 	 */
 	public void walk(final Inspector inspector, int... revisionIndexes) throws HgRuntimeException, IllegalArgumentException {
@@ -231,10 +255,11 @@
 	 * Tells manifest revision number that corresponds to the given changeset. May return {@link HgRepository#BAD_REVISION} 
 	 * if changeset has no associated manifest (cset records NULL nodeid for manifest).
 	 * @return manifest revision index, non-negative, or {@link HgRepository#BAD_REVISION}.
-	 * @throws HgInvalidRevisionException if method argument specifies non-existent revision index
-	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
+	 * @throws HgInvalidRevisionException if method argument specifies non-existent revision index. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	/*package-local*/ int fromChangelog(int changesetRevisionIndex) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	/*package-local*/ int fromChangelog(int changesetRevisionIndex) throws HgRuntimeException {
 		if (HgInternals.wrongRevisionIndex(changesetRevisionIndex)) {
 			throw new HgInvalidRevisionException(changesetRevisionIndex);
 		}
@@ -242,9 +267,24 @@
 			throw new HgInvalidRevisionException("Can't use constants like WORKING_COPY or BAD_REVISION", null, changesetRevisionIndex);
 		}
 		// revisionNumber == TIP is processed by RevisionMapper 
-		if (revisionMap == null) {
-			revisionMap = new RevisionMapper(getRepo());
-			content.iterate(0, TIP, false, revisionMap);
+		if (revisionMap == null || content.shallDropDerivedCaches()) {
+			content.detach(revisionMapCleaner);
+			final boolean buildOwnLookup = super.revisionLookup == null;
+			RevisionMapper rmap = new RevisionMapper(buildOwnLookup);
+			content.iterate(0, TIP, false, rmap);
+			rmap.fixReusedManifests();
+			if (buildOwnLookup && super.useRevisionLookup) {
+				// reuse RevisionLookup if there's none yet
+				super.setRevisionLookup(rmap.manifestNodeids);
+			}
+			rmap.manifestNodeids = null;
+			revisionMap = rmap;
+			// although in most cases a modified manifest is accessed through one of the methods in this class
+			// and hence won't have had a chance, up to this moment, to be reloaded via revisionMapCleaner
+			// (RevlogStream sends events on attempt to read revlog, and so far we haven't tried to read anything),
+			// it's still reasonable to have this cleaner attached, just in case any method from Revlog base class
+			// has been called (e.g. getLastRevision())
+			content.attach(revisionMapCleaner);
 		}
 		return revisionMap.at(changesetRevisionIndex);
 	}
@@ -259,9 +299,11 @@
 	 * @param changelogRevisionIndex local changeset index 
 	 * @param file path to file in question
 	 * @return file revision or <code>null</code> if manifest at specified revision doesn't list such file
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidRevisionException if supplied revision doesn't identify any revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	public Nodeid getFileRevision(int changelogRevisionIndex, final Path file) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	public Nodeid getFileRevision(int changelogRevisionIndex, final Path file) throws HgRuntimeException {
 		// there's no need for HgDataFile to own this method, or get a delegate
 		// as most of HgDataFile API is using file revision indexes, and there's easy step from file revision index to
 		// both file revision and changeset revision index. But there's no easy way to go from changesetRevisionIndex to
@@ -293,7 +335,9 @@
 	 * @param file path of interest
 	 * @param inspector callback to receive details about selected file
 	 * @param changelogRevisionIndexes changeset indexes to visit
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidRevisionException if supplied revision doesn't identify revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
 	public void walkFileRevisions(Path file, Inspector inspector, int... changelogRevisionIndexes) throws HgRuntimeException {
 		if (file == null || inspector == null || changelogRevisionIndexes == null) {
@@ -312,9 +356,11 @@
 	 * @param changesetRevIndex changeset revision index
 	 * @param file path to look up
 	 * @return one of predefined enum values, or <code>null</code> if file was not known in the specified revision
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidRevisionException if supplied revision doesn't identify revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	public Flags getFileFlags(int changesetRevIndex, Path file) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	public Flags getFileFlags(int changesetRevIndex, Path file) throws HgRuntimeException {
 		int manifestRevIdx = fromChangelog(changesetRevIndex);
 		IntMap<Flags> resMap = new IntMap<Flags>(2);
 		FileLookupInspector parser = new FileLookupInspector(encodingHelper, file, null, resMap);
@@ -325,14 +371,23 @@
 	}
 
 
+	/*package-local*/ void dropCachesOnChangelogChange() {
+		// sort of a hack as it may happen that #fromChangelog()
+		// is invoked for modified repository where revisionMap still points to an old state
+		// Since there's no access to RevlogStream in #fromChangelog() if there's revisionMap 
+		// in place, there's no chance for RevlogStream to detect the change and to dispatch 
+		// change notification so that revisionMap got cleared.
+		revisionMap = null;
+	}
+
 	/**
 	 * @param changelogRevisionIndexes non-null
 	 * @param inspector may be null if reporting of missing manifests is not needed
-	 * @throws HgInvalidRevisionException if arguments specify non-existent revision index
-	 * @throws IllegalArgumentException if any index argument is not a revision index
-	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
+	 * @throws HgInvalidRevisionException if supplied revision doesn't identify revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	private int[] toManifestRevisionIndexes(int[] changelogRevisionIndexes, Inspector inspector) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	private int[] toManifestRevisionIndexes(int[] changelogRevisionIndexes, Inspector inspector) throws HgRuntimeException {
 		int[] manifestRevs = new int[changelogRevisionIndexes.length];
 		boolean needsSort = false;
 		int j = 0;
@@ -375,8 +430,9 @@
 		 * @param manifestRevision revision of the manifest we're about to iterate through
 		 * @param changelogRevisionIndex local revision index of changelog this manifest points to 
 		 * @return <code>true</code> to continue iteration, <code>false</code> to stop
+		 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 		 */
-		boolean begin(int manifestRevisionIndex, Nodeid manifestRevision, int changelogRevisionIndex);
+		boolean begin(int manifestRevisionIndex, Nodeid manifestRevision, int changelogRevisionIndex) throws HgRuntimeException;
 
 		
 		/**
@@ -386,16 +442,18 @@
 		 * @param fname file name
 		 * @param flags one of {@link HgManifest.Flags} constants, not <code>null</code>
 		 * @return <code>true</code> to continue iteration, <code>false</code> to stop
+		 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 		 */
-		boolean next(Nodeid nid, Path fname, Flags flags);
+		boolean next(Nodeid nid, Path fname, Flags flags) throws HgRuntimeException;
 
 		/**
 		 * Denotes leaving specific manifest revision, after all entries were reported using {@link #next(Nodeid, Path, Flags)}
 		 *   
 		 * @param manifestRevisionIndex indicates manifest revision, corresponds to opening {@link #begin(int, Nodeid, int)}
 		 * @return <code>true</code> to continue iteration, <code>false</code> to stop
+		 * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em>
 		 */
-		boolean end(int manifestRevisionIndex);
+		boolean end(int manifestRevisionIndex) throws HgRuntimeException;
 	}
 	
 	/**
@@ -485,7 +543,7 @@
 			progressHelper = ProgressSupport.Factory.get(delegate);
 		}
 		
-		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) {
+		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) throws HgRuntimeException {
 			try {
 				if (!inspector.begin(revisionNumber, new Nodeid(nodeid, true), linkRevision)) {
 					iterateControl.stop();
@@ -571,17 +629,19 @@
 		}
 	}
 	
-	private static class RevisionMapper implements RevlogStream.Inspector, Lifecycle {
+	private class RevisionMapper implements RevlogStream.Inspector, Lifecycle {
 		
 		private final int changelogRevisionCount;
 		private int[] changelog2manifest;
-		private final HgRepository repo;
+		RevisionLookup manifestNodeids;
 
-		public RevisionMapper(HgRepository hgRepo) {
-			repo = hgRepo;
-			changelogRevisionCount = repo.getChangelog().getRevisionCount();
+		private RevisionMapper(boolean useOwnRevisionLookup) throws HgRuntimeException {
+			changelogRevisionCount = HgManifest.this.getRepo().getChangelog().getRevisionCount();
+			if (useOwnRevisionLookup) {
+				manifestNodeids = new RevisionLookup(HgManifest.this.content);
+			}
 		}
-
+		
 		/**
 		 * Get index of manifest revision that corresponds to specified changeset
 		 * @param changesetRevisionIndex non-negative index of changelog revision, or {@link HgRepository#TIP}
@@ -601,10 +661,10 @@
 			return changesetRevisionIndex;
 		}
 
-		// XXX likely can be replaced with Revlog.RevisionInspector
+		// XXX can be replaced with Revlog.RevisionInspector, but I don't want Nodeid instances
 		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgInvalidRevisionException {
 			if (linkRevision >= changelogRevisionCount) {
-				String storeLock = repo.getStoreLock().readLockInfo();
+				String storeLock = HgManifest.this.getRepo().getStoreLock().readLockInfo();
 				String message = String.format("Manifest revision %d references changeset %d, which is beyond known scope [0..%d). Lock: %s", revisionNumber, linkRevision, changelogRevisionCount, storeLock);
 				throw new HgInvalidRevisionException(message, null, linkRevision);
 			}
@@ -623,6 +683,9 @@
 					changelog2manifest[linkRevision] = revisionNumber;
 				}
 			}
+			if (manifestNodeids != null) {
+				manifestNodeids.next(revisionNumber, nodeid);
+			}
 		}
 		
 		public void start(int count, Callback callback, Object token) {
@@ -635,28 +698,56 @@
 				changelog2manifest = new int[changelogRevisionCount];
 				Arrays.fill(changelog2manifest, BAD_REVISION);
 			}
+			if (manifestNodeids != null) {
+				manifestNodeids.prepare(count);
+			}
 		}
 
 		public void finish(Object token) {
+			// it's not a nice idea to fix changesets that reuse existing manifest entries from inside
+			// #finish, as the manifest read operation is not complete at the moment.
+		}
+		
+		public void fixReusedManifests() throws HgRuntimeException {
 			if (changelog2manifest == null) {
+				// direct, 1-1 mapping of changeset indexes to manifest
 				return;
 			}
 			// I assume there'd be not too many revisions we don't know manifest of
-			ArrayList<Integer> undefinedChangelogRevision = new ArrayList<Integer>();
+			IntVector undefinedChangelogRevision = new IntVector();
 			for (int i = 0; i < changelog2manifest.length; i++) {
 				if (changelog2manifest[i] == BAD_REVISION) {
 					undefinedChangelogRevision.add(i);
 				}
 			}
-			for (int u : undefinedChangelogRevision) {
-				Nodeid manifest = repo.getChangelog().range(u, u).get(0).manifest();
-				// TODO calculate those missing effectively (e.g. cache and sort nodeids to speed lookup
-				// right away in the #next (may refactor ParentWalker's sequential and sorted into dedicated helper and reuse here)
-				if (manifest.isNull()) {
-					repo.getSessionContext().getLog().dump(getClass(), Severity.Warn, "Changeset %d has no associated manifest entry", u);
-					// keep -1 in the changelog2manifest map.
-				} else {
-					changelog2manifest[u] = repo.getManifest().getRevisionIndex(manifest);
+			if (undefinedChangelogRevision.size() > 0) {
+				final IntMap<Nodeid> missingCsetToManifest = new IntMap<Nodeid>(undefinedChangelogRevision.size());
+				int[] undefinedClogRevs = undefinedChangelogRevision.toArray();
+				// undefinedChangelogRevision is sorted by the nature it's created
+				HgManifest.this.getRepo().getChangelog().rangeInternal(new HgChangelog.Inspector() {
+					
+					public void next(int revisionIndex, Nodeid nodeid, RawChangeset cset) {
+						missingCsetToManifest.put(revisionIndex, cset.manifest());
+					}
+				}, undefinedClogRevs);
+				assert missingCsetToManifest.size() == undefinedChangelogRevision.size();
+				for (int u : undefinedClogRevs) {
+					Nodeid manifest = missingCsetToManifest.get(u);
+					if (manifest == null || manifest.isNull()) {
+						HgManifest.this.getRepo().getSessionContext().getLog().dump(getClass(), Severity.Warn, "Changeset %d has no associated manifest entry", u);
+						// keep BAD_REVISION in the changelog2manifest map.
+						continue;
+					}
+					if (manifestNodeids != null) {
+						int manifestRevIndex = manifestNodeids.findIndex(manifest);
+						// mimic HgManifest#getRevisionIndex() to keep behavior the same 
+						if (manifestRevIndex == BAD_REVISION) {
+							throw new HgInvalidRevisionException(String.format("Can't find index of revision %s", manifest.shortNotation()), manifest, null);
+						}
+						changelog2manifest[u] = manifestRevIndex;
+					} else {
+						changelog2manifest[u] = HgManifest.this.getRevisionIndex(manifest);
+					}
 				}
 			}
 		}
@@ -694,15 +785,15 @@
 			csetIndex2Flags = null;
 		}
 		
-		void walk(int manifestRevIndex, RevlogStream content) {
+		void walk(int manifestRevIndex, RevlogStream content) throws HgRuntimeException {
 			content.iterate(manifestRevIndex, manifestRevIndex, true, this); 
 		}
 
-		void walk(int[] manifestRevIndexes, RevlogStream content) {
+		void walk(int[] manifestRevIndexes, RevlogStream content) throws HgRuntimeException {
 			content.iterate(manifestRevIndexes, true, this);
 		}
 		
-		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
+		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
 			ByteVector byteVector = new ByteVector(256, 128); // allocate for long paths right away
 			try {
 				byte b;
--- a/src/org/tmatesoft/hg/repo/HgMergeState.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgMergeState.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -185,9 +185,7 @@
 	 * @return <code>true</code> when recorded merge state doesn't seem to correspond to present working copy
 	 */
 	public boolean isStale() {
-		if (wcp1 == null) {
-			refresh();
-		}
+		assert wcp1 != null;
 		return !stateParent.isNull() /*there's merge state*/ && !wcp1.equals(stateParent) /*and it doesn't match*/; 
 	}
 
@@ -198,9 +196,7 @@
 	 * @return first parent of the working copy, never <code>null</code>
 	 */
 	public Nodeid getFirstParent() {
-		if (wcp1 == null) {
-			refresh();
-		}
+		assert wcp1 != null;
 		return wcp1;
 	}
 	
@@ -208,9 +204,7 @@
 	 * @return second parent of the working copy, never <code>null</code>
 	 */
 	public Nodeid getSecondParent() {
-		if (wcp2 == null) {
-			refresh();
-		}
+		assert wcp2 != null;
 		return wcp2;
 	}
 	
@@ -218,9 +212,7 @@
 	 * @return revision of the merge state or {@link Nodeid#NULL} if there's no merge state
 	 */
 	public Nodeid getStateParent() {
-		if (stateParent == null) {
-			refresh();
-		}
+		assert stateParent != null;
 		return stateParent;
 	}
 
--- a/src/org/tmatesoft/hg/repo/HgParentChildMap.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgParentChildMap.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -18,13 +18,18 @@
 
 import static org.tmatesoft.hg.repo.HgRepository.TIP;
 
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.BitSet;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.ArrayHelper;
+import org.tmatesoft.hg.internal.IntMap;
 import org.tmatesoft.hg.repo.Revlog.ParentInspector;
 
 /**
@@ -56,15 +61,18 @@
  */
 public final class HgParentChildMap<T extends Revlog> implements ParentInspector {
 
-	
+	// IMPORTANT: Nodeid instances shall be shared between all arrays
+
+	private final T revlog;
 	private Nodeid[] sequential; // natural repository order, childrenOf rely on ordering
-	private Nodeid[] sorted; // for binary search
-	private int[] sorted2natural;
-	private Nodeid[] firstParent;
+	private Nodeid[] sorted; // for binary search, just an origin of the actual value in use, the one inside seqWrapper
+	private Nodeid[] firstParent; // parents by natural order (i.e. firstParent[A] is parent of revision with index A)
 	private Nodeid[] secondParent;
-	private final T revlog;
+	private IntMap<Nodeid> heads;
+	private BitSet headsBitSet; // 1 indicates revision got children, != null only during init;
+	private HgRevisionMap<T> revisionIndexMap;
+	private ArrayHelper<Nodeid> seqWrapper; 
 
-	// Nodeid instances shall be shared between all arrays
 
 	public HgParentChildMap(T owner) {
 		revlog = owner;
@@ -82,31 +90,49 @@
 		sequential[ix] = sorted[ix] = revision;
 		if (parent1Revision != -1) {
 			firstParent[ix] = sequential[parent1Revision];
+			headsBitSet.set(parent1Revision);
 		}
 		if (parent2Revision != -1) { // revlog of DataAccess.java has p2 set when p1 is -1
 			secondParent[ix] = sequential[parent2Revision];
+			headsBitSet.set(parent2Revision);
 		}
 	}
 	
-	public void init() throws HgInvalidControlFileException {
+	/**
+	 * Prepare the map 
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
+	 */
+	public void init() throws HgRuntimeException {
 		final int revisionCount = revlog.getRevisionCount();
 		firstParent = new Nodeid[revisionCount];
-		// TODO [post 1.0] Branches/merges are less frequent, and most of secondParent would be -1/null, hence 
+		// TODO [post 1.1] Branches/merges are less frequent, and most of secondParent would be -1/null, hence 
 		// IntMap might be better alternative here, but need to carefully analyze (test) whether this brings
-		// real improvement (IntMap has 2n capacity, and element lookup is log(n) instead of array's constant)
+		// real improvement (IntMap has 2n capacity, and element lookup is log(n) instead of array's constant).
+		// FWIW: in cpython's repo, with 70k+ revisions, there are 2618 values in secondParent 
 		secondParent = new Nodeid[revisionCount];
 		//
 		sequential = new Nodeid[revisionCount];
-		sorted = new Nodeid[revisionCount];
+		sorted = new Nodeid[revisionCount]; 
+		headsBitSet = new BitSet(revisionCount);
 		revlog.indexWalk(0, TIP, this);
-		Arrays.sort(sorted);
-		sorted2natural = new int[revisionCount];
-		for (int i = 0; i < revisionCount; i++) {
-			Nodeid n = sequential[i];
-			int x = Arrays.binarySearch(sorted, n);
-			assertSortedIndex(x);
-			sorted2natural[x] = i;
-		}
+		seqWrapper = new ArrayHelper<Nodeid>(sequential);
+		// HgRevisionMap doesn't keep sorted, try alternative here.
+		// reference this.sorted (not only from ArrayHelper) helps to track ownership in hprof/mem dumps
+		seqWrapper.sort(sorted, false, true);
+		// no reason to keep BitSet, number of heads is usually small
+		IntMap<Nodeid> _heads = new IntMap<Nodeid>(headsBitSet.size() - headsBitSet.cardinality());
+		int index = 0;
+		while (index < sequential.length) {
+			index = headsBitSet.nextClearBit(index);
+			// nextClearBit(length-1) gives length when bit is set,
+			// however, last revision can't be a parent of any other, and
+			// the last bit would be always 0, and no AIOOBE 
+			_heads.put(index, sequential[index]);
+			index++;
+		} 
+		headsBitSet = null;
+		heads = _heads;
 	}
 	
 	private void assertSortedIndex(int x) {
@@ -122,16 +148,16 @@
 	 * @return <code>true</code> if revision matches any revision in this revlog
 	 */
 	public boolean knownNode(Nodeid nid) {
-		return Arrays.binarySearch(sorted, nid) >= 0;
+		return seqWrapper.binarySearchSorted(nid) >= 0;
 	}
 
 	/**
 	 * null if none. only known nodes (as per #knownNode) are accepted as arguments
 	 */
 	public Nodeid firstParent(Nodeid nid) {
-		int x = Arrays.binarySearch(sorted, nid);
+		int x = seqWrapper.binarySearchSorted(nid);
 		assertSortedIndex(x);
-		int i = sorted2natural[x];
+		int i = seqWrapper.getReverseIndex(x);
 		return firstParent[i];
 	}
 
@@ -142,9 +168,9 @@
 	}
 	
 	public Nodeid secondParent(Nodeid nid) {
-		int x = Arrays.binarySearch(sorted, nid);
+		int x = seqWrapper.binarySearchSorted(nid);
 		assertSortedIndex(x);
-		int i = sorted2natural[x];
+		int i = seqWrapper.getReverseIndex(x);
 		return secondParent[i];
 	}
 
@@ -154,9 +180,9 @@
 	}
 
 	public boolean appendParentsOf(Nodeid nid, Collection<Nodeid> c) {
-		int x = Arrays.binarySearch(sorted, nid);
+		int x = seqWrapper.binarySearchSorted(nid);
 		assertSortedIndex(x);
-		int i = sorted2natural[x];
+		int i = seqWrapper.getReverseIndex(x);
 		Nodeid p1 = firstParent[i];
 		boolean modified = false;
 		if (p1 != null) {
@@ -174,7 +200,10 @@
 	
 	// @return ordered collection of all children rooted at supplied nodes. Nodes shall not be descendants of each other!
 	// Nodeids shall belong to this revlog
-	public List<Nodeid> childrenOf(List<Nodeid> roots) {
+	public List<Nodeid> childrenOf(Collection<Nodeid> roots) {
+		if (roots.isEmpty()) {
+			return Collections.emptyList();
+		}
 		HashSet<Nodeid> parents = new HashSet<Nodeid>();
 		LinkedList<Nodeid> result = new LinkedList<Nodeid>();
 		int earliestRevision = Integer.MAX_VALUE;
@@ -182,9 +211,9 @@
 		// first, find earliest index of roots in question, as there's  no sense 
 		// to check children among nodes prior to branch's root node
 		for (Nodeid r : roots) {
-			int x = Arrays.binarySearch(sorted, r);
+			int x = seqWrapper.binarySearchSorted(r);
 			assertSortedIndex(x);
-			int i = sorted2natural[x];
+			int i = seqWrapper.getReverseIndex(x);
 			if (i < earliestRevision) {
 				earliestRevision = i;
 			}
@@ -203,11 +232,14 @@
 	 * @return revisions that have supplied revision as their immediate parent
 	 */
 	public List<Nodeid> directChildren(Nodeid nid) {
-		LinkedList<Nodeid> result = new LinkedList<Nodeid>();
-		int x = Arrays.binarySearch(sorted, nid);
+		int x = seqWrapper.binarySearchSorted(nid);
 		assertSortedIndex(x);
-		nid = sorted[x]; // canonical instance
-		int start = sorted2natural[x];
+		int start = seqWrapper.getReverseIndex(x);
+		nid = sequential[start]; // canonical instance
+		if (!hasChildren(start)) {
+			return Collections.emptyList();
+		}
+		ArrayList<Nodeid> result = new ArrayList<Nodeid>(5);
 		for (int i = start + 1; i < sequential.length; i++) {
 			if (nid == firstParent[i] || nid == secondParent[i]) {
 				result.add(sequential[i]);
@@ -221,22 +253,72 @@
 	 * @return <code>true</code> if there's any node in this revlog that has specified node as one of its parents. 
 	 */
 	public boolean hasChildren(Nodeid nid) {
-		int x = Arrays.binarySearch(sorted, nid);
+		int x = seqWrapper.binarySearchSorted(nid);
 		assertSortedIndex(x);
-		int i = sorted2natural[x];
-		assert firstParent.length == secondParent.length; // just in case later I implement sparse array for secondParent
-		assert firstParent.length == sequential.length;
-		// to use == instead of equals, take the same Nodeid instance we used to fill all the arrays.
-		final Nodeid canonicalNode = sequential[i];
-		i++; // no need to check node itself. child nodes may appear in sequential only after revision in question
-		for (; i < sequential.length; i++) {
-			// TODO [post 1.0] likely, not very effective. 
-			// May want to optimize it with another (Tree|Hash)Set, created on demand on first use, 
-			// however, need to be careful with memory usage
-			if (firstParent[i] == canonicalNode || secondParent[i] == canonicalNode) {
-				return true;
+		int i = seqWrapper.getReverseIndex(x);
+		return hasChildren(i);
+	}
+
+	/**
+	 * @return all revisions this map knows about
+	 */
+	public List<Nodeid> all() {
+		return Arrays.asList(sequential);
+	}
+
+	/**
+	 * Find out whether a given node is among descendants of another.
+	 * 
+	 * @param root revision to check for being (grand-)*parent of a child
+	 * @param wannaBeChild candidate descendant revision
+	 * @return <code>true</code> if <code>wannaBeChild</code> is among children of <code>root</code>
+	 */
+	public boolean isChild(Nodeid root, Nodeid wannaBeChild) {
+		int x = seqWrapper.binarySearchSorted(root);
+		assertSortedIndex(x);
+		final int start = seqWrapper.getReverseIndex(x);
+		root = sequential[start]; // canonical instance
+		if (!hasChildren(start)) {
+			return false; // root got no children at all
+		}
+		int y = seqWrapper.binarySearchSorted(wannaBeChild);
+		if (y < 0) {
+			return false; // not found
+		}
+		final int end = seqWrapper.getReverseIndex(y);
+		wannaBeChild = sequential[end]; // canonicalize
+		if (end <= start) {
+			return false; // potential child was in repository earlier than root
+		}
+		HashSet<Nodeid> parents = new HashSet<Nodeid>();
+		parents.add(root);
+		for (int i = start + 1; i < end; i++) {
+			if (parents.contains(firstParent[i]) || parents.contains(secondParent[i])) {
+				parents.add(sequential[i]); // collect ancestors line
 			}
 		}
-		return false;
+		return parents.contains(firstParent[end]) || parents.contains(secondParent[end]);
+	}
+	
+	/**
+	 * @return elements of this map that do not have a child recorded therein.
+	 */
+	public Collection<Nodeid> heads() {
+		return heads.values();
+	}
+	
+	/**
+	 * @return map of revision to indexes
+	 */
+	public HgRevisionMap<T> getRevisionMap() {
+		if (revisionIndexMap == null) {
+			revisionIndexMap = new HgRevisionMap<T>(revlog);
+			revisionIndexMap.init(seqWrapper);
+		}
+		return revisionIndexMap;
+	}
+
+	private boolean hasChildren(int sequentialIndex) {
+		return !heads.containsKey(sequentialIndex);
 	}
 }
\ No newline at end of file
--- a/src/org/tmatesoft/hg/repo/HgPhase.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgPhase.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -19,6 +19,7 @@
 /**
  * Phases for a changeset is a new functionality in Mercurial 2.1
  * 
+ * @since 1.1
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
@@ -57,4 +58,18 @@
 		}
 		throw new IllegalArgumentException(String.format("Bad phase name: %d", value));
 	}
+	
+	/**
+	 * @return integer value Mercurial uses to identify the phase
+	 */
+	public int mercurialOrdinal() {
+		if (this == Undefined) {
+			throw new IllegalStateException("Undefined phase is an artifical value, which doesn't possess a valid native mercurial ordinal");
+		}
+		return ordinal(); // what a coincidence
+	}
+	
+	public String mercurialString() {
+		return hgString;
+	}
 }
--- a/src/org/tmatesoft/hg/repo/HgRemoteRepository.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRemoteRepository.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -17,8 +17,11 @@
 package org.tmatesoft.hg.repo;
 
 import static org.tmatesoft.hg.util.LogFacility.Severity.Info;
+import static org.tmatesoft.hg.util.Outcome.Kind.Failure;
+import static org.tmatesoft.hg.util.Outcome.Kind.Success;
 
 import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -26,6 +29,8 @@
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.StreamTokenizer;
+import java.net.ContentHandler;
+import java.net.ContentHandlerFactory;
 import java.net.HttpURLConnection;
 import java.net.MalformedURLException;
 import java.net.URL;
@@ -53,16 +58,25 @@
 import javax.net.ssl.X509TrustManager;
 
 import org.tmatesoft.hg.core.HgBadArgumentException;
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.HgRemoteConnectionException;
 import org.tmatesoft.hg.core.HgRepositoryNotFoundException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.internal.DataSerializer;
+import org.tmatesoft.hg.internal.DataSerializer.OutputStreamSerializer;
+import org.tmatesoft.hg.internal.EncodingHelper;
+import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.PropertyMarshal;
+import org.tmatesoft.hg.util.LogFacility.Severity;
+import org.tmatesoft.hg.util.Outcome;
+import org.tmatesoft.hg.util.Pair;
 
 /**
  * WORK IN PROGRESS, DO NOT USE
  * 
  * @see http://mercurial.selenic.com/wiki/WireProtocol
+ * @see http://mercurial.selenic.com/wiki/HttpCommandProtocol
  * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
@@ -77,6 +91,33 @@
 	private final SessionContext sessionContext;
 	private Set<String> remoteCapabilities;
 	
+	static {
+		URLConnection.setContentHandlerFactory(new ContentHandlerFactory() {
+			
+			public ContentHandler createContentHandler(String mimetype) {
+				if ("application/mercurial-0.1".equals(mimetype)) {
+					return new ContentHandler() {
+						
+						@Override
+						public Object getContent(URLConnection urlc) throws IOException {
+							if (urlc.getContentLength() > 0) {
+								ByteArrayOutputStream bos = new ByteArrayOutputStream();
+								InputStream is = urlc.getInputStream();
+								int r;
+								while ((r = is.read()) != -1) {
+									bos.write(r);
+								}
+								return new String(bos.toByteArray());
+							}
+							return "<empty>";
+						}
+					};
+				}
+				return null;
+			}
+		});
+	}
+	
 	HgRemoteRepository(SessionContext ctx, URL url) throws HgBadArgumentException {
 		if (url == null || ctx == null) {
 			throw new IllegalArgumentException();
@@ -128,48 +169,7 @@
 	}
 	
 	public boolean isInvalid() throws HgRemoteConnectionException {
-		if (remoteCapabilities == null) {
-			remoteCapabilities = new HashSet<String>();
-			// say hello to server, check response
-			try {
-				URL u = new URL(url, url.getPath() + "?cmd=hello");
-				HttpURLConnection c = setupConnection(u.openConnection());
-				c.connect();
-				if (debug) {
-					dumpResponseHeader(u, c);
-				}
-				BufferedReader r = new BufferedReader(new InputStreamReader(c.getInputStream(), "US-ASCII"));
-				String line = r.readLine();
-				c.disconnect();
-				final String capsPrefix = "capabilities:";
-				if (line == null || !line.startsWith(capsPrefix)) {
-					// for whatever reason, some servers do not respond to hello command (e.g. svnkit)
-					// but respond to 'capabilities' instead. Try it.
-					// TODO [post-1.0] tests needed
-					u = new URL(url, url.getPath() + "?cmd=capabilities");
-					c = setupConnection(u.openConnection());
-					c.connect();
-					if (debug) {
-						dumpResponseHeader(u, c);
-					}
-					r = new BufferedReader(new InputStreamReader(c.getInputStream(), "US-ASCII"));
-					line = r.readLine();
-					c.disconnect();
-					if (line == null || line.trim().length() == 0) {
-						return true;
-					}
-				} else {
-					line = line.substring(capsPrefix.length()).trim();
-				}
-				String[] caps = line.split("\\s");
-				remoteCapabilities.addAll(Arrays.asList(caps));
-				c.disconnect();
-			} catch (MalformedURLException ex) {
-				throw new HgRemoteConnectionException("Bad URL", ex).setRemoteCommand("hello").setServerInfo(getLocation());
-			} catch (IOException ex) {
-				throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("hello").setServerInfo(getLocation());
-			}
-		}
+		initCapabilities();
 		return remoteCapabilities.isEmpty();
 	}
 
@@ -192,9 +192,10 @@
 	}
 
 	public List<Nodeid> heads() throws HgRemoteConnectionException {
+		HttpURLConnection c = null;
 		try {
 			URL u = new URL(url, url.getPath() + "?cmd=heads");
-			HttpURLConnection c = setupConnection(u.openConnection());
+			c = setupConnection(u.openConnection());
 			c.connect();
 			if (debug) {
 				dumpResponseHeader(u, c);
@@ -213,6 +214,10 @@
 			throw new HgRemoteConnectionException("Bad URL", ex).setRemoteCommand("heads").setServerInfo(getLocation());
 		} catch (IOException ex) {
 			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("heads").setServerInfo(getLocation());
+		} finally {
+			if (c != null) {
+				c.disconnect();
+			}
 		}
 	}
 	
@@ -245,10 +250,11 @@
 			// strip last space 
 			sb.setLength(sb.length() - 1);
 		}
+		HttpURLConnection c = null;
 		try {
 			boolean usePOST = ranges.size() > 3;
 			URL u = new URL(url, url.getPath() + "?cmd=between" + (usePOST ? "" : '&' + sb.toString()));
-			HttpURLConnection c = setupConnection(u.openConnection());
+			c = setupConnection(u.openConnection());
 			if (usePOST) {
 				c.setRequestMethod("POST");
 				c.setRequestProperty("Content-Length", String.valueOf(sb.length()/*nodeids are ASCII, bytes == characters */));
@@ -314,23 +320,19 @@
 			throw new HgRemoteConnectionException("Bad URL", ex).setRemoteCommand("between").setServerInfo(getLocation());
 		} catch (IOException ex) {
 			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("between").setServerInfo(getLocation());
+		} finally {
+			if (c != null) {
+				c.disconnect();
+			}
 		}
 	}
 
 	public List<RemoteBranch> branches(List<Nodeid> nodes) throws HgRemoteConnectionException {
-		StringBuilder sb = new StringBuilder(20 + nodes.size() * 41);
-		sb.append("nodes=");
-		for (Nodeid n : nodes) {
-			sb.append(n.toString());
-			sb.append('+');
-		}
-		if (sb.charAt(sb.length() - 1) == '+') {
-			// strip last space 
-			sb.setLength(sb.length() - 1);
-		}
+		StringBuilder sb = appendNodeidListArgument("nodes", nodes, null);
+		HttpURLConnection c = null;
 		try {
 			URL u = new URL(url, url.getPath() + "?cmd=branches&" + sb.toString());
-			HttpURLConnection c = setupConnection(u.openConnection());
+			c = setupConnection(u.openConnection());
 			c.connect();
 			if (debug) {
 				dumpResponseHeader(u, c);
@@ -357,6 +359,10 @@
 			throw new HgRemoteConnectionException("Bad URL", ex).setRemoteCommand("branches").setServerInfo(getLocation());
 		} catch (IOException ex) {
 			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("branches").setServerInfo(getLocation());
+		} finally {
+			if (c != null) {
+				c.disconnect();
+			}
 		}
 	}
 
@@ -378,19 +384,11 @@
 	 */
 	public HgBundle getChanges(List<Nodeid> roots) throws HgRemoteConnectionException, HgRuntimeException {
 		List<Nodeid> _roots = roots.isEmpty() ? Collections.singletonList(Nodeid.NULL) : roots;
-		StringBuilder sb = new StringBuilder(20 + _roots.size() * 41);
-		sb.append("roots=");
-		for (Nodeid n : _roots) {
-			sb.append(n.toString());
-			sb.append('+');
-		}
-		if (sb.charAt(sb.length() - 1) == '+') {
-			// strip last space 
-			sb.setLength(sb.length() - 1);
-		}
+		StringBuilder sb = appendNodeidListArgument("roots", _roots, null);
+		HttpURLConnection c = null;
 		try {
 			URL u = new URL(url, url.getPath() + "?cmd=changegroup&" + sb.toString());
-			HttpURLConnection c = setupConnection(u.openConnection());
+			c = setupConnection(u.openConnection());
 			c.connect();
 			if (debug) {
 				dumpResponseHeader(u, c);
@@ -407,13 +405,168 @@
 			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("changegroup").setServerInfo(getLocation());
 		} catch (HgRepositoryNotFoundException ex) {
 			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("changegroup").setServerInfo(getLocation());
+		} finally {
+			if (c != null) {
+				c.disconnect();
+			}
 		}
 	}
+	
+	public void unbundle(HgBundle bundle, List<Nodeid> remoteHeads) throws HgRemoteConnectionException, HgRuntimeException {
+		if (remoteHeads == null) {
+			// TODO collect heads from bundle:
+			// bundle.inspectChangelog(new HeadCollector(for each c : if collected has c.p1 or c.p2, remove them. Add c))
+			// or get from remote server???
+			throw Internals.notImplemented();
+		}
+		StringBuilder sb = appendNodeidListArgument("heads", remoteHeads, null);
+		
+		HttpURLConnection c = null;
+		DataSerializer.DataSource bundleData = bundle.new BundleSerializer();
+		try {
+			URL u = new URL(url, url.getPath() + "?cmd=unbundle&" + sb.toString());
+			c = setupConnection(u.openConnection());
+			c.setRequestMethod("POST");
+			c.setRequestProperty("Content-Length", String.valueOf(bundleData.serializeLength()));
+			c.setRequestProperty("Content-Type", "application/mercurial-0.1");
+			c.setDoOutput(true);
+			c.connect();
+			OutputStream os = c.getOutputStream();
+			bundleData.serialize(new OutputStreamSerializer(os));
+			os.flush();
+			os.close();
+			if (debug) {
+				dumpResponseHeader(u, c);
+				dumpResponse(c);
+			}
+			checkResponseOk(c, "Push", "unbundle");
+		} catch (MalformedURLException ex) {
+			throw new HgRemoteConnectionException("Bad URL", ex).setRemoteCommand("unbundle").setServerInfo(getLocation());
+		} catch (IOException ex) {
+			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("unbundle").setServerInfo(getLocation());
+		} catch (HgIOException ex) {
+			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("unbundle").setServerInfo(getLocation());
+		} finally {
+			if (c != null) {
+				c.disconnect();
+			}
+		}
+	}
+
+	public Bookmarks getBookmarks() throws HgRemoteConnectionException, HgRuntimeException {
+		final String actionName = "Get remote bookmarks";
+		final List<Pair<String, String>> values = listkeys("bookmarks", actionName);
+		ArrayList<Pair<String, Nodeid>> rv = new ArrayList<Pair<String, Nodeid>>();
+		for (Pair<String, String> l : values) {
+			if (l.second().length() != Nodeid.SIZE_ASCII) {
+				sessionContext.getLog().dump(getClass(), Severity.Warn, "%s: bad nodeid '%s', ignored", actionName, l.second());
+				continue;
+			}
+			Nodeid n = Nodeid.fromAscii(l.second());
+			String bm = new String(l.first());
+			rv.add(new Pair<String, Nodeid>(bm, n));
+		}
+		return new Bookmarks(rv);
+	}
+
+	public Outcome updateBookmark(String name, Nodeid oldRev, Nodeid newRev) throws HgRemoteConnectionException, HgRuntimeException {
+		initCapabilities();
+		if (!remoteCapabilities.contains("pushkey")) {
+			return new Outcome(Failure, "Server doesn't support pushkey protocol");
+		}
+		if (pushkey("Update remote bookmark", "bookmarks", name, oldRev.toString(), newRev.toString())) {
+			return new Outcome(Success, String.format("Bookmark %s updated to %s", name, newRev.shortNotation()));
+		}
+		return new Outcome(Failure, String.format("Bookmark update (%s: %s -> %s) failed", name, oldRev.shortNotation(), newRev.shortNotation()));
+	}
+	
+	public Phases getPhases() throws HgRemoteConnectionException, HgRuntimeException {
+		initCapabilities();
+		if (!remoteCapabilities.contains("pushkey")) {
+			// old server defaults to publishing
+			return new Phases(true, Collections.<Nodeid>emptyList());
+		}
+		final List<Pair<String, String>> values = listkeys("phases", "Get remote phases");
+		boolean publishing = false;
+		ArrayList<Nodeid> draftRoots = new ArrayList<Nodeid>();
+		for (Pair<String, String> l : values) {
+			if ("publishing".equalsIgnoreCase(l.first())) {
+				publishing = Boolean.parseBoolean(l.second());
+				continue;
+			}
+			Nodeid root = Nodeid.fromAscii(l.first());
+			int ph = Integer.parseInt(l.second());
+			if (ph == HgPhase.Draft.mercurialOrdinal()) {
+				draftRoots.add(root);
+			} else {
+				assert false;
+				sessionContext.getLog().dump(getClass(), Severity.Error, "Unexpected phase value %d for revision %s", ph, root);
+			}
+		}
+		return new Phases(publishing, draftRoots);
+	}
+	
+	public Outcome updatePhase(HgPhase from, HgPhase to, Nodeid n) throws HgRemoteConnectionException, HgRuntimeException {
+		initCapabilities();
+		if (!remoteCapabilities.contains("pushkey")) {
+			return new Outcome(Failure, "Server doesn't support pushkey protocol");
+		}
+		if (pushkey("Update remote phases", "phases", n.toString(), String.valueOf(from.mercurialOrdinal()), String.valueOf(to.mercurialOrdinal()))) {
+			return new Outcome(Success, String.format("Phase of %s updated to %s", n.shortNotation(), to.name()));
+		}
+		return new Outcome(Failure, String.format("Phase update (%s: %s -> %s) failed", n.shortNotation(), from.name(), to.name()));
+	}
 
 	@Override
 	public String toString() {
 		return getClass().getSimpleName() + '[' + getLocation() + ']';
 	}
+	
+	
+	private void initCapabilities() throws HgRemoteConnectionException {
+		if (remoteCapabilities == null) {
+			remoteCapabilities = new HashSet<String>();
+			// say hello to server, check response
+			try {
+				URL u = new URL(url, url.getPath() + "?cmd=hello");
+				HttpURLConnection c = setupConnection(u.openConnection());
+				c.connect();
+				if (debug) {
+					dumpResponseHeader(u, c);
+				}
+				BufferedReader r = new BufferedReader(new InputStreamReader(c.getInputStream(), "US-ASCII"));
+				String line = r.readLine();
+				c.disconnect();
+				final String capsPrefix = "capabilities:";
+				if (line == null || !line.startsWith(capsPrefix)) {
+					// for whatever reason, some servers do not respond to hello command (e.g. svnkit)
+					// but respond to 'capabilities' instead. Try it.
+					// TODO [post-1.0] tests needed
+					u = new URL(url, url.getPath() + "?cmd=capabilities");
+					c = setupConnection(u.openConnection());
+					c.connect();
+					if (debug) {
+						dumpResponseHeader(u, c);
+					}
+					r = new BufferedReader(new InputStreamReader(c.getInputStream(), "US-ASCII"));
+					line = r.readLine();
+					c.disconnect();
+					if (line == null || line.trim().length() == 0) {
+						return;
+					}
+				} else {
+					line = line.substring(capsPrefix.length()).trim();
+				}
+				String[] caps = line.split("\\s");
+				remoteCapabilities.addAll(Arrays.asList(caps));
+				c.disconnect();
+			} catch (MalformedURLException ex) {
+				throw new HgRemoteConnectionException("Bad URL", ex).setRemoteCommand("hello").setServerInfo(getLocation());
+			} catch (IOException ex) {
+				throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("hello").setServerInfo(getLocation());
+			}
+		}
+	}
 
 	private HgLookup getLookupHelper() {
 		if (lookupHelper == null) {
@@ -421,9 +574,78 @@
 		}
 		return lookupHelper;
 	}
+
+	private List<Pair<String,String>> listkeys(String namespace, String actionName) throws HgRemoteConnectionException, HgRuntimeException {
+		HttpURLConnection c = null;
+		try {
+			URL u = new URL(url, url.getPath() + "?cmd=listkeys&namespace=" + namespace);
+			c = setupConnection(u.openConnection());
+			c.connect();
+			if (debug) {
+				dumpResponseHeader(u, c);
+			}
+			checkResponseOk(c, actionName, "listkeys");
+			ArrayList<Pair<String, String>> rv = new ArrayList<Pair<String, String>>();
+			BufferedReader r = new BufferedReader(new InputStreamReader(c.getInputStream(), EncodingHelper.getUTF8()));
+			String l;
+			while ((l = r.readLine()) != null) {
+				int sep = l.indexOf('\t');
+				if (sep == -1) {
+					sessionContext.getLog().dump(getClass(), Severity.Warn, "%s: bad line '%s', ignored", actionName, l);
+					continue;
+				}
+				rv.add(new Pair<String,String>(l.substring(0, sep), l.substring(sep+1)));
+			}
+			r.close();
+			return rv;
+		} catch (MalformedURLException ex) {
+			throw new HgRemoteConnectionException("Bad URL", ex).setRemoteCommand("listkeys").setServerInfo(getLocation());
+		} catch (IOException ex) {
+			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("listkeys").setServerInfo(getLocation());
+		} finally {
+			if (c != null) {
+				c.disconnect();
+			}
+		}
+	}
 	
+	private boolean pushkey(String opName, String namespace, String key, String oldValue, String newValue) throws HgRemoteConnectionException, HgRuntimeException {
+		HttpURLConnection c = null;
+		try {
+			final String p = String.format("%s?cmd=pushkey&namespace=%s&key=%s&old=%s&new=%s", url.getPath(), namespace, key, oldValue, newValue);
+			URL u = new URL(url, p);
+			c = setupConnection(u.openConnection());
+			c.setRequestMethod("POST");
+			c.connect();
+			if (debug) {
+				dumpResponseHeader(u, c);
+			}
+			checkResponseOk(c, opName, "pushkey");
+			final InputStream is = c.getInputStream();
+			int rv = is.read();
+			is.close();
+			return rv == '1';
+		} catch (MalformedURLException ex) {
+			throw new HgRemoteConnectionException("Bad URL", ex).setRemoteCommand("pushkey").setServerInfo(getLocation());
+		} catch (IOException ex) {
+			throw new HgRemoteConnectionException("Communication failure", ex).setRemoteCommand("pushkey").setServerInfo(getLocation());
+		} finally {
+			if (c != null) {
+				c.disconnect();
+			}
+		}
+	}
+	
+	private void checkResponseOk(HttpURLConnection c, String opName, String remoteCmd) throws HgRemoteConnectionException, IOException {
+		if (c.getResponseCode() != 200) {
+			String m = c.getResponseMessage() == null ? "unknown reason" : c.getResponseMessage();
+			String em = String.format("%s failed: %s (HTTP error:%d)", opName, m, c.getResponseCode());
+			throw new HgRemoteConnectionException(em).setRemoteCommand(remoteCmd).setServerInfo(getLocation());
+		}
+	}
+
 	private HttpURLConnection setupConnection(URLConnection urlConnection) {
-		urlConnection.setRequestProperty("User-Agent", "hg4j/0.5.0");
+		urlConnection.setRequestProperty("User-Agent", "hg4j/1.0.0");
 		urlConnection.addRequestProperty("Accept", "application/mercurial-0.1");
 		if (authInfo != null) {
 			urlConnection.addRequestProperty("Authorization", "Basic " + authInfo);
@@ -433,6 +655,23 @@
 		}
 		return (HttpURLConnection) urlConnection;
 	}
+	
+	private StringBuilder appendNodeidListArgument(String key, List<Nodeid> values, StringBuilder sb) {
+		if (sb == null) {
+			sb = new StringBuilder(20 + values.size() * 41);
+		}
+		sb.append(key);
+		sb.append('=');
+		for (Nodeid n : values) {
+			sb.append(n.toString());
+			sb.append('+');
+		}
+		if (sb.charAt(sb.length() - 1) == '+') {
+			// strip the trailing '+' separator
+			sb.setLength(sb.length() - 1);
+		}
+		return sb;
+	}
 
 	private void dumpResponseHeader(URL u, HttpURLConnection c) {
 		System.out.printf("Query (%d bytes):%s\n", u.getQuery().length(), u.getQuery());
@@ -443,9 +682,16 @@
 		}
 	}
 	
+	private void dumpResponse(HttpURLConnection c) throws IOException {
+		if (c.getContentLength() > 0) {
+			final Object content = c.getContent();
+			System.out.println(content);
+		}
+	}
+	
 	private static File writeBundle(InputStream is, boolean decompress, String header) throws IOException {
 		InputStream zipStream = decompress ? new InflaterInputStream(is) : is;
-		File tf = File.createTempFile("hg-bundle-", null);
+		File tf = File.createTempFile("hg4j-bundle-", null);
 		FileOutputStream fos = new FileOutputStream(tf);
 		fos.write(header.getBytes());
 		int r;
@@ -502,4 +748,44 @@
 			return head.equals(o.head) && root.equals(o.root) && (p1 == null && o.p1 == null || p1.equals(o.p1)) && (p2 == null && o.p2 == null || p2.equals(o.p2));
 		}
 	}
+
+	public static final class Bookmarks implements Iterable<Pair<String, Nodeid>> {
+		private final List<Pair<String, Nodeid>> bm;
+
+		private Bookmarks(List<Pair<String, Nodeid>> bookmarks) {
+			bm = bookmarks;
+		}
+
+		public Iterator<Pair<String, Nodeid>> iterator() {
+			return bm.iterator();
+		}
+	}
+	
+	public static final class Phases {
+		private final boolean pub;
+		private final List<Nodeid> droots;
+		
+		private Phases(boolean publishing, List<Nodeid> draftRoots) {
+			pub = publishing;
+			droots = draftRoots;
+		}
+		
+		/**
+		 * Non-publishing servers may (shall?) respond with a list of draft roots.
+		 * This method doesn't make sense when {@link #isPublishingServer()} is <code>true</code>
+		 * 
+		 * @return list of draft roots on remote server
+		 */
+		public List<Nodeid> draftRoots() {
+			return droots;
+		}
+
+		/**
+		 * @return <code>true</code> if revisions on remote server shall be deemed published (either 
+		 * old server w/o explicit setting, or a new one with <code>phases.publish == true</code>)
+		 */
+		public boolean isPublishingServer() {
+			return pub;
+		}
+	}
 }
--- a/src/org/tmatesoft/hg/repo/HgRepository.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRepository.java	Wed Jul 10 11:48:55 2013 +0200
@@ -17,32 +17,28 @@
 package org.tmatesoft.hg.repo;
 
 import static org.tmatesoft.hg.repo.HgRepositoryFiles.*;
-import static org.tmatesoft.hg.util.LogFacility.Severity.*;
+import static org.tmatesoft.hg.util.LogFacility.Severity.Warn;
 
 import java.io.File;
 import java.io.FileReader;
 import java.io.IOException;
-import java.io.StringReader;
-import java.lang.ref.SoftReference;
 import java.nio.CharBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.SessionContext;
-import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.internal.ConfigFile;
 import org.tmatesoft.hg.internal.DirstateReader;
-import org.tmatesoft.hg.internal.Experimental;
+import org.tmatesoft.hg.internal.FileUtils;
 import org.tmatesoft.hg.internal.Filter;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.PropertyMarshal;
 import org.tmatesoft.hg.internal.RevlogStream;
 import org.tmatesoft.hg.internal.SubrepoManager;
 import org.tmatesoft.hg.repo.ext.HgExtensionsManager;
-import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.PathRewrite;
@@ -94,7 +90,6 @@
 	 */
 	public static final String DEFAULT_BRANCH_NAME = "default";
 
-	private final File repoDir; // .hg folder
 	private final File workingDir; // .hg/../
 	private final String repoLocation;
 	/*
@@ -113,24 +108,14 @@
 	private SubrepoManager subRepos;
 	private HgBookmarks bookmarks;
 	private HgExtensionsManager extManager;
-
-	// XXX perhaps, shall enable caching explicitly
-	private final HashMap<Path, SoftReference<RevlogStream>> streamsCache = new HashMap<Path, SoftReference<RevlogStream>>();
-	
-	private final org.tmatesoft.hg.internal.Internals impl;
 	private HgIgnore ignore;
 	private HgRepoConfig repoConfig;
 	
-	/*
-	 * TODO [post-1.0] move to a better place, e.g. WorkingCopy container that tracks both dirstate and branches 
-	 * (and, perhaps, undo, lastcommit and other similar information), and is change listener so that we don't need to
-	 * worry about this cached value become stale
-	 */
-	private String wcBranch;
+	private HgRepositoryLock wdLock, storeLock;
 
+	private final org.tmatesoft.hg.internal.Internals impl;
 	
 	HgRepository(String repositoryPath) {
-		repoDir = null;
 		workingDir = null;
 		repoLocation = repositoryPath;
 		normalizePath = null;
@@ -146,15 +131,25 @@
 		assert repositoryPath != null; 
 		assert repositoryRoot != null;
 		assert ctx != null;
-		repoDir = repositoryRoot;
-		workingDir = repoDir.getParentFile();
+		workingDir = repositoryRoot.getParentFile();
 		if (workingDir == null) {
-			throw new IllegalArgumentException(repoDir.toString());
+			throw new IllegalArgumentException(repositoryRoot.toString());
 		}
 		repoLocation = repositoryPath;
 		sessionContext = ctx;
-		impl = new org.tmatesoft.hg.internal.Internals(this, repositoryRoot);
-		normalizePath = impl.buildNormalizePathRewrite(); 
+		impl = new Internals(this, repositoryRoot, new Internals.ImplAccess() {
+			
+			public RevlogStream getStream(HgDataFile df) {
+				return df.content;
+			}
+			public RevlogStream getManifestStream() {
+				return HgRepository.this.getManifest().content;
+			}
+			public RevlogStream getChangelogStream() {
+				return HgRepository.this.getChangelog().content;
+			}
+		});
+		normalizePath = impl.buildNormalizePathRewrite();
 	}
 
 	@Override
@@ -174,7 +169,7 @@
 	 * @return repository location information, never <code>null</code>
 	 */
 	public String getLocation() {
-		return repoLocation;
+		return repoLocation; // XXX a dedicated field to keep this is a bit too much
 	}
 
 	public boolean isInvalid() {
@@ -183,8 +178,7 @@
 	
 	public HgChangelog getChangelog() {
 		if (changelog == null) {
-			File chlogFile = impl.getFileFromStoreDir("00changelog.i");
-			RevlogStream content = new RevlogStream(impl.getDataAccess(), chlogFile);
+			RevlogStream content = impl.createChangelogStream();
 			changelog = new HgChangelog(this, content);
 		}
 		return changelog;
@@ -192,63 +186,41 @@
 	
 	public HgManifest getManifest() {
 		if (manifest == null) {
-			File manifestFile = impl.getFileFromStoreDir("00manifest.i");
-			RevlogStream content = new RevlogStream(impl.getDataAccess(), manifestFile);
+			RevlogStream content = impl.createManifestStream();
 			manifest = new HgManifest(this, content, impl.buildFileNameEncodingHelper());
 		}
 		return manifest;
 	}
 	
 	/**
+	 * Access snapshot of repository tags.
+	 * 
 	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
-	public HgTags getTags() throws HgInvalidControlFileException {
+	public HgTags getTags() throws HgRuntimeException {
 		if (tags == null) {
-			tags = new HgTags(this);
-			HgDataFile hgTags = getFileNode(HgTags.getPath());
-			if (hgTags.exists()) {
-				for (int i = 0; i <= hgTags.getLastRevision(); i++) { // TODO post-1.0 in fact, would be handy to have walk(start,end) 
-					// method for data files as well, though it looks odd.
-					try {
-						ByteArrayChannel sink = new ByteArrayChannel();
-						hgTags.content(i, sink);
-						final String content = new String(sink.toArray(), "UTF8");
-						tags.readGlobal(new StringReader(content));
-					} catch (CancelledException ex) {
-						 // IGNORE, can't happen, we did not configure cancellation
-						getSessionContext().getLog().dump(getClass(), Debug, ex, null);
-					} catch (IOException ex) {
-						// UnsupportedEncodingException can't happen (UTF8)
-						// only from readGlobal. Need to reconsider exceptions thrown from there:
-						// BufferedReader wraps String and unlikely to throw IOException, perhaps, log is enough?
-						getSessionContext().getLog().dump(getClass(), Error, ex, null);
-						// XXX need to decide what to do this. failure to read single revision shall not break complete cycle
-					}
-				}
-			}
-			File file2read = null;
-			try {
-				file2read = new File(getWorkingDir(), HgTags.getPath());
-				tags.readGlobal(file2read); // XXX replace with HgDataFile.workingCopy
-				file2read = impl.getFileFromRepoDir(HgLocalTags.getName()); // XXX pass internalrepo to readLocal, keep filename there
-				tags.readLocal(file2read);
-			} catch (IOException ex) {
-				getSessionContext().getLog().dump(getClass(), Error, ex, null);
-				throw new HgInvalidControlFileException("Failed to read tags", ex, file2read);
-			}
+			tags = new HgTags(impl);
+			tags.read();
+		} else {
+			tags.reloadIfChanged();
 		}
 		return tags;
 	}
 	
 	/**
-	 * Access branch information
+	 * Access branch information. Returns a snapshot of branch information as it's available at the time of the call.
+	 * If repository get changed, use this method to obtain an up-to-date state. 
+	 * 
 	 * @return branch manager instance, never <code>null</code>
 	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
-	public HgBranches getBranches() throws HgInvalidControlFileException {
+	public HgBranches getBranches() throws HgRuntimeException {
+		final ProgressSupport ps = ProgressSupport.Factory.get(null);
 		if (branches == null) {
 			branches = new HgBranches(impl);
-			branches.collect(ProgressSupport.Factory.get(null));
+			branches.collect(ps);
+		} else {
+			branches.reloadIfChanged(ps);
 		}
 		return branches;
 	}
@@ -256,10 +228,12 @@
 	/**
 	 * Access state of the recent merge
 	 * @return merge state facility, never <code>null</code> 
+	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
-	public HgMergeState getMergeState() {
+	public HgMergeState getMergeState() throws HgRuntimeException {
 		if (mergeState == null) {
 			mergeState = new HgMergeState(impl);
+			mergeState.refresh();
 		}
 		return mergeState;
 	}
@@ -271,10 +245,8 @@
 	}
 
 	public HgDataFile getFileNode(Path path) {
-		RevlogStream content = resolveStoreFile(path);
-		if (content == null) {
-			return new HgDataFile(this, path);
-		}
+		RevlogStream content = impl.resolveStoreFile(path);
+		assert content != null;
 		return new HgDataFile(this, path, content);
 	}
 
@@ -296,9 +268,13 @@
 	 * @throws HgInvalidControlFileException if attempt to read branch name failed.
 	 */
 	public String getWorkingCopyBranchName() throws HgInvalidControlFileException {
-		if (wcBranch == null) {
-			wcBranch = DirstateReader.readBranch(impl);
-		}
+		/*
+		 * TODO [post-1.1] 1) cache value (now if cached, is not updated after commit)
+		 * 2) move to a better place, e.g. WorkingCopy container that tracks both dirstate and branches 
+		 * (and, perhaps, undo, lastcommit and other similar information), and is change listener so that we don't need to
+		 * worry about this cached value become stale
+		 */
+		String wcBranch = DirstateReader.readBranch(impl);
 		return wcBranch;
 	}
 
@@ -333,7 +309,7 @@
 			try {
 				ConfigFile configFile = impl.readConfiguration();
 				repoConfig = new HgRepoConfig(configFile);
-			} catch (IOException ex) {
+			} catch (HgIOException ex) {
 				String m = "Errors while reading user configuration file";
 				getSessionContext().getLog().dump(getClass(), Warn, ex, m);
 				return new HgRepoConfig(new ConfigFile(getSessionContext())); // empty config, do not cache, allow to try once again
@@ -369,16 +345,9 @@
 		// TODO read config for additional locations
 		if (ignore == null) {
 			ignore = new HgIgnore(getToRepoPathHelper());
-			File ignoreFile = new File(getWorkingDir(), HgIgnore.getPath());
-			try {
-				final List<String> errors = ignore.read(ignoreFile);
-				if (errors != null) {
-					getSessionContext().getLog().dump(getClass(), Warn, "Syntax errors parsing %s:\n%s", ignoreFile.getName(), Internals.join(errors, ",\n"));
-				}
-			} catch (IOException ex) {
-				final String m = String.format("Error reading %s file", ignoreFile);
-				throw new HgInvalidControlFileException(m, ex, ignoreFile);
-			}
+			ignore.read(impl);
+		} else {
+			ignore.reloadIfChanged(impl);
 		}
 		return ignore;
 	}
@@ -391,7 +360,7 @@
 	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
 	public String getCommitLastMessage() throws HgInvalidControlFileException {
-		File lastMessage = impl.getFileFromRepoDir(LastMessage.getPath());
+		File lastMessage = impl.getRepositoryFile(LastMessage);
 		if (!lastMessage.canRead()) {
 			return null;
 		}
@@ -404,21 +373,11 @@
 		} catch (IOException ex) {
 			throw new HgInvalidControlFileException("Can't retrieve message of last commit attempt", ex, lastMessage);
 		} finally {
-			if (fr != null) {
-				try {
-					fr.close();
-				} catch (IOException ex) {
-					getSessionContext().getLog().dump(getClass(), Warn, "Failed to close %s after read", lastMessage);
-				}
-			}
+			new FileUtils(getSessionContext().getLog(), this).closeQuietly(fr, lastMessage);
 		}
 	}
 
-	private HgRepositoryLock wdLock, storeLock;
-
 	/**
-	 * PROVISIONAL CODE, DO NOT USE
-	 * 
 	 * Access repository lock that covers non-store parts of the repository (dirstate, branches, etc - 
 	 * everything that has to do with working directory state).
 	 * 
@@ -427,11 +386,10 @@
 	 *   
 	 * @return lock object, never <code>null</code>
 	 */
-	@Experimental(reason="WORK IN PROGRESS")
 	public HgRepositoryLock getWorkingDirLock() {
 		if (wdLock == null) {
 			int timeout = getLockTimeout();
-			File lf = impl.getFileFromRepoDir("wlock");
+			File lf = impl.getRepositoryFile(WorkingCopyLock);
 			synchronized (this) {
 				if (wdLock == null) {
 					wdLock = new HgRepositoryLock(lf, timeout);
@@ -441,11 +399,15 @@
 		return wdLock;
 	}
 
-	@Experimental(reason="WORK IN PROGRESS")
+	/**
+	 * Access repository lock that covers repository intrinsic files, unrelated to 
+	 * the state of working directory
+	 * @return lock object, never <code>null</code>
+	 */
 	public HgRepositoryLock getStoreLock() {
 		if (storeLock == null) {
 			int timeout = getLockTimeout();
-			File fl = impl.getFileFromStoreDir("lock");
+			File fl = impl.getRepositoryFile(StoreLock);
 			synchronized (this) {
 				if (storeLock == null) {
 					storeLock = new HgRepositoryLock(fl, timeout);
@@ -460,10 +422,12 @@
 	 * @return facility to manage bookmarks, never <code>null</code>
 	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
 	 */
-	public HgBookmarks getBookmarks() throws HgInvalidControlFileException {
+	public HgBookmarks getBookmarks() throws HgRuntimeException {
 		if (bookmarks == null) {
 			bookmarks = new HgBookmarks(impl);
 			bookmarks.read();
+		} else {
+			bookmarks.reloadIfChanged();
 		}
 		return bookmarks;
 	}
@@ -486,45 +450,6 @@
 	public SessionContext getSessionContext() {
 		return sessionContext;
 	}
-
-	/**
-	 * Perhaps, should be separate interface, like ContentLookup
-	 * @param path - normalized file name
-	 * @return <code>null</code> if path doesn't resolve to a existing file
-	 */
-	/*package-local*/ RevlogStream resolveStoreFile(Path path) {
-		final SoftReference<RevlogStream> ref = streamsCache.get(path);
-		RevlogStream cached = ref == null ? null : ref.get();
-		if (cached != null) {
-			return cached;
-		}
-		File f = impl.getFileFromDataDir(path);
-		if (f.exists()) {
-			RevlogStream s = new RevlogStream(impl.getDataAccess(), f);
-			if (impl.shallCacheRevlogs()) {
-				streamsCache.put(path, new SoftReference<RevlogStream>(s));
-			}
-			return s;
-		}
-		return null;
-	}
-	
-	/*package-local*/ RevlogStream createStoreFile(Path path) throws HgInvalidControlFileException {
-		File f = impl.getFileFromDataDir(path);
-		try {
-			if (!f.exists()) {
-				f.getParentFile().mkdirs();
-				f.createNewFile();
-			}
-			RevlogStream s = new RevlogStream(impl.getDataAccess(), f);
-			if (impl.shallCacheRevlogs()) {
-				streamsCache.put(path, new SoftReference<RevlogStream>(s));
-			}
-			return s;
-		} catch (IOException ex) {
-			throw new HgInvalidControlFileException("Can't create a file in the storage", ex, f);
-		}
-	}
 	
 	/*package-local*/ List<Filter> getFiltersFromRepoToWorkingDir(Path p) {
 		return instantiateFilters(p, new Filter.Options(Filter.Direction.FromRepo));
--- a/src/org/tmatesoft/hg/repo/HgRepositoryFiles.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRepositoryFiles.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -25,34 +25,46 @@
  */
 public enum HgRepositoryFiles {
 
-	HgIgnore(".hgignore"), HgTags(".hgtags"), HgEol(".hgeol"), 
-	Dirstate(false, "dirstate"), HgLocalTags(false, "localtags"),
-	HgSub(".hgsub"), HgSubstate(".hgsubstate"),
-	LastMessage(false, "last-message.txt"),
-	Bookmarks(false, "bookmarks"), BookmarksCurrent(false, "bookmarks.current"),
-	Branch(false, "branch");
+	HgIgnore(Home.Root, ".hgignore"), HgTags(Home.Root, ".hgtags"), HgEol(Home.Root, ".hgeol"), 
+	Dirstate(Home.Repo, "dirstate"), HgLocalTags(Home.Repo, "localtags"),
+	HgSub(Home.Root, ".hgsub"), HgSubstate(Home.Root, ".hgsubstate"),
+	LastMessage(Home.Repo, "last-message.txt"),
+	Bookmarks(Home.Repo, "bookmarks"), BookmarksCurrent(Home.Repo, "bookmarks.current"),
+	Branch(Home.Repo, "branch"), 
+	UndoBranch(Home.Repo, "undo.branch"), UndoDirstate(Home.Repo, "undo.dirstate"),
+	Phaseroots(Home.Store, "phaseroots"), FNCache(Home.Store, "fncache"),
+	WorkingCopyLock(Home.Repo, "wlock"), StoreLock(Home.Store, "lock");
+
+	/**
+	 * Possible file locations 
+	 */
+	public enum Home {
+		Root, Repo, Store
+	}
 
 	private final String fname;
-	private final boolean livesInWC; 
+	private final Home residesIn; 
 	
-	private HgRepositoryFiles(String filename) {
-		this(true, filename);
-	}
-
-	private HgRepositoryFiles(boolean wcNotRepoRoot, String filename) {
+	private HgRepositoryFiles(Home home, String filename) {
 		fname = filename;
-		livesInWC = wcNotRepoRoot;
+		residesIn = home;
 	}
 
 	/**
-	 * Path to the file, relative to the parent it lives in.
+	 * Path to the file, relative to the repository root.
 	 * 
 	 * For repository files that reside in working directory, return their location relative to the working dir.
-	 * For files that reside under repository root, path returned would include '.hg/' prefix.
+	 * For files that reside under repository root, path returned includes '.hg/' prefix.
+	 * For files from {@link Home#Store} storage area, path starts with '.hg/store/', although actual use of 'store' folder
+	 * is controlled by repository requirements. The returned value shall be deemed the 'most likely' path in a typical environment.
 	 * @return file location, never <code>null</code>
 	 */
 	public String getPath() {
-		return livesInWC ? getName() : ".hg/" + getName();
+		switch (residesIn) {
+			case Store : return ".hg/store/" + getName();
+			case Repo : return ".hg/" + getName();
+			default : return getName();
+		}
 	}
 
 	/**
@@ -72,13 +84,20 @@
 	 * @return <code>true</code> if file lives in working tree
 	 */
 	public boolean residesUnderWorkingDir() {
-		return livesInWC;
+		return residesIn == Home.Root;
 	}
 
 	/**
 	 * @return <code>true</code> if file lives under '.hg/' 
 	 */
 	public boolean residesUnderRepositoryRoot() {
-		return !livesInWC;
+		return residesIn == Home.Repo;
+	}
+	
+	/**
+	 * Identify a root the file lives under
+	 */
+	public Home getHome() {
+		return residesIn;
 	}
 }
--- a/src/org/tmatesoft/hg/repo/HgRepositoryLock.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRepositoryLock.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -26,12 +26,9 @@
 import java.nio.channels.FileChannel;
 
 import org.tmatesoft.hg.core.HgRepositoryLockException;
-import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.Internals;
 
 /**
- * NOT SAFE FOR MULTITHREAD USE!
- * 
  * <p>Usage:
  * <pre>
  * HgRepositoryLock lock = hgRepo.getWorkingDirLock();
@@ -52,11 +49,14 @@
  * 
  * Unlike original mechanism, we don't use symlinks, rather files, as it's easier to implement
  * 
+ * <p>
+ * NOT SAFE FOR MULTITHREAD USE!
+ * 
  * @see http://code.google.com/p/hg4j/issues/detail?id=35
+ * @since 1.1
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-@Experimental(reason="Work in progress")
 public class HgRepositoryLock {
 	/*
 	 * Lock .hg/ except .hg/store/      .hg/wlock (new File(hgRepo.getRepoRoot(),"wlock"))
@@ -99,7 +99,7 @@
 
 	/**
 	 * Perform actual locking. Waits for timeout (if specified at construction time)
-	 * before throwing {@link HgInvalidStateException} in case lock is not available 
+	 * before throwing {@link HgRepositoryLockException} in case lock is not available 
 	 * immediately.
 	 * 
 	 * <p>Multiple calls are possible, but corresponding number of {@link #release()} 
@@ -192,10 +192,11 @@
 	}
 
 	private static byte[] read(File f) throws IOException {
-		FileChannel fc = new FileInputStream(f).getChannel();
+		FileInputStream fis = new FileInputStream(f);
+		FileChannel fc = fis.getChannel();
 		ByteBuffer bb = ByteBuffer.allocate(Internals.ltoi(fc.size()));
 		fc.read(bb);
-		fc.close();
+		fis.close();
 		return bb.array();
 	}
 }
--- a/src/org/tmatesoft/hg/repo/HgRevisionMap.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRevisionMap.java	Wed Jul 10 11:48:55 2013 +0200
@@ -19,8 +19,6 @@
 import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION;
 import static org.tmatesoft.hg.repo.HgRepository.TIP;
 
-import java.util.Arrays;
-
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.ArrayHelper;
 import org.tmatesoft.hg.repo.Revlog.RevisionInspector;
@@ -60,15 +58,14 @@
 	 * for complete changelog iteration. 
 	 */
 	
+	private final T revlog;
 	/*
 	 * XXX 3 * (x * 4) bytes. Can I do better?
 	 * It seems, yes. Don't need to keep sorted, always can emulate it with indirect access to sequential through sorted2natural.
 	 * i.e. instead sorted[mid].compareTo(toFind), do sequential[sorted2natural[mid]].compareTo(toFind) 
 	 */
-	private Nodeid[] sequential; // natural repository order, childrenOf rely on ordering
-	private Nodeid[] sorted; // for binary search
-	private int[] sorted2natural;
-	private final T revlog;
+	private Nodeid[] sequential; // natural repository order
+	private ArrayHelper<Nodeid> seqWrapper;
 
 	public HgRevisionMap(T owner) {
 		revlog = owner;
@@ -79,38 +76,39 @@
 	}
 	
 	public void next(int revisionIndex, Nodeid revision, int linkedRevision) {
-		sequential[revisionIndex] = sorted[revisionIndex] = revision;
+		sequential[revisionIndex] = revision;
 	}
 
 	/**
 	 * @return <code>this</code> for convenience.
 	 */
-	public HgRevisionMap<T> init(/*XXX Pool<Nodeid> to reuse nodeids, if possible. */) throws HgInvalidControlFileException{
+	public HgRevisionMap<T> init(/*XXX Pool<Nodeid> to reuse nodeids, if possible. */) throws HgRuntimeException {
 		// XXX HgRepository.register((RepoChangeListener) this); // listen to changes in repo, re-init if needed?
 		final int revisionCount = revlog.getRevisionCount();
 		sequential = new Nodeid[revisionCount];
-		sorted = new Nodeid[revisionCount];
 		revlog.indexWalk(0, TIP, this);
 		// next is alternative to Arrays.sort(sorted), and build sorted2natural looking up each element of sequential in sorted.
 		// the way sorted2natural was build is O(n*log n).  
-		final ArrayHelper ah = new ArrayHelper();
-		ah.sort(sorted);
-		// note, values in ArrayHelper#getReversed are 1-based indexes, not 0-based 
-		sorted2natural = ah.getReverse();
+		seqWrapper = new ArrayHelper<Nodeid>(sequential);
+		seqWrapper.sort(null, true, false);
 		return this;
 	}
+
+	/* friendly initializer to use from HgParentChildMap
+	/*package*/ void init(ArrayHelper<Nodeid> _seqWrapper) {
+		assert _seqWrapper.getData().length == revlog.getRevisionCount();
+		sequential = _seqWrapper.getData();
+		seqWrapper = _seqWrapper;
+	}
 	
 	public Nodeid revision(int revisionIndex) {
 		return sequential[revisionIndex];
 	}
+
 	public int revisionIndex(Nodeid revision) {
 		if (revision == null || revision.isNull()) {
 			return BAD_REVISION;
 		}
-		int x = Arrays.binarySearch(sorted, revision);
-		if (x < 0) {
-			return BAD_REVISION;
-		}
-		return sorted2natural[x]-1;
+		return seqWrapper.binarySearch(revision, BAD_REVISION);
 	}
 }
\ No newline at end of file
--- a/src/org/tmatesoft/hg/repo/HgStatusCollector.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgStatusCollector.java	Wed Jul 10 11:48:55 2013 +0200
@@ -73,7 +73,7 @@
 		return repo;
 	}
 	
-	private ManifestRevision get(int rev) throws HgInvalidControlFileException {
+	private ManifestRevision get(int rev) throws HgRuntimeException {
 		ManifestRevision i = cache.get(rev);
 		if (i == null) {
 			if (rev == NO_REVISION) {
@@ -98,7 +98,7 @@
 		}
 	}
 	
-	private void initCacheRange(int minRev, int maxRev) throws HgInvalidControlFileException {
+	private void initCacheRange(int minRev, int maxRev) throws HgRuntimeException {
 		ensureCacheSize();
 		// In fact, walk(minRev, maxRev) doesn't imply
 		// there would be maxRev-minRev+1 revisions visited. For example,
@@ -159,7 +159,7 @@
 	 * @return
 	 * @throws HgInvalidControlFileException
 	 */
-	/*package-local*/ ManifestRevision raw(int rev) throws HgInvalidControlFileException {
+	/*package-local*/ ManifestRevision raw(int rev) throws HgRuntimeException {
 		return get(rev);
 	}
 	/*package-local*/ Convertor<Path> getPathPool() {
@@ -344,9 +344,11 @@
 	 * @param rev1 <em>from</em> changeset index 
 	 * @param rev2 <em>to</em> changeset index
 	 * @return information object that describes change between the revisions
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidRevisionException if any supplied revision doesn't identify changeset revision. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	public Record status(int rev1, int rev2) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	public Record status(int rev1, int rev2) throws HgRuntimeException {
 		Record rv = new Record();
 		try {
 			walk(rev1, rev2, rv);
@@ -359,7 +361,7 @@
 		return rv;
 	}
 	
-	/*package-local*/static Path getOriginIfCopy(HgRepository hgRepo, Path fname, Collection<Path> originals, int originalChangelogRevision) throws HgInvalidFileException {
+	/*package-local*/static Path getOriginIfCopy(HgRepository hgRepo, Path fname, Collection<Path> originals, int originalChangelogRevision) throws HgRuntimeException {
 		HgDataFile df = hgRepo.getFileNode(fname);
 		if (!df.exists()) {
 			String msg = String.format("Didn't find file '%s' in the repo. Perhaps, bad storage name conversion?", fname);
@@ -417,7 +419,7 @@
 			statusHelper = self;
 		}
 		
-		public Nodeid nodeidBeforeChange(Path fname) throws HgInvalidControlFileException {
+		public Nodeid nodeidBeforeChange(Path fname) throws HgRuntimeException {
 			if (statusHelper == null || startRev == BAD_REVISION) {
 				return null;
 			}
@@ -426,7 +428,7 @@
 			}
 			return statusHelper.raw(startRev).nodeid(fname);
 		}
-		public Nodeid nodeidAfterChange(Path fname) throws HgInvalidControlFileException {
+		public Nodeid nodeidAfterChange(Path fname) throws HgRuntimeException {
 			if (statusHelper == null || endRev == BAD_REVISION) {
 				return null;
 			}
--- a/src/org/tmatesoft/hg/repo/HgTags.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgTags.java	Wed Jul 10 11:48:55 2013 +0200
@@ -16,6 +16,9 @@
  */
 package org.tmatesoft.hg.repo;
 
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.HgLocalTags;
+import static org.tmatesoft.hg.repo.HgRepositoryFiles.HgTags;
+import static org.tmatesoft.hg.util.LogFacility.Severity.*;
 import static org.tmatesoft.hg.util.LogFacility.Severity.Error;
 import static org.tmatesoft.hg.util.LogFacility.Severity.Warn;
 
@@ -24,6 +27,7 @@
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.Reader;
+import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -34,6 +38,11 @@
 
 import org.tmatesoft.hg.core.HgBadNodeidFormatException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.ByteArrayChannel;
+import org.tmatesoft.hg.internal.ChangelogMonitor;
+import org.tmatesoft.hg.internal.FileChangeMonitor;
+import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.util.CancelledException;
 
 /**
  * @see http://mercurial.selenic.com/wiki/TagDesign
@@ -45,38 +54,83 @@
 	// global tags come from ".hgtags"
 	// local come from ".hg/localtags"
 
-	private final HgRepository repo;
+	private final Internals repo;
 
 	private final Map<Nodeid, List<String>> globalToName;
 	private final Map<Nodeid, List<String>> localToName;
 	private final Map<String, List<Nodeid>> globalFromName;
 	private final Map<String, List<Nodeid>> localFromName;
 	
+	private FileChangeMonitor globalTagsFileMonitor, localTagsFileMonitor;
+	private ChangelogMonitor repoChangeMonitor;
+	
 	private Map<String, TagInfo> tags;
 	
-	/*package-local*/ HgTags(HgRepository hgRepo) {
-		repo = hgRepo;
+	/*package-local*/ HgTags(Internals internalRepo) {
+		repo = internalRepo;
 		globalToName =  new HashMap<Nodeid, List<String>>();
 		localToName  =  new HashMap<Nodeid, List<String>>();
 		globalFromName = new TreeMap<String, List<Nodeid>>();
 		localFromName  = new TreeMap<String, List<Nodeid>>();
 	}
 	
-	/*package-local*/ void readLocal(File localTags) throws IOException {
-		if (localTags == null || localTags.isDirectory()) {
-			throw new IllegalArgumentException(String.valueOf(localTags));
-		}
-		read(localTags, localToName, localFromName);
+	/*package-local*/ void read() throws HgRuntimeException {
+		readTagsFromHistory();
+		readGlobal();
+		readLocal();
 	}
 	
-	/*package-local*/ void readGlobal(File globalTags) throws IOException {
-		if (globalTags == null || globalTags.isDirectory()) {
-			throw new IllegalArgumentException(String.valueOf(globalTags));
+	private void readTagsFromHistory() throws HgRuntimeException {
+		HgDataFile hgTags = repo.getRepo().getFileNode(HgTags.getPath());
+		if (hgTags.exists()) {
+			for (int i = 0; i <= hgTags.getLastRevision(); i++) { // TODO post-1.0 in fact, would be handy to have walk(start,end) 
+				// method for data files as well, though it looks odd.
+				try {
+					ByteArrayChannel sink = new ByteArrayChannel();
+					hgTags.content(i, sink);
+					final String content = new String(sink.toArray(), "UTF8");
+					readGlobal(new StringReader(content));
+				} catch (CancelledException ex) {
+					 // IGNORE, can't happen, we did not configure cancellation
+					repo.getLog().dump(getClass(), Debug, ex, null);
+				} catch (IOException ex) {
+					// UnsupportedEncodingException can't happen (UTF8)
+					// only from readGlobal. Need to reconsider exceptions thrown from there:
+					// BufferedReader wraps String and unlikely to throw IOException, perhaps, log is enough?
+					repo.getLog().dump(getClass(), Error, ex, null);
+					// XXX need to decide what to do this. failure to read single revision shall not break complete cycle
+				}
+			}
 		}
-		read(globalTags, globalToName, globalFromName);
+		if (repoChangeMonitor == null) {
+			repoChangeMonitor = new ChangelogMonitor(repo.getRepo());
+		}
+		repoChangeMonitor.touch();
+	}
+	
+	private void readLocal() throws HgInvalidControlFileException {
+		File localTags = repo.getRepositoryFile(HgLocalTags);
+		if (localTags.canRead() && localTags.isFile()) {
+			read(localTags, localToName, localFromName);
+		}
+		if (localTagsFileMonitor == null) { 
+			localTagsFileMonitor = new FileChangeMonitor(localTags);
+		}
+		localTagsFileMonitor.touch(this);
+	}
+	
+	private void readGlobal() throws HgInvalidControlFileException {
+		File globalTags = repo.getRepositoryFile(HgTags); // XXX replace with HgDataFile.workingCopy
+		if (globalTags.canRead() && globalTags.isFile()) {
+			read(globalTags, globalToName, globalFromName);
+		}
+		if (globalTagsFileMonitor == null) {
+			globalTagsFileMonitor = new FileChangeMonitor(globalTags);
+		}
+		globalTagsFileMonitor.touch(this);
 	}
 
-	/*package-local*/ void readGlobal(Reader globalTags) throws IOException {
+	private void readGlobal(Reader globalTags) throws IOException {
 		BufferedReader r = null;
 		try {
 			r = new BufferedReader(globalTags);
@@ -88,7 +142,7 @@
 		}
 	}
 	
-	private void read(File f, Map<Nodeid,List<String>> nid2name, Map<String, List<Nodeid>> name2nid) throws IOException {
+	private void read(File f, Map<Nodeid,List<String>> nid2name, Map<String, List<Nodeid>> name2nid) throws HgInvalidControlFileException {
 		if (!f.canRead()) {
 			return;
 		}
@@ -96,9 +150,17 @@
 		try {
 			r = new BufferedReader(new FileReader(f));
 			read(r, nid2name, name2nid);
+		} catch (IOException ex) {
+			repo.getLog().dump(getClass(), Error, ex, null);
+			throw new HgInvalidControlFileException("Failed to read tags", ex, f);
 		} finally {
 			if (r != null) {
-				r.close();
+				try {
+					r.close();
+				} catch (IOException ex) {
+					// since it's read operation, do not treat close failure as error, but let user know, anyway
+					repo.getLog().dump(getClass(), Warn, ex, null);
+				}
 			}
 		}
 	}
@@ -112,7 +174,7 @@
 			}
 			final int spacePos = line.indexOf(' ');
 			if (line.length() < 40+2 /*nodeid, space and at least single-char tagname*/ || spacePos != 40) {
-				repo.getSessionContext().getLog().dump(getClass(), Warn, "Bad tags line: %s", line); 
+				repo.getLog().dump(getClass(), Warn, "Bad tags line: %s", line); 
 				continue;
 			}
 			try {
@@ -154,7 +216,7 @@
 					revTags.add(tagName);
 				}
 			} catch (HgBadNodeidFormatException ex) {
-				repo.getSessionContext().getLog().dump(getClass(), Error, "Bad revision '%s' in line '%s':%s", line.substring(0, spacePos), line, ex.getMessage()); 
+				repo.getLog().dump(getClass(), Error, "Bad revision '%s' in line '%s':%s", line.substring(0, spacePos), line, ex.getMessage()); 
 			}
 		}
 	}
@@ -217,6 +279,23 @@
 		return rv;
 	}
 
+	// can be called only after instance has been initialized (#read() invoked) 
+	/*package-local*/void reloadIfChanged() throws HgRuntimeException {
+		assert repoChangeMonitor != null;
+		assert localTagsFileMonitor != null;
+		assert globalTagsFileMonitor != null;
+		if (repoChangeMonitor.isChanged() || globalTagsFileMonitor.changed(this)) {
+			globalFromName.clear();
+			globalToName.clear();
+			readTagsFromHistory();
+			readGlobal();
+			tags = null;
+		}
+		if (localTagsFileMonitor.changed(this)) {
+			readLocal();
+			tags = null;
+		}
+	}
 	
 	public final class TagInfo {
 		private final String name;
@@ -233,10 +312,16 @@
 			return localFromName.containsKey(name);
 		}
 
-		public String branch() throws HgInvalidControlFileException {
+		/**
+		 * @return name of the branch this tag belongs to, never <code>null</code>
+		 * @throws HgInvalidRevisionException if revision of the tag is not a valid changeset revision. <em>Runtime exception</em>
+		 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+		 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
+		 */
+		public String branch() throws HgRuntimeException {
 			if (branch == null) {
-				int x = repo.getChangelog().getRevisionIndex(revision());
-				branch = repo.getChangelog().range(x, x).get(0).branch();
+				int x = repo.getRepo().getChangelog().getRevisionIndex(revision());
+				branch = repo.getRepo().getChangelog().range(x, x).get(0).branch();
 			}
 			return branch;
 		}
--- a/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	Wed Jul 10 11:48:55 2013 +0200
@@ -34,7 +34,7 @@
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.internal.ByteArrayChannel;
-import org.tmatesoft.hg.internal.Experimental;
+import org.tmatesoft.hg.internal.FileUtils;
 import org.tmatesoft.hg.internal.FilterByteChannel;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.ManifestRevision;
@@ -131,7 +131,7 @@
 		return dirstate;
 	}
 	
-	private ManifestRevision getManifest(int changelogLocalRev) throws HgInvalidControlFileException {
+	private ManifestRevision getManifest(int changelogLocalRev) throws HgRuntimeException {
 		assert changelogLocalRev >= 0;
 		ManifestRevision mr;
 		if (baseRevisionCollector != null) {
@@ -143,7 +143,7 @@
 		return mr;
 	}
 
-	private void initDirstateParentManifest() throws HgInvalidControlFileException {
+	private void initDirstateParentManifest() throws HgRuntimeException {
 		Nodeid dirstateParent = getDirstateImpl().parents().first();
 		if (dirstateParent.isNull()) {
 			dirstateParentManifest = baseRevisionCollector != null ? baseRevisionCollector.raw(NO_REVISION) : HgStatusCollector.createEmptyManifestRevision();
@@ -333,7 +333,6 @@
 	 * @param fileInfo file content mediator 
 	 * @return <code>true</code> when content in working dir differs from that of manifest-recorded revision 
 	 */
-	@Experimental(reason="Perhaps, HgDataFile#isWorkingCopyChanged() would be better - no need to pass any arguments?")
 	public boolean hasTangibleChanges(Path fname, FileInfo fileInfo) throws HgRuntimeException {
 		// see #checkLocalStatusAgainstFile() below for the origin of changed file check
 		HgDataFile df = repo.getFileNode(fname);
@@ -375,8 +374,7 @@
 					} else {
 						HgDataFile df = repo.getFileNode(fname);
 						if (!df.exists()) {
-							// TODO pass Internals right into HgWCSC cons
-							Internals implRepo = HgInternals.getImplementationRepo(repo);
+							Internals implRepo = repo.getImplHelper();
 							String msg = String.format("File %s known as normal in dirstate (%d, %d), doesn't exist at %s", fname, r.modificationTime(), r.size(), implRepo.getStoragePath(df));
 							throw new HgInvalidFileException(msg, null).setFileName(fname);
 						}
@@ -410,7 +408,7 @@
 	}
 	
 	// XXX refactor checkLocalStatus methods in more OO way
-	private void checkLocalStatusAgainstBaseRevision(Set<Path> baseRevNames, ManifestRevision collect, int baseRevision, Path fname, FileInfo f, HgStatusInspector inspector) {
+	private void checkLocalStatusAgainstBaseRevision(Set<Path> baseRevNames, ManifestRevision collect, int baseRevision, Path fname, FileInfo f, HgStatusInspector inspector) throws HgRuntimeException {
 		// fname is in the dirstate, either Normal, Added, Removed or Merged
 		Nodeid nid1 = collect.nodeid(fname);
 		HgManifest.Flags flags = collect.flags(fname);
@@ -496,7 +494,7 @@
 			// only those left in baseRevNames after processing are reported as removed 
 		}
 
-		// TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
+		// TODO [post-1.1] think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
 		// we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively 
 		// cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: 
 		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
@@ -504,7 +502,7 @@
 		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
 	}
 
-	private boolean areTheSame(FileInfo f, HgDataFile dataFile, Nodeid revision) throws HgInvalidFileException {
+	private boolean areTheSame(FileInfo f, HgDataFile dataFile, Nodeid revision) throws HgRuntimeException {
 		// XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
 		ByteArrayChannel bac = new ByteArrayChannel();
 		try {
@@ -591,13 +589,7 @@
 		} catch (IOException ex) {
 			throw new HgInvalidFileException("File comparison failed", ex).setFileName(p);
 		} finally {
-			if (is != null) {
-				try {
-					is.close();
-				} catch (IOException ex) {
-					repo.getSessionContext().getLog().dump(getClass(), Info, ex, null);
-				}
-			}
+			new FileUtils(repo.getSessionContext().getLog(), this).closeQuietly(is);
 		}
 	}
 
@@ -624,16 +616,7 @@
 	}
 	
 	private boolean checkFlagsEqual(FileInfo f, int dirstateFileMode) {
-		// source/include/linux/stat.h
-		final int S_IFLNK = 0120000, S_IXUSR = 00100;
-		// TODO post-1.0 HgManifest.Flags.parse(int)
-		if ((dirstateFileMode & S_IFLNK) == S_IFLNK) {
-			return checkFlagsEqual(f, HgManifest.Flags.Link);
-		}
-		if ((dirstateFileMode & S_IXUSR) == S_IXUSR) {
-			return checkFlagsEqual(f, HgManifest.Flags.Exec);
-		}
-		return checkFlagsEqual(f, HgManifest.Flags.RegularFile); // no flags
+		return checkFlagsEqual(f, HgManifest.Flags.parse(dirstateFileMode)); 
 	}
 
 	/**
--- a/src/org/tmatesoft/hg/repo/Revlog.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/Revlog.java	Wed Jul 10 11:48:55 2013 +0200
@@ -30,6 +30,7 @@
 import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.IntMap;
 import org.tmatesoft.hg.internal.Preview;
+import org.tmatesoft.hg.internal.RevisionLookup;
 import org.tmatesoft.hg.internal.RevlogStream;
 import org.tmatesoft.hg.util.Adaptable;
 import org.tmatesoft.hg.util.ByteChannel;
@@ -53,8 +54,11 @@
 
 	private final HgRepository repo;
 	protected final RevlogStream content;
+	protected final boolean useRevisionLookup;
+	protected RevisionLookup revisionLookup;
+	private final RevlogStream.Observer revisionLookupCleaner;
 
-	protected Revlog(HgRepository hgRepo, RevlogStream contentStream) {
+	protected Revlog(HgRepository hgRepo, RevlogStream contentStream, boolean needRevisionLookup) {
 		if (hgRepo == null) {
 			throw new IllegalArgumentException();
 		}
@@ -63,12 +67,25 @@
 		}
 		repo = hgRepo;
 		content = contentStream;
+		useRevisionLookup = needRevisionLookup;
+		if (needRevisionLookup) {
+			revisionLookupCleaner = new RevlogStream.Observer() {
+				
+				public void reloaded(RevlogStream src) {
+					revisionLookup = null;
+				}
+			};
+		} else {
+			revisionLookupCleaner = null;
+		}
 	}
 	
 	// invalid Revlog
 	protected Revlog(HgRepository hgRepo) {
 		repo = hgRepo;
 		content = null;
+		useRevisionLookup = false;
+		revisionLookupCleaner = null;
 	}
 
 	public final HgRepository getRepo() {
@@ -77,7 +94,8 @@
 
 	/**
 	 * @return total number of revisions kept in this revlog
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
 	public final int getRevisionCount() throws HgRuntimeException {
 		return content.revisionCount();
@@ -85,7 +103,8 @@
 	
 	/**
 	 * @return index of last known revision, a.k.a. {@link HgRepository#TIP}, or {@link HgRepository#NO_REVISION} if revlog is empty
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
 	public final int getLastRevision() throws HgRuntimeException {
 		// although old code gives correct result when revlog is empty (NO_REVISION deliberately == -1), 
@@ -100,7 +119,9 @@
 	 * @param revisionIndex index of the entry in this revlog, may be {@link HgRepository#TIP}
 	 * @return revision nodeid of the entry
 	 * 
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidRevisionException if any supplied revision doesn't identify revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
 	public final Nodeid getRevision(int revisionIndex) throws HgRuntimeException {
 		// XXX cache nodeids? Rather, if context.getCache(this).getRevisionMap(create == false) != null, use it
@@ -113,7 +134,9 @@
 	 * <li>ordering of the revisions in the return list is unspecified, it's likely won't match that of the method argument
 	 * <li>supplied array get modified (sorted)</ul>
 	 * @return list of mapped revisions in no particular order
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidRevisionException if any supplied revision doesn't identify revision from this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
 	public final List<Nodeid> getRevisions(int... revisions) throws HgRuntimeException {
 		ArrayList<Nodeid> rv = new ArrayList<Nodeid>(revisions.length);
@@ -122,7 +145,7 @@
 		return rv;
 	}
 	
-	/*package-local*/ void getRevisionsInternal(final List<Nodeid> retVal, int[] sortedRevs) throws HgInvalidRevisionException, HgInvalidControlFileException {
+	/*package-local*/ void getRevisionsInternal(final List<Nodeid> retVal, int[] sortedRevs) throws HgRuntimeException {
 		// once I have getRevisionMap and may find out whether it is avalable from cache,
 		// may use it, perhaps only for small number of revisions
 		content.iterate(sortedRevs, false, new RevlogStream.Inspector() {
@@ -142,10 +165,12 @@
 	 * 
 	 * @param nid revision to look up 
 	 * @return revision local index in this revlog
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidRevisionException if revision was not found in this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
 	public final int getRevisionIndex(Nodeid nid) throws HgRuntimeException {
-		int revision = content.findRevisionIndex(nid);
+		final int revision = doFindWithCache(nid);
 		if (revision == BAD_REVISION) {
 			// using toString() to identify revlog. HgDataFile.toString includes path, HgManifest and HgChangelog instances 
 			// are fine with default (class name)
@@ -155,15 +180,38 @@
 		return revision;
 	}
 	
+	private int doFindWithCache(Nodeid nid) throws HgRuntimeException {
+		if (useRevisionLookup) {
+			if (revisionLookup == null || content.shallDropDerivedCaches()) {
+				content.detach(revisionLookupCleaner);
+				setRevisionLookup(RevisionLookup.createFor(content));
+			}
+			return revisionLookup.findIndex(nid);
+		} else {
+			return content.findRevisionIndex(nid);
+		}
+	}
+	
+	/**
+	 * use selected helper for revision lookup, register appropriate listeners to clear cache on revlog changes
+	 * @param rl not <code>null</code>
+	 */
+	protected void setRevisionLookup(RevisionLookup rl) {
+		assert rl != null;
+		revisionLookup = rl;
+		content.attach(revisionLookupCleaner);
+	}
+	
 	/**
 	 * Note, {@link Nodeid#NULL} nodeid is not reported as known in any revlog.
 	 * 
 	 * @param nodeid
 	 * @return <code>true</code> if revision is part of this revlog
-	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
 	public final boolean isKnown(Nodeid nodeid) throws HgRuntimeException {
-		final int rn = content.findRevisionIndex(nodeid);
+		final int rn = doFindWithCache(nodeid);
 		if (BAD_REVISION == rn) {
 			return false;
 		}
@@ -180,13 +228,14 @@
 	 * @param nodeid revision to retrieve
 	 * @param sink data destination
 	 * 
-	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog
-	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
+	 * @see #rawContent(int, ByteChannel)
+	 * 
 	 * @throws CancelledException if content retrieval operation was cancelled
-	 * 
-	 * @see #rawContent(int, ByteChannel)
+	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	protected void rawContent(Nodeid nodeid, ByteChannel sink) throws HgInvalidControlFileException, CancelledException, HgInvalidRevisionException {
+	protected void rawContent(Nodeid nodeid, ByteChannel sink) throws CancelledException, HgRuntimeException {
 		rawContent(getRevisionIndex(nodeid), sink);
 	}
 	
@@ -196,11 +245,12 @@
 	 * @param revisionIndex index of this revlog change (not a changelog revision index), non-negative. From predefined constants, only {@link HgRepository#TIP} makes sense.
 	 * @param sink data destination
 	 * 
-	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog
-	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
 	 * @throws CancelledException if content retrieval operation was cancelled
+	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog. <em>Runtime exception</em>
+	 * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
+	 * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
 	 */
-	protected void rawContent(int revisionIndex, ByteChannel sink) throws HgInvalidControlFileException, CancelledException, HgInvalidRevisionException {
+	protected void rawContent(int revisionIndex, ByteChannel sink) throws CancelledException, HgRuntimeException {
 		if (sink == null) {
 			throw new IllegalArgumentException();
 		}
@@ -212,7 +262,7 @@
 		} catch (IOException ex) {
 			HgInvalidControlFileException e = new HgInvalidControlFileException(String.format("Access to revision %d content failed", revisionIndex), ex, null);
 			e.setRevisionIndex(revisionIndex);
-			// TODO post 1.0 e.setFileName(content.getIndexFile() or this.getHumanFriendlyPath()) - shall decide whether 
+			// TODO [post 1.1] e.setFileName(content.getIndexFile() or this.getHumanFriendlyPath()) - shall decide whether 
 			// protected abstract getHFPath() with impl in HgDataFile, HgManifest and HgChangelog or path is data of either Revlog or RevlogStream
 			// Do the same (add file name) below
 			throw e;
@@ -309,7 +359,7 @@
 		content.iterate(_start, end, false, new RevlogStream.Inspector() {
 			private int i = 0;
 			
-			public void next(int revisionIndex, int actualLen, int baseRevIndex, int linkRevIndex, int parent1RevIndex, int parent2RevIndex, byte[] nodeid, DataAccess data) {
+			public void next(int revisionIndex, int actualLen, int baseRevIndex, int linkRevIndex, int parent1RevIndex, int parent2RevIndex, byte[] nodeid, DataAccess data) throws HgRuntimeException {
 				Nodeid nid = Nodeid.fromBinary(nodeid, 0);
 				if (revisionInsp != null) {
 					revisionInsp.next(revisionIndex, nid, linkRevIndex);
@@ -336,7 +386,7 @@
 		});
 		if (parentInsp != null && _start > 0) {
 			assert missingParents.size() > 0; // in fact, more relaxed than assert. rather 'assume'
-			// TODO int[] IntMap#keys() or even sort of iterator that can modify values
+			// TODO [post-1.1] int[] IntMap#keys() or even sort of iterator that can modify values
 			for (int k = missingParents.firstKey(), l = missingParents.lastKey(); k <= l; k++) {
 				if (missingParents.containsKey(k)) {
 					Nodeid nid = getRepo().getChangelog().getRevision(k);
@@ -378,23 +428,24 @@
 
 	@Experimental
 	public interface RevisionInspector extends Inspector {
-		void next(int revisionIndex, Nodeid revision, int linkedRevisionIndex);
+		void next(int revisionIndex, Nodeid revision, int linkedRevisionIndex) throws HgRuntimeException;
 	}
 
 	@Experimental
 	public interface ParentInspector extends Inspector {
 		// XXX document whether parentX is -1 or a constant (BAD_REVISION? or dedicated?)
-		void next(int revisionIndex, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2);
+		void next(int revisionIndex, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2) throws HgRuntimeException;
 	}
 	
-	protected HgParentChildMap<? extends Revlog> getParentWalker() {
+	protected HgParentChildMap<? extends Revlog> getParentWalker() throws HgRuntimeException {
 		HgParentChildMap<Revlog> pw = new HgParentChildMap<Revlog>(this);
 		pw.init();
 		return pw;
 	}
 	
 	/*
-	 * class with cancel and few other exceptions support. TODO consider general superclass to share with e.g. HgManifestCommand.Mediator
+	 * class with cancel and few other exceptions support. 
+	 * TODO [post-1.1] consider general superclass to share with e.g. HgManifestCommand.Mediator
 	 */
 	protected abstract static class ErrorHandlingInspector implements RevlogStream.Inspector, CancelSupport {
 		private Exception failure;
--- a/src/org/tmatesoft/hg/repo/ext/MqManager.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/ext/MqManager.java	Wed Jul 10 11:48:55 2013 +0200
@@ -27,11 +27,11 @@
 import java.util.List;
 import java.util.Map;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.LineReader;
 import org.tmatesoft.hg.repo.HgInvalidControlFileException;
-import org.tmatesoft.hg.repo.HgInvalidFileException;
 import org.tmatesoft.hg.util.LogFacility;
 import org.tmatesoft.hg.util.Path;
 
@@ -39,9 +39,6 @@
  * Mercurial Queues Support. 
  * Access to MqExtension functionality.
  * 
- * FIXME check we don't hold any mq files for too long, close them, use
- * the same lock mechanism as mq does (if any). Check if MQ uses Mercurial's store lock
- * 
  * @since 1.1
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
@@ -65,6 +62,8 @@
 	 * @return <code>this</code> for convenience
 	 */
 	public MqManager refresh() throws HgInvalidControlFileException {
+		// MQ doesn't seem to use any custom lock mechanism.
+		// MQ uses Mercurial's wc/store lock when updating repository (strip/new queue)
 		applied = allKnown = Collections.emptyList();
 		queueNames = Collections.emptyList();
 		final LogFacility log = repo.getSessionContext().getLog();
@@ -137,10 +136,8 @@
 					allKnown.add(pr);
 				}
 			}
-		} catch (HgInvalidFileException ex) {
-			HgInvalidControlFileException th = new HgInvalidControlFileException(ex.getMessage(), ex.getCause(), ex.getFile());
-			th.setStackTrace(ex.getStackTrace());
-			throw th;
+		} catch (HgIOException ex) {
+			throw new HgInvalidControlFileException(ex, true);
 		}
 		return this;
 	}
--- a/src/org/tmatesoft/hg/repo/ext/Rebase.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/ext/Rebase.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -28,7 +28,6 @@
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.LineReader;
-import org.tmatesoft.hg.repo.HgInvalidFileException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
 
 /**
@@ -94,8 +93,6 @@
 			throw new HgIOException("Bad format of rebase state file", f);
 		} catch (HgBadNodeidFormatException ex) {
 			throw new HgIOException("Bad format of rebase state file", ex, f);
-		} catch (HgInvalidFileException ex) {
-			throw new HgIOException("Bad format of rebase state file", ex, f);
 		}
 		return this;
 	}
--- a/src/org/tmatesoft/hg/util/FileWalker.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/util/FileWalker.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -41,19 +41,25 @@
 	private RegularFileInfo nextFile;
 	private Path nextPath;
 
-	// TODO FileWalker to accept SessionContext.Source and SessionContext to implement SessionContext.Source
-	// (if it doesn't break binary compatibility)
 	public FileWalker(SessionContext ctx, File dir, Path.Source pathFactory) {
 		this(ctx, dir, pathFactory, null);
 	}
+	
+	/**
+	 * @see FileWalker#FileWalker(SessionContext, File, Path.Source, Matcher)
+	 */
+	public FileWalker(SessionContext.Source ctxSource, File dir, Path.Source pathFactory, Path.Matcher scopeMatcher) {
+		this(ctxSource.getSessionContext(), dir, pathFactory, scopeMatcher);
+	}
 
 	/**
+	 * Implementation of {@link FileIterator} with regular {@link java.io.File}.
 	 * 
-	 * @param dir
-	 * @param pathFactory
+	 * @param dir directory to start at, not <code>null</code>
+	 * @param pathFactory factory to create {@link Path} instances, not <code>null</code>
 	 * @param scopeMatcher - this matcher shall be capable to tell not only files of interest, but
 	 * also whether directories shall be traversed or not (Paths it gets in {@link Path.Matcher#accept(Path)} may 
-	 * point to directories)   
+	 * point to directories); may be <code>null</code>
 	 */
 	public FileWalker(SessionContext ctx, File dir, Path.Source pathFactory, Path.Matcher scopeMatcher) {
 		sessionContext = ctx;
--- a/src/org/tmatesoft/hg/util/Path.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/util/Path.java	Wed Jul 10 11:48:55 2013 +0200
@@ -167,7 +167,7 @@
 		}
 		String p = path.toString();
 		if (runningOnWindows && p.indexOf('\\') != -1) {
-			throw new IllegalArgumentException();
+			throw new IllegalArgumentException(String.format("Path '%s' contains illegal char at %d", p, p.indexOf('\\')));
 		}
 		Path rv = new Path(p);
 		return rv;
--- a/src/org/tmatesoft/hg/util/RegularFileInfo.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/src/org/tmatesoft/hg/util/RegularFileInfo.java	Wed Jul 10 11:48:55 2013 +0200
@@ -66,7 +66,7 @@
 	}
 
 	public int lastModified() {
-		// TODO post-1.0 for symlinks, this returns incorrect mtime of the target file, not that of link itself
+		// TODO [post-1.1] for symlinks, this returns incorrect mtime of the target file, not that of link itself
 		// Besides, timestame if link points to non-existing file is 0.
 		// However, it result only in slowdown in WCStatusCollector, as it need to perform additional content check
 		return (int) (file.lastModified() / 1000);
@@ -84,6 +84,10 @@
 			if (isSymlink()) {
 				return new ByteArrayReadableChannel(getLinkTargetBytes());
 			} else {
+				// TODO [2.0 API break]  might be good idea replace channel with smth 
+				// else, to ensure #close() disposes FileDescriptor. Now
+				// FD has usage count of two (new FileInputStream + getChannel),
+				// and FileChannel#close decrements only 1, second has to wait FIS#finalize() 
 				return new FileInputStream(file).getChannel();
 			}
 		} catch (FileNotFoundException ex) {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/included.rc	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,6 @@
+[section1]
+key2 = alternative value 2
+
+
+[section2]
+key1=value 1-2 # comment 
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sample.rc	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,15 @@
+[section1]
+# comment
+key1=value 1
+key2=value 2
+key3=value 3#comment
+
+%include ./included.rc
+
+[section3]
+key1=value 1-3
+
+[section1]
+key4=value 4
+%unset key1 
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/ComplexTest.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.tmatesoft.hg.core.HgAddRemoveCommand;
+import org.tmatesoft.hg.core.HgCheckoutCommand;
+import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgInitCommand;
+import org.tmatesoft.hg.core.HgRevertCommand;
+import org.tmatesoft.hg.repo.HgManifest;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.Path;
+
+/**
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class ComplexTest {
+	
+	@Rule
+	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
+
+	/**
+	 * Regular work sequence with checkout, add, remove, revert and commit
+	 */
+	@Test
+	public void testLocalScenario1() throws Exception {
+		File repoLoc = RepoUtils.createEmptyDir("composite-scenario-1");
+		// init empty
+		HgRepository hgRepo = new HgInitCommand().location(repoLoc).revlogV1().execute();
+		assertFalse("[sanity]", hgRepo.isInvalid());
+		assertEquals("[sanity]", 0, hgRepo.getChangelog().getRevisionCount());
+		// add 2 files
+		Path fa = Path.create("a"), fb = Path.create("b");
+		final File fileA = new File(repoLoc, fa.toString());
+		final File fileB = new File(repoLoc, fb.toString());
+		RepoUtils.createFile(fileA, "first file");
+		RepoUtils.createFile(fileB, "second file");
+		new HgAddRemoveCommand(hgRepo).add(fa, fb).execute();
+		new HgCommitCommand(hgRepo).message("FIRST").execute();
+		// add one more file
+		// remove one initial file
+		Path fc = Path.create("c");
+		final File fileC = new File(repoLoc, fc.toString());
+		RepoUtils.createFile(fileC, "third file");
+		fileB.delete();
+		// TODO HgAddRemoveCommand needs #copy(from, to) method 
+		new HgAddRemoveCommand(hgRepo).add(fc).remove(fb).execute();
+		new HgCommitCommand(hgRepo).message("SECOND").execute();
+		//
+		assertEquals(2, hgRepo.getChangelog().getRevisionCount());
+		errorCollector.assertEquals("SECOND", hgRepo.getCommitLastMessage());
+		// checkout previous version
+		new HgCheckoutCommand(hgRepo).changeset(0).clean(true).execute();
+		assertTrue(fileA.isFile());
+		assertTrue(fileB.isFile());
+		assertFalse(fileC.isFile());
+		// branch/two heads
+		RepoUtils.modifyFileAppend(fileA, "A1");
+		RepoUtils.modifyFileAppend(fileB, "B1");
+		new HgCommitCommand(hgRepo).message("THIRD").execute();
+		//
+		new HgCheckoutCommand(hgRepo).changeset(1).clean(true).execute();
+		assertTrue(fileA.isFile());
+		assertFalse(fileB.isFile());
+		assertTrue(fileC.isFile());
+		RepoUtils.modifyFileAppend(fileA, "A2");
+		RepoUtils.modifyFileAppend(fileC, "C1");
+		new HgRevertCommand(hgRepo).changeset(1).file(fa).execute();
+		errorCollector.assertTrue(new File(fileA.getParent(), fileA.getName() + ".orig").isFile());
+		new HgCommitCommand(hgRepo).message("FOURTH").execute();
+		// TODO merge and HgMergeCommand
+		
+		errorCollector.assertEquals(2, hgRepo.getFileNode(fa).getRevisionCount());
+		errorCollector.assertEquals(2, hgRepo.getFileNode(fb).getRevisionCount());
+		errorCollector.assertEquals(2, hgRepo.getFileNode(fc).getRevisionCount());
+		final HgManifest mf = hgRepo.getManifest();
+		errorCollector.assertEquals(mf.getFileRevision(0, fa), mf.getFileRevision(3, fa)); // "A2" was reverted
+	}
+}
--- a/test/org/tmatesoft/hg/test/Configuration.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/Configuration.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -23,6 +23,7 @@
 import java.util.Arrays;
 import java.util.List;
 
+import org.tmatesoft.hg.core.HgException;
 import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgRemoteRepository;
 import org.tmatesoft.hg.repo.HgRepository;
@@ -67,7 +68,7 @@
 	}
 
 	// fails if repo not found
-	public HgRepository find(String key) throws Exception {
+	public HgRepository find(String key) throws HgException {
 		HgRepository rv = lookup.detect(new File(getRoot(), key));
 		assertNotNull(rv);
 		assertFalse(rv.isInvalid());
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/HgServer.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Wraps hg server
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+class HgServer {
+	private Process serverProcess;
+	private boolean publish = true;
+	
+	public HgServer publishing(boolean pub) {
+		publish = pub;
+		return this;
+	}
+
+	public HgServer start(File dir) throws IOException, InterruptedException {
+		if (serverProcess != null) {
+			stop();
+		}
+		List<String> cmdline = new ArrayList<String>();
+		cmdline.add("hg");
+		cmdline.add("--config");
+		cmdline.add("web.allow_push=*");
+		cmdline.add("--config");
+		cmdline.add("web.push_ssl=False");
+		cmdline.add("--config");
+		cmdline.add("server.validate=True");
+		cmdline.add("--config");
+		cmdline.add(String.format("web.port=%d", port()));
+		if (!publish) {
+			cmdline.add("--config");
+			cmdline.add("phases.publish=False");
+		}
+		cmdline.add("serve");
+		serverProcess = new ProcessBuilder(cmdline).directory(dir).start();
+		Thread.sleep(500);
+		return this;
+	}
+	
+	public URL getURL() throws MalformedURLException {
+		return new URL(String.format("http://localhost:%d/", port()));
+	}
+
+	public int port() {
+		return 9090;
+	}
+	
+	public void stop() {
+		if (serverProcess == null) {
+			return;
+		}
+		// if Process#destroy() doesn't perform well with scripts and child processes
+		// may need to write server pid to a file and send a kill <pid> here
+		serverProcess.destroy();
+		serverProcess = null;
+	}
+}
\ No newline at end of file
--- a/test/org/tmatesoft/hg/test/MapTagsToFileRevisions.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/MapTagsToFileRevisions.java	Wed Jul 10 11:48:55 2013 +0200
@@ -26,6 +26,7 @@
 import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.repo.HgTags;
 import org.tmatesoft.hg.repo.HgTags.TagInfo;
 import org.tmatesoft.hg.repo.HgRevisionMap;
@@ -160,7 +161,7 @@
 			final Map<Nodeid, Nodeid> changesetToNodeid_3 = new HashMap<Nodeid, Nodeid>();
 			fileNode.indexWalk(0, TIP, new HgDataFile.RevisionInspector() {
 	
-				public void next(int fileRevisionIndex, Nodeid revision, int linkedRevisionIndex) {
+				public void next(int fileRevisionIndex, Nodeid revision, int linkedRevisionIndex) throws HgRuntimeException {
 					changesetToNodeid_3.put(clog.getRevision(linkedRevisionIndex), revision);
 				}
 			});
@@ -273,7 +274,7 @@
 		return tagLocalRevs;
 	}
 
-	public void collectTagsPerFile() throws HgException, CancelledException {
+	public void collectTagsPerFile() throws HgException, CancelledException, HgRuntimeException {
 		final long start = System.currentTimeMillis();
 		final HgRepository repository = new HgLookup().detect(new File("/home/artem/hg/cpython"));
 		final HgTags tags = repository.getTags();
@@ -301,7 +302,7 @@
 		
 	// Approach 1. Build map with all files, their revisions and corresponding tags
 	//
-	private void collectTagsPerFile_Approach_1(final HgRevisionMap<HgChangelog> clogrmap, final int[] tagLocalRevs, final TagInfo[] allTags, Path targetPath) throws HgException {
+	private void collectTagsPerFile_Approach_1(final HgRevisionMap<HgChangelog> clogrmap, final int[] tagLocalRevs, final TagInfo[] allTags, Path targetPath) throws HgException, IllegalArgumentException, HgRuntimeException {
 		HgRepository repository = clogrmap.getRepo();
 		final long start = System.currentTimeMillis();
 		// file2rev2tag value is array of revisions, always of allTags.length. Revision index in the array
@@ -373,7 +374,7 @@
 		}
 	}
 	
-	private void collectTagsPerFile_Approach_2(HgRepository repository, final int[] tagLocalRevs, final IntMap<List<TagInfo>> tagRevIndex2TagInfo, Path targetPath) throws HgException {
+	private void collectTagsPerFile_Approach_2(HgRepository repository, final int[] tagLocalRevs, final IntMap<List<TagInfo>> tagRevIndex2TagInfo, Path targetPath) throws HgException, HgRuntimeException {
 		//
 		// Approach 2. No all-file map. Collect file revisions recorded at the time of tagging,
 		// then for each file revision check if it is among those above, and if yes, take corresponding tags
@@ -457,7 +458,7 @@
 		}
 	}
 	
-	public static void main2(String[] args) throws HgCallbackTargetException, HgException, CancelledException {
+	public static void main2(String[] args) throws HgCallbackTargetException, HgException, CancelledException, HgRuntimeException {
 		final HgRepository repository = new HgLookup().detect(new File("/temp/hg/cpython"));
 		final Path targetPath = Path.create("README");
 		final HgTags tags = repository.getTags();
--- a/test/org/tmatesoft/hg/test/RepoUtils.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/RepoUtils.java	Wed Jul 10 11:48:55 2013 +0200
@@ -18,16 +18,27 @@
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.tmatesoft.hg.internal.RequiresFile.*;
+import static org.tmatesoft.hg.util.LogFacility.Severity.Debug;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
 
-import org.tmatesoft.hg.internal.RepoInitializer;
+import junit.framework.Assert;
+
+import org.tmatesoft.hg.core.HgException;
+import org.tmatesoft.hg.core.HgInitCommand;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.FileUtils;
+import org.tmatesoft.hg.internal.StreamLogFacility;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.CancelledException;
 
 /**
  * 
@@ -36,28 +47,34 @@
  */
 public class RepoUtils {
 
-	static File initEmptyTempRepo(String dirName) throws IOException {
+	static File initEmptyTempRepo(String dirName) throws IOException, HgException {
 		File dest = createEmptyDir(dirName);
-		RepoInitializer ri = new RepoInitializer();
-		ri.setRequires(STORE | FNCACHE | DOTENCODE);
-		ri.initEmptyRepository(new File(dest, ".hg"));
+		try {
+			new HgInitCommand().location(dest).revlogV1().execute();
+		} catch (CancelledException ex) {
+			Assert.fail(ex.toString());
+		}
 		return dest;
 	}
 
 	static File createEmptyDir(String dirName) throws IOException {
 		File dest = new File(Configuration.get().getTempDir(), dirName);
 		if (dest.exists()) {
-			TestClone.rmdir(dest);
+			rmdir(dest);
 		}
 		dest.mkdirs();
 		return dest;
 	}
 
-	static File cloneRepoToTempLocation(String configRepoName, String name, boolean noupdate) throws Exception, InterruptedException {
+	static File cloneRepoToTempLocation(String configRepoName, String name, boolean noupdate) throws HgException, IOException, InterruptedException {
 		return cloneRepoToTempLocation(Configuration.get().find(configRepoName), name, noupdate);
 	}
 
 	static File cloneRepoToTempLocation(HgRepository repo, String name, boolean noupdate) throws IOException, InterruptedException {
+		return cloneRepoToTempLocation(repo.getWorkingDir(), name, noupdate, false);
+	}
+
+	static File cloneRepoToTempLocation(File repoLoc, String name, boolean noupdate, boolean usePull) throws IOException, InterruptedException {
 		File testRepoLoc = createEmptyDir(name);
 		ExecHelper eh = new ExecHelper(new OutputParser.Stub(), testRepoLoc.getParentFile());
 		ArrayList<String> cmd = new ArrayList<String>();
@@ -66,12 +83,54 @@
 		if (noupdate) {
 			cmd.add("--noupdate");
 		}
-		cmd.add(repo.getWorkingDir().toString());
+		if (usePull) {
+			cmd.add("--pull");
+		}
+		cmd.add(repoLoc.toString());
 		cmd.add(testRepoLoc.getName());
 		eh.run(cmd.toArray(new String[cmd.size()]));
 		assertEquals("[sanity]", 0, eh.getExitValue());
 		return testRepoLoc;
 	}
+	
+	static File copyRepoToTempLocation(String configRepoName, String newRepoName) throws HgException, IOException {
+		File testRepoLoc = createEmptyDir(newRepoName);
+		final File srcDir = Configuration.get().find(configRepoName).getWorkingDir();
+		Iterator<File> it = new Iterator<File>() {
+			private final LinkedList<File> queue = new LinkedList<File>();
+			{
+				queue.addAll(Arrays.asList(srcDir.listFiles()));
+			}
+			public boolean hasNext() {
+				return !queue.isEmpty();
+			}
+			public File next() {
+				File n = queue.removeFirst();
+				if (n.isDirectory()) {
+					queue.addAll(Arrays.asList(n.listFiles()));
+				}
+				return n;
+			}
+			public void remove() {
+				throw new UnsupportedOperationException();
+			}
+		};
+		FileUtils fu = new FileUtils(new StreamLogFacility(Debug, true, System.err), RepoUtils.class);
+		String srcPrefix = srcDir.getAbsolutePath();
+		while (it.hasNext()) {
+			File next = it.next();
+			assert next.getAbsolutePath().startsWith(srcPrefix);
+			String relPath = next.getAbsolutePath().substring(srcPrefix.length());
+			File dest = new File(testRepoLoc, relPath);
+			if (next.isDirectory()) {
+				dest.mkdir();
+			} else {
+				fu.copy(next, dest);
+				dest.setLastModified(next.lastModified());
+			}
+		}
+		return testRepoLoc;
+	}
 
 	static void modifyFileAppend(File f, Object content) throws IOException {
 		assertTrue(f.isFile());
@@ -102,4 +161,48 @@
 			fw.close();
 		}
 	}
+
+	static void exec(File wd, int expectedRetVal, String... args) throws Exception {
+		OutputParser.Stub s = new OutputParser.Stub();
+		try {
+			ExecHelper eh = new ExecHelper(s, wd);
+			eh.run(args);
+			Assert.assertEquals(expectedRetVal, eh.getExitValue());
+		} catch (Exception ex) {
+			System.err.println(s.result());
+			throw ex;
+		}
+	}
+
+	static void rmdir(File dest) throws IOException {
+		LinkedList<File> queue = new LinkedList<File>();
+		queue.addAll(Arrays.asList(dest.listFiles()));
+		while (!queue.isEmpty()) {
+			File next = queue.removeFirst();
+			if (next.isDirectory()) {
+				List<File> files = Arrays.asList(next.listFiles());
+				if (!files.isEmpty()) {
+					queue.addAll(files);
+					queue.add(next);
+				}
+				// fall through
+			} 
+			next.delete();
+		}
+		dest.delete();
+	}
+
+	static Nodeid[] allRevisions(HgRepository repo) {
+		Nodeid[] allRevs = new Nodeid[repo.getChangelog().getRevisionCount()];
+		for (int i = 0; i < allRevs.length; i++) {
+			allRevs[i] = repo.getChangelog().getRevision(i);
+		}
+		return allRevs;
+	}
+
+	static void assertHgVerifyOk(ErrorCollectorExt errorCollector, File repoLoc) throws InterruptedException, IOException {
+		ExecHelper verifyRun = new ExecHelper(new OutputParser.Stub(), repoLoc);
+		verifyRun.run("hg", "verify");
+		errorCollector.assertEquals("hg verify", 0, verifyRun.getExitValue());
+	}
 }
--- a/test/org/tmatesoft/hg/test/TestAuxUtilities.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestAuxUtilities.java	Wed Jul 10 11:48:55 2013 +0200
@@ -23,9 +23,9 @@
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.Arrays;
 
 import org.junit.Assert;
-import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.tmatesoft.hg.core.HgCatCommand;
@@ -41,15 +41,12 @@
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
-import org.tmatesoft.hg.repo.HgRepoConfig;
-import org.tmatesoft.hg.repo.HgRepoConfig.PathsSection;
-import org.tmatesoft.hg.repo.HgRepoConfig.Section;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Adaptable;
 import org.tmatesoft.hg.util.ByteChannel;
 import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
-import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.ProgressSupport;
 
@@ -66,25 +63,31 @@
 	@Test
 	public void testArrayHelper() {
 		String[] initial = {"d", "w", "k", "b", "c", "i", "a", "r", "e", "h" };
-		ArrayHelper ah = new ArrayHelper();
+		ArrayHelper<String> ah = new ArrayHelper<String>(initial);
 		String[] result = initial.clone();
-		ah.sort(result);
-		String[] restored = restore(result, ah.getReverse());
+		ah.sort(result, false, false);
+		String[] restored = restore(result, ah.getReverseIndexes());
 		assertArrayEquals(initial, restored);
 		//
 		// few elements are on the right place from the very start and do not shift during sort.
 		// make sure for them we've got correct reversed indexes as well
 		initial = new String[] {"d", "h", "c", "b", "k", "i", "a", "r", "e", "w" };
-		ah.sort(result = initial.clone());
-		restored = restore(result, ah.getReverse());
+		ah = new ArrayHelper<String>(initial);
+		ah.sort(result = new String[initial.length], true, true);
+		restored = restore(result, ah.getReverseIndexes());
 		assertArrayEquals(initial, restored);
+		for (int i = 0; i < initial.length; i++) {
+			String s = initial[i];
+			errorCollector.assertEquals(i, ah.binarySearch(s, -1));
+			errorCollector.assertEquals(Arrays.binarySearch(result, s), ah.binarySearchSorted(s));
+		}
 	}
 
 	private static String[] restore(String[] sorted, int[] sortReverse) {
 		String[] rebuilt = new String[sorted.length];
 		for (int i = 0; i < sorted.length; i++) {
 			int indexInOriginal = sortReverse[i];
-			rebuilt[indexInOriginal-1] = sorted[i];
+			rebuilt[indexInOriginal] = sorted[i];
 		}
 		return rebuilt;
 	}
@@ -290,7 +293,7 @@
 		fileNode.indexWalk(0, TIP, new HgDataFile.RevisionInspector() {
 			int i = 0;
 
-			public void next(int localRevision, Nodeid revision, int linkedRevision) {
+			public void next(int localRevision, Nodeid revision, int linkedRevision) throws HgRuntimeException {
 				assertEquals(i++, localRevision);
 				assertEquals(fileNode.getChangesetRevisionIndex(localRevision), linkedRevision);
 				assertEquals(fileNode.getRevision(localRevision), revision);
@@ -388,29 +391,6 @@
 			}
 		}
 	}
-
-
-	@Test
-	@Ignore("just a dump for now, to compare values visually")
-	public void testRepositoryConfig() throws Exception {
-		HgRepository repo = Configuration.get().own();
-		final HgRepoConfig cfg = repo.getConfiguration();
-		Assert.assertNotNull(cfg.getPaths());
-		Assert.assertNotNull(cfg.getExtensions());
-		final Section dne = cfg.getSection("does-not-exist");
-		Assert.assertNotNull(dne);
-		Assert.assertFalse(dne.exists());
-		for (Pair<String, String> p : cfg.getSection("ui")) {
-			System.out.printf("%s = %s\n", p.first(), p.second());
-		}
-		final PathsSection p = cfg.getPaths();
-		System.out.printf("Known paths: %d. default: %s(%s), default-push: %s(%s)\n", p.getKeys().size(), p.getDefault(), p.hasDefault(), p.getDefaultPush(), p.hasDefaultPush());
-		for (String k : cfg.getPaths().getKeys()) {
-			System.out.println(k);
-		}
-		Assert.assertFalse(p.hasDefault() ^ p.getDefault() != null);
-		Assert.assertFalse(p.hasDefaultPush() ^ p.getDefaultPush() != null);
-	}
 	
 	@Test
 	public void testChangelogExtrasDecode() {
--- a/test/org/tmatesoft/hg/test/TestBlame.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestBlame.java	Wed Jul 10 11:48:55 2013 +0200
@@ -42,20 +42,16 @@
 import org.junit.Test;
 import org.tmatesoft.hg.core.HgAnnotateCommand;
 import org.tmatesoft.hg.core.HgAnnotateCommand.LineInfo;
+import org.tmatesoft.hg.core.HgBlameInspector;
 import org.tmatesoft.hg.core.HgCallbackTargetException;
-import org.tmatesoft.hg.core.HgIterateDirection;
+import org.tmatesoft.hg.core.HgDiffCommand;
 import org.tmatesoft.hg.core.HgRepoFacade;
+import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.FileAnnotation;
 import org.tmatesoft.hg.internal.FileAnnotation.LineDescriptor;
 import org.tmatesoft.hg.internal.FileAnnotation.LineInspector;
 import org.tmatesoft.hg.internal.IntVector;
-import org.tmatesoft.hg.repo.HgBlameFacility;
-import org.tmatesoft.hg.repo.HgBlameFacility.AddBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.Block;
-import org.tmatesoft.hg.repo.HgBlameFacility.BlockData;
-import org.tmatesoft.hg.repo.HgBlameFacility.ChangeBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.DeleteBlock;
-import org.tmatesoft.hg.repo.HgBlameFacility.EqualBlock;
+import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgRepository;
@@ -76,10 +72,12 @@
 	public void testSingleParentBlame() throws Exception {
 		HgRepository repo = new HgLookup().detectFromWorkingDir();
 		final String fname = "src/org/tmatesoft/hg/internal/PatchGenerator.java";
-		final int checkChangeset = 539;
+		final int checkChangeset = repo.getChangelog().getRevisionIndex(Nodeid.fromAscii("946b131962521f9199e1fedbdc2487d3aaef5e46")); // 539
 		HgDataFile df = repo.getFileNode(fname);
 		ByteArrayOutputStream bos = new ByteArrayOutputStream();
-		new HgBlameFacility(df).annotateSingleRevision(checkChangeset, new DiffOutInspector(new PrintStream(bos)));
+		HgDiffCommand diffCmd = new HgDiffCommand(repo);
+		diffCmd.file(df).changeset(checkChangeset);
+		diffCmd.executeParentsAnnotate(new DiffOutInspector(new PrintStream(bos)));
 		LineGrepOutputParser gp = new LineGrepOutputParser("^@@.+");
 		ExecHelper eh = new ExecHelper(gp, null);
 		eh.run("hg", "diff", "-c", String.valueOf(checkChangeset), "-U", "0", fname);
@@ -96,10 +94,18 @@
 		HgDataFile df = repo.getFileNode(fname);
 		AnnotateRunner ar = new AnnotateRunner(df.getPath(), null);
 
-		for (int cs : new int[] { 539, 541 /*, TIP */}) {
+		final HgDiffCommand diffCmd = new HgDiffCommand(repo);
+		diffCmd.file(df).order(NewToOld);
+		final HgChangelog clog = repo.getChangelog();
+		final int[] toTest = new int[] { 
+			clog.getRevisionIndex(Nodeid.fromAscii("946b131962521f9199e1fedbdc2487d3aaef5e46")), // 539
+			clog.getRevisionIndex(Nodeid.fromAscii("1e95f48d9886abe79b9711ab371bc877ca5e773e")), // 541 
+			/*, TIP */};
+		for (int cs : toTest) {
 			ar.run(cs, false);
 			FileAnnotateInspector fa = new FileAnnotateInspector();
-			FileAnnotation.annotate(df, cs, fa);
+			diffCmd.range(0, cs);
+			diffCmd.executeAnnotate(new FileAnnotation(fa));
 			doAnnotateLineCheck(cs, ar.getLines(), Arrays.asList(fa.lineRevisions), Arrays.asList(fa.lines));
 		}
 	}
@@ -110,10 +116,12 @@
 		HgDataFile df = repo.getFileNode("file1");
 		AnnotateRunner ar = new AnnotateRunner(df.getPath(), repo.getWorkingDir());
 
+		final HgDiffCommand diffCmd = new HgDiffCommand(repo).file(df).order(NewToOld);
 		for (int cs : new int[] { 4, 6 /*, 8 see below*/, TIP}) {
 			ar.run(cs, false);
 			FileAnnotateInspector fa = new FileAnnotateInspector();
-			FileAnnotation.annotate(df, cs, fa);
+			diffCmd.range(0, cs);
+			diffCmd.executeAnnotate(new FileAnnotation(fa));
 			doAnnotateLineCheck(cs, ar.getLines(), Arrays.asList(fa.lineRevisions), Arrays.asList(fa.lines));
 		}
 		/*`hg annotate -r 8` and HgBlameFacility give different result
@@ -135,10 +143,11 @@
 	public void testComplexHistoryAnnotate() throws Exception {
 		HgRepository repo = Configuration.get().find("test-annotate");
 		HgDataFile df = repo.getFileNode("file1");
-		HgBlameFacility af = new HgBlameFacility(df);
 		ByteArrayOutputStream bos = new ByteArrayOutputStream();
 		DiffOutInspector dump = new DiffOutInspector(new PrintStream(bos));
-		af.annotate(TIP, dump, HgIterateDirection.OldToNew);
+		HgDiffCommand diffCmd = new HgDiffCommand(repo);
+		diffCmd.file(df).range(0, TIP).order(OldToNew);
+		diffCmd.executeAnnotate(dump);
 		LinkedList<String> apiResult = new LinkedList<String>(Arrays.asList(splitLines(bos.toString())));
 		
 		/*
@@ -196,7 +205,6 @@
 		HgRepository repo = Configuration.get().find("test-annotate2");
 		HgDataFile df = repo.getFileNode("file1b.txt");
 		// rev3: file1 -> file1a,  rev7: file1a -> file1b, tip: rev10
-		HgBlameFacility bf = new HgBlameFacility(df);
 		DiffOutInspector insp = new DiffOutInspector(new PrintStream(new OutputStream() {
 			@Override
 			public void write(int b) throws IOException {
@@ -207,19 +215,22 @@
 		// earlier than rev2 shall be reported as new from change3
 		int[] change_2_8_new2old = new int[] {4, 6, 3, 4, -1, 3}; 
 		int[] change_2_8_old2new = new int[] {-1, 3, 3, 4, 4, 6 };
-		bf.annotate(2, 8, insp, NewToOld);
+		final HgDiffCommand cmd = new HgDiffCommand(repo);
+		cmd.file(df);
+		cmd.range(2, 8).order(NewToOld);
+		cmd.executeAnnotate(insp);
 		Assert.assertArrayEquals(change_2_8_new2old, insp.getReportedRevisionPairs());
 		insp.reset();
-		bf.annotate(2, 8, insp, OldToNew);
+		cmd.order(OldToNew).executeAnnotate(insp);
 		Assert.assertArrayEquals(change_2_8_old2new, insp.getReportedRevisionPairs());
 		// same as 2 to 8, with addition of rev9 changes rev7  (rev6 to rev7 didn't change content, only name)
 		int[] change_3_9_new2old = new int[] {7, 9, 4, 6, 3, 4, -1, 3 }; 
 		int[] change_3_9_old2new = new int[] {-1, 3, 3, 4, 4, 6, 7, 9 };
 		insp.reset();
-		bf.annotate(3, 9, insp, NewToOld);
+		cmd.range(3, 9).order(NewToOld).executeAnnotate(insp);
 		Assert.assertArrayEquals(change_3_9_new2old, insp.getReportedRevisionPairs());
 		insp.reset();
-		bf.annotate(3, 9, insp, OldToNew);
+		cmd.order(OldToNew).executeAnnotate(insp);
 		Assert.assertArrayEquals(change_3_9_old2new, insp.getReportedRevisionPairs());
 	}
 
@@ -261,8 +272,49 @@
 			errorCollector.assertEquals(hgAnnotateLine.trim(), apiLine);
 		}
 	}
+	
+	
+	@Test
+	public void testDiffTwoRevisions() throws Exception {
+		HgRepository repo = Configuration.get().find("test-annotate");
+		HgDataFile df = repo.getFileNode("file1");
+		LineGrepOutputParser gp = new LineGrepOutputParser("^@@.+");
+		ExecHelper eh = new ExecHelper(gp, repo.getWorkingDir());
+		int[] toTest = { 3, 4, 5 }; // p1 ancestry line, p2 ancestry line, not in ancestry line
+		final HgDiffCommand diffCmd = new HgDiffCommand(repo).file(df);
+		for (int cs : toTest) {
+			ByteArrayOutputStream bos = new ByteArrayOutputStream();
+			diffCmd.range(cs, 8).executeDiff(new DiffOutInspector(new PrintStream(bos)));
+			eh.run("hg", "diff", "-r", String.valueOf(cs), "-r", "8", "-U", "0", df.getPath().toString());
+			//
+			String[] apiResult = splitLines(bos.toString());
+			String[] expected = splitLines(gp.result());
+			Assert.assertArrayEquals("diff -r " + cs + "-r 8", expected, apiResult);
+			gp.reset();
+		}
+	}
+	
+	/**
+	 * Make sure boundary values are ok (down to BlameHelper#prepare and FileHistory) 
+	 */
+	@Test
+	public void testAnnotateFirstFileRev() throws Exception {
+		HgRepository repo = Configuration.get().find("test-annotate");
+		HgDataFile df = repo.getFileNode("file1");
+		LineGrepOutputParser gp = new LineGrepOutputParser("^@@.+");
+		ExecHelper eh = new ExecHelper(gp, repo.getWorkingDir());
+		eh.run("hg", "diff", "-c", "0", "-U", "0", df.getPath().toString());
+		//
+		ByteArrayOutputStream bos = new ByteArrayOutputStream();
+		HgDiffCommand diffCmd = new HgDiffCommand(repo).file(df);
+		diffCmd.changeset(0).executeParentsAnnotate(new DiffOutInspector(new PrintStream(bos)));
+		//
+		String[] apiResult = splitLines(bos.toString());
+		String[] expected = splitLines(gp.result());
+		Assert.assertArrayEquals(expected, apiResult);
+	}
 
-	// FIXME HgWorkingCopyStatusCollector (and HgStatusCollector), with their ancestors (rev 59/69) have examples
+	// TODO HgWorkingCopyStatusCollector (and HgStatusCollector), with their ancestors (rev 59/69) have examples
 	// of *incorrect* assignment of common lines (like "}") - our impl doesn't process common lines in any special way
 	// while original diff lib does. Would be nice to behave as close to original, as possible.
 	
@@ -292,56 +344,15 @@
 	}
 	
 	
-	private void aaa() throws Exception {
-		HgRepository repo = new HgLookup().detectFromWorkingDir();
-		final String fname = "src/org/tmatesoft/hg/internal/PatchGenerator.java";
-		final int checkChangeset = 539;
-		HgDataFile df = repo.getFileNode(fname);
-		HgBlameFacility af = new HgBlameFacility(df);
-		DiffOutInspector dump = new DiffOutInspector(System.out);
-		System.out.println("541 -> 543");
-		af.annotateSingleRevision(543, dump);
-		System.out.println("539 -> 541");
-		af.annotateSingleRevision(541, dump);
-		System.out.println("536 -> 539");
-		af.annotateSingleRevision(checkChangeset, dump);
-		System.out.println("531 -> 536");
-		af.annotateSingleRevision(536, dump);
-		System.out.println(" -1 -> 531");
-		af.annotateSingleRevision(531, dump);
-		
-		FileAnnotateInspector fai = new FileAnnotateInspector();
-		FileAnnotation.annotate(df, 541, fai);
-		for (int i = 0; i < fai.lineRevisions.length; i++) {
-			System.out.printf("%3d: LINE %d\n", fai.lineRevisions[i], i+1);
-		}
-	}
-
-	private void bbb() throws Exception {
-		HgRepository repo = new HgLookup().detectFromWorkingDir();
-		final String fname = "src/org/tmatesoft/hg/repo/HgManifest.java";
-		final int checkChangeset = 415;
-		HgDataFile df = repo.getFileNode(fname);
-		HgBlameFacility af = new HgBlameFacility(df);
-		DiffOutInspector dump = new DiffOutInspector(System.out);
-//		System.out.println("413 -> 415");
-//		af.diff(df, 413, 415, dump);
-//		System.out.println("408 -> 415");
-//		af.diff(df, 408, 415, dump);
-//		System.out.println("Combined (with merge):");
-//		dump.needRevisions(true);
-//		af.annotateChange(df, checkChangeset, dump);
-		dump.needRevisions(true);
-		af.annotate(checkChangeset, dump, HgIterateDirection.OldToNew);
-	}
-	
 	private void ccc() throws Throwable {
 		HgRepository repo = new HgLookup().detect("/home/artem/hg/hgtest-annotate-merge/");
 		HgDataFile df = repo.getFileNode("file.txt");
-		HgBlameFacility af = new HgBlameFacility(df);
 		DiffOutInspector dump = new DiffOutInspector(System.out);
 		dump.needRevisions(true);
-		af.annotate(8, dump, HgIterateDirection.NewToOld);
+		HgDiffCommand diffCmd = new HgDiffCommand(repo);
+		diffCmd.file(df);
+		diffCmd.range(0, 8).order(NewToOld);
+		diffCmd.executeAnnotate(dump);
 //		af.annotateSingleRevision(df, 113, dump);
 //		System.out.println();
 //		af.annotate(df, TIP, new LineDumpInspector(true), HgIterateDirection.NewToOld);
@@ -357,22 +368,19 @@
 		errorCollector.verify();
 		*/
 		FileAnnotateInspector fa = new FileAnnotateInspector();
-		FileAnnotation.annotate(df, 8, fa);
+		diffCmd.range(0, 8).order(NewToOld);
+		diffCmd.executeAnnotate(new FileAnnotation(fa));
 		for (int i = 0; i < fa.lineRevisions.length; i++) {
 			System.out.printf("%d: %s", fa.lineRevisions[i], fa.line(i) == null ? "null\n" : fa.line(i));
 		}
 	}
 
 	public static void main(String[] args) throws Throwable {
-//		System.out.println(Arrays.equals(new String[0], splitLines("")));
-//		System.out.println(Arrays.equals(new String[] { "abc" }, splitLines("abc")));
-//		System.out.println(Arrays.equals(new String[] { "a", "bc" }, splitLines("a\nbc")));
-//		System.out.println(Arrays.equals(new String[] { "a", "bc" }, splitLines("a\nbc\n")));
 		TestBlame tt = new TestBlame();
 		tt.ccc();
 	}
 
-	private static class DiffOutInspector implements HgBlameFacility.Inspector {
+	private static class DiffOutInspector implements HgBlameInspector {
 		private final PrintStream out;
 		private boolean dumpRevs;
 		private IntVector reportedRevisionPairs = new IntVector();
@@ -475,7 +483,7 @@
 		FileAnnotateInspector() {
 		}
 		
-		public void line(int lineNumber, int changesetRevIndex, BlockData lineContent, LineDescriptor ld) {
+		public void line(int lineNumber, int changesetRevIndex, HgBlameInspector.BlockData lineContent, LineDescriptor ld) {
 			if (lineRevisions == null) {
 				lineRevisions = new Integer[ld.totalLines()];
 				Arrays.fill(lineRevisions, NO_REVISION);
@@ -490,7 +498,8 @@
 		}
 	}
 
-	private static class LineDumpInspector implements HgBlameFacility.Inspector {
+	@SuppressWarnings("unused")
+	private static class LineDumpInspector implements HgBlameInspector {
 		
 		private final boolean lineByLine;
 
--- a/test/org/tmatesoft/hg/test/TestCheckout.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestCheckout.java	Wed Jul 10 11:48:55 2013 +0200
@@ -28,12 +28,15 @@
 import org.junit.Test;
 import org.tmatesoft.hg.core.HgCheckoutCommand;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.RelativePathRewrite;
 import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.util.FileInfo;
 import org.tmatesoft.hg.util.FileWalker;
 import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
+import org.tmatesoft.hg.util.PathRewrite;
 
 /**
  * 
@@ -123,8 +126,9 @@
 		File testRepoLoc = cloneRepoToTempLocation("test-flags", "test-checkout-flags", true);
 		repo = new HgLookup().detect(testRepoLoc);
 		new HgCheckoutCommand(repo).clean(true).changeset(0).execute();
-		
-		FileWalker fw = new FileWalker(repo.getSessionContext(), testRepoLoc, new Path.SimpleSource());
+
+		Path.Source pathSrc = new Path.SimpleSource(new PathRewrite.Composite(new RelativePathRewrite(testRepoLoc), repo.getToRepoPathHelper()));
+		FileWalker fw = new FileWalker(repo, testRepoLoc, pathSrc, null);
 		int execFound, linkFound, regularFound;
 		execFound = linkFound = regularFound = 0;
 		while(fw.hasNext()) {
@@ -142,10 +146,16 @@
 				regularFound++;
 			}
 		}
-		// TODO alter expected values to pass on Windows 
-		errorCollector.assertEquals("Executable files", 1, execFound);
-		errorCollector.assertEquals("Symlink files", 1, linkFound);
-		errorCollector.assertEquals("Regular files", 1, regularFound);
+		final int expectedExec, expectedLink, expectedRegular;
+		if (Internals.runningOnWindows()) {
+			expectedExec = expectedLink = 0;
+			expectedRegular = 2;
+		} else {
+			expectedExec = expectedLink = expectedRegular = 1;
+		}
+		errorCollector.assertEquals("Executable files", expectedExec, execFound);
+		errorCollector.assertEquals("Symlink files", expectedLink, linkFound);
+		errorCollector.assertEquals("Regular files", expectedRegular, regularFound);
 	}
 
 	private static final class FilesOnlyFilter implements FileFilter {
--- a/test/org/tmatesoft/hg/test/TestClone.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestClone.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -16,16 +16,16 @@
  */
 package org.tmatesoft.hg.test;
 
+import static org.tmatesoft.hg.internal.RequiresFile.*;
+
 import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
 
 import org.hamcrest.CoreMatchers;
 import org.junit.Rule;
 import org.junit.Test;
 import org.tmatesoft.hg.core.HgCloneCommand;
+import org.tmatesoft.hg.core.HgInitCommand;
+import org.tmatesoft.hg.internal.RepoInitializer;
 import org.tmatesoft.hg.repo.HgRemoteRepository;
 
 /**
@@ -56,13 +56,26 @@
 			cmd.source(hgRemote);
 			File dest = new File(tempDir, "test-clone-" + x++);
 			if (dest.exists()) {
-				rmdir(dest);
+				RepoUtils.rmdir(dest);
 			}
 			cmd.destination(dest);
 			cmd.execute();
 			verify(hgRemote, dest);
 		}
 	}
+	
+	@Test
+	public void testInitEmpty() throws Exception {
+		File repoLoc = RepoUtils.createEmptyDir("test-init");
+		new HgInitCommand().location(repoLoc).revlogV1().dotencode(false).fncache(false).execute();
+		
+		int requires = new RepoInitializer().initRequiresFromFile(new File(repoLoc, ".hg")).getRequires();
+		errorCollector.assertTrue(0 != (requires & REVLOGV1));
+		errorCollector.assertTrue(0 != (requires & STORE));
+		errorCollector.assertTrue(0 == (requires & DOTENCODE));
+		errorCollector.assertTrue(0 == (requires & FNCACHE));
+		errorCollector.assertTrue(0 == (requires & REVLOGV0));
+	}
 
 	private void verify(HgRemoteRepository hgRemote, File dest) throws Exception {
 		ExecHelper eh = new ExecHelper(new OutputParser.Stub(), dest);
@@ -73,22 +86,4 @@
 		eh.run("hg", "in", hgRemote.getLocation());
 		errorCollector.checkThat("Incoming", eh.getExitValue(), CoreMatchers.equalTo(1));
 	}
-
-	static void rmdir(File dest) throws IOException {
-		LinkedList<File> queue = new LinkedList<File>();
-		queue.addAll(Arrays.asList(dest.listFiles()));
-		while (!queue.isEmpty()) {
-			File next = queue.removeFirst();
-			if (next.isDirectory()) {
-				List<File> files = Arrays.asList(next.listFiles());
-				if (!files.isEmpty()) {
-					queue.addAll(files);
-					queue.add(next);
-				}
-				// fall through
-			} 
-			next.delete();
-		}
-		dest.delete();
-	}
 }
--- a/test/org/tmatesoft/hg/test/TestCommit.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestCommit.java	Wed Jul 10 11:48:55 2013 +0200
@@ -20,8 +20,6 @@
 import static org.tmatesoft.hg.repo.HgRepository.*;
 
 import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.List;
 
 import org.junit.Rule;
@@ -34,12 +32,20 @@
 import org.tmatesoft.hg.core.HgStatus.Kind;
 import org.tmatesoft.hg.core.HgStatusCommand;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.internal.ByteArrayChannel;
+import org.tmatesoft.hg.internal.COWTransaction;
+import org.tmatesoft.hg.internal.CommitFacility;
+import org.tmatesoft.hg.internal.DataSerializer.ByteArrayDataSource;
+import org.tmatesoft.hg.internal.DirstateReader;
 import org.tmatesoft.hg.internal.FileContentSupplier;
-import org.tmatesoft.hg.repo.CommitFacility;
+import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.Transaction;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgPhase;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRepositoryFiles;
 import org.tmatesoft.hg.util.Outcome;
 import org.tmatesoft.hg.util.Path;
 
@@ -54,7 +60,14 @@
 
 	@Rule
 	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
-
+	
+	private final Transaction.Factory trFactory = new COWTransaction.Factory();
+//	{
+//		public Transaction create(Source ctxSource) {
+//			return new Transaction.NoRollback();
+//		}
+//	};
+	
 	@Test
 	public void testCommitToNonEmpty() throws Exception {
 		File repoLoc = RepoUtils.initEmptyTempRepo("test-commit2non-empty");
@@ -62,12 +75,12 @@
 		new ExecHelper(new OutputParser.Stub(), repoLoc).run("hg", "commit", "--addremove", "-m", "FIRST");
 		//
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
-		CommitFacility cf = new CommitFacility(hgRepo, 0);
-		// FIXME test diff for processing changed newlines (ie \r\n -> \n or vice verse) - if a whole line or 
-		// just changed endings are in the patch!
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), 0);
 		HgDataFile df = hgRepo.getFileNode("file1");
-		cf.add(df, new ByteArraySupplier("hello\nworld".getBytes()));
-		Nodeid secondRev = cf.commit("SECOND");
+		cf.add(df, new ByteArrayDataSource("hello\nworld".getBytes()));
+		Transaction tr = newTransaction(hgRepo);
+		Nodeid secondRev = cf.commit("SECOND", tr);
+		tr.commit();
 		//
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).execute();
 		errorCollector.assertEquals(2, commits.size());
@@ -90,12 +103,14 @@
 		//
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
 		assertEquals("[sanity]", 0, new HgLogCommand(hgRepo).execute().size());
-		CommitFacility cf = new CommitFacility(hgRepo, NO_REVISION);
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), NO_REVISION);
 		HgDataFile df = hgRepo.getFileNode(fname);
 		final byte[] initialContent = "hello\nworld".getBytes();
-		cf.add(df, new ByteArraySupplier(initialContent));
+		cf.add(df, new ByteArrayDataSource(initialContent));
 		String comment = "commit 1";
-		Nodeid c1Rev = cf.commit(comment);
+		Transaction tr = newTransaction(hgRepo);
+		Nodeid c1Rev = cf.commit(comment,  tr);
+		tr.commit();
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).execute();
 		errorCollector.assertEquals(1, commits.size());
 		HgChangeset c1 = commits.get(0);
@@ -114,7 +129,7 @@
 	
 	@Test
 	public void testCommitIntoBranch() throws Exception {
-		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-add-remove-commit", false);
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-commit2branch", false);
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
 		HgDataFile dfD = hgRepo.getFileNode("d");
 		assertTrue("[sanity]", dfD.exists());
@@ -123,17 +138,17 @@
 		final int parentCsetRevIndex = hgRepo.getChangelog().getLastRevision();
 		HgChangeset parentCset = new HgLogCommand(hgRepo).range(parentCsetRevIndex, parentCsetRevIndex).execute().get(0);
 		assertEquals("[sanity]", DEFAULT_BRANCH_NAME, parentCset.getBranch());
+		assertEquals("[sanity]", DEFAULT_BRANCH_NAME, hgRepo.getWorkingCopyBranchName());
 		//
 		RepoUtils.modifyFileAppend(fileD, "A CHANGE\n");
-		CommitFacility cf = new CommitFacility(hgRepo, parentCsetRevIndex);
-		FileContentSupplier contentProvider = new FileContentSupplier(fileD);
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentCsetRevIndex);
+		FileContentSupplier contentProvider = new FileContentSupplier(hgRepo, fileD);
 		cf.add(dfD, contentProvider);
 		cf.branch("branch1");
-		Nodeid commitRev1 = cf.commit("FIRST");
-		contentProvider.done();
+		Transaction tr = newTransaction(hgRepo);
+		Nodeid commitRev1 = cf.commit("FIRST",  tr);
+		tr.commit();
 		//
-		// FIXME requirement to reload repository is disgusting 
-		hgRepo = new HgLookup().detect(repoLoc);
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).range(parentCsetRevIndex+1, TIP).execute();
 		assertEquals(1, commits.size());
 		HgChangeset c1 = commits.get(0);
@@ -141,7 +156,10 @@
 		errorCollector.assertEquals("branch1", c1.getBranch());
 		errorCollector.assertEquals("FIRST", c1.getComment());
 		//
-		assertHgVerifyOk(repoLoc);
+		// check if cached value in hgRepo got updated
+		errorCollector.assertEquals("branch1", hgRepo.getWorkingCopyBranchName());
+		//
+		RepoUtils.assertHgVerifyOk(errorCollector, repoLoc);
 	}
 
 	/**
@@ -155,16 +173,14 @@
 		assertTrue("[sanity]", new File(repoLoc, "d").canRead());
 		RepoUtils.createFile(new File(repoLoc, "xx"), "xyz");
 		new HgAddRemoveCommand(hgRepo).add(Path.create("xx")).remove(Path.create("d")).execute();
-		CommitFacility cf = new CommitFacility(hgRepo, hgRepo.getChangelog().getLastRevision());
-		FileContentSupplier contentProvider = new FileContentSupplier(new File(repoLoc, "xx"));
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), hgRepo.getChangelog().getLastRevision());
+		FileContentSupplier contentProvider = new FileContentSupplier(hgRepo, new File(repoLoc, "xx"));
 		cf.add(hgRepo.getFileNode("xx"), contentProvider);
 		cf.forget(hgRepo.getFileNode("d"));
-		Nodeid commitRev = cf.commit("Commit with add/remove cmd");
-		contentProvider.done();
-		// Note, working directory still points to original revision, CommitFacility doesn't update dirstate
+		Transaction tr = newTransaction(hgRepo);
+		Nodeid commitRev = cf.commit("Commit with add/remove cmd",  tr);
+		tr.commit();
 		//
-		// FIXME requirement to reload repository is disgusting 
-		hgRepo = new HgLookup().detect(repoLoc);
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).changeset(commitRev).execute();
 		HgChangeset cmt = commits.get(0);
 		errorCollector.assertEquals(1, cmt.getAddedFiles().size());
@@ -175,14 +191,14 @@
 		new HgCatCommand(hgRepo).file(Path.create("xx")).changeset(commitRev).execute(sink);
 		assertArrayEquals("xyz".getBytes(), sink.toArray());
 		//
-		assertHgVerifyOk(repoLoc);
+		RepoUtils.assertHgVerifyOk(errorCollector, repoLoc);
 	}
 	/**
 	 * perform few commits one by one, into different branches
 	 */
 	@Test
 	public void testSequentialCommits() throws Exception {
-		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-add-remove-commit", false);
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-sequential-commits", false);
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
 		HgDataFile dfD = hgRepo.getFileNode("d");
 		assertTrue("[sanity]", dfD.exists());
@@ -191,27 +207,24 @@
 		//
 		RepoUtils.modifyFileAppend(fileD, " 1 \n");
 		final int parentCsetRevIndex = hgRepo.getChangelog().getLastRevision();
-		CommitFacility cf = new CommitFacility(hgRepo, parentCsetRevIndex);
-		FileContentSupplier contentProvider = new FileContentSupplier(fileD);
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentCsetRevIndex);
+		FileContentSupplier contentProvider = new FileContentSupplier(hgRepo, fileD);
 		cf.add(dfD, contentProvider);
 		cf.branch("branch1");
-		Nodeid commitRev1 = cf.commit("FIRST");
-		contentProvider.done();
+		Transaction tr = newTransaction(hgRepo);
+		Nodeid commitRev1 = cf.commit("FIRST",  tr);
 		//
 		RepoUtils.modifyFileAppend(fileD, " 2 \n");
-		cf.add(dfD, contentProvider = new FileContentSupplier(fileD));
+		cf.add(dfD, contentProvider = new FileContentSupplier(hgRepo, fileD));
 		cf.branch("branch2");
-		Nodeid commitRev2 = cf.commit("SECOND");
-		contentProvider.done();
+		Nodeid commitRev2 = cf.commit("SECOND",  tr);
 		//
 		RepoUtils.modifyFileAppend(fileD, " 2 \n");
-		cf.add(dfD, contentProvider = new FileContentSupplier(fileD));
+		cf.add(dfD, contentProvider = new FileContentSupplier(hgRepo, fileD));
 		cf.branch(DEFAULT_BRANCH_NAME);
-		Nodeid commitRev3 = cf.commit("THIRD");
-		contentProvider.done();
+		Nodeid commitRev3 = cf.commit("THIRD",  tr);
+		tr.commit();
 		//
-		// FIXME requirement to reload repository is disgusting 
-		hgRepo = new HgLookup().detect(repoLoc);
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).range(parentCsetRevIndex+1, TIP).execute();
 		assertEquals(3, commits.size());
 		HgChangeset c1 = commits.get(0);
@@ -226,12 +239,14 @@
 		errorCollector.assertEquals("FIRST", c1.getComment());
 		errorCollector.assertEquals("SECOND", c2.getComment());
 		errorCollector.assertEquals("THIRD", c3.getComment());
-		assertHgVerifyOk(repoLoc);
+		RepoUtils.assertHgVerifyOk(errorCollector, repoLoc);
 	}
 	
 	@Test
 	public void testCommandBasics() throws Exception {
 		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-commit-cmd", false);
+		// PhasesHelper relies on file existence to tell phase enablement
+		RepoUtils.createFile(new File(repoLoc, HgRepositoryFiles.Phaseroots.getPath()), "");
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
 		HgDataFile dfB = hgRepo.getFileNode("b");
 		assertTrue("[sanity]", dfB.exists());
@@ -246,7 +261,6 @@
 		Nodeid c1 = cmd.getCommittedRevision();
 		
 		// check that modified files are no longer reported as such
-		hgRepo = new HgLookup().detect(repoLoc);
 		TestStatus.StatusCollector status = new TestStatus.StatusCollector();
 		new HgStatusCommand(hgRepo).all().execute(status);
 		errorCollector.assertTrue(status.getErrors().isEmpty());
@@ -266,65 +280,221 @@
 		errorCollector.assertTrue(r.isOk());
 		Nodeid c2 = cmd.getCommittedRevision();
 		//
-		hgRepo = new HgLookup().detect(repoLoc);
+		errorCollector.assertEquals("SECOND", hgRepo.getCommitLastMessage());
+		//
 		int lastRev = hgRepo.getChangelog().getLastRevision();
 		List<HgChangeset> csets = new HgLogCommand(hgRepo).range(lastRev-1, lastRev).execute();
 		errorCollector.assertEquals(csets.get(0).getNodeid(), c1);
 		errorCollector.assertEquals(csets.get(1).getNodeid(), c2);
 		errorCollector.assertEquals(csets.get(0).getComment(), "FIRST");
 		errorCollector.assertEquals(csets.get(1).getComment(), "SECOND");
-		assertHgVerifyOk(repoLoc);
+		RepoUtils.assertHgVerifyOk(errorCollector, repoLoc);
+		// new commits are drafts by default, check our commit respects this
+		// TODO more tests with children of changesets with draft, secret or public phases (latter - 
+		// new commit is child of public, but there are other commits with draft/secret phases - ensure they are intact)
+		assertEquals(HgPhase.Draft, HgPhase.parse(hgRepo.getConfiguration().getStringValue("phases", "new-commit", HgPhase.Draft.mercurialString())));
+		errorCollector.assertEquals(HgPhase.Draft, csets.get(0).getPhase());
+		errorCollector.assertEquals(HgPhase.Draft, csets.get(1).getPhase());
 	}
 	
-	private void assertHgVerifyOk(File repoLoc) throws InterruptedException, IOException {
-		ExecHelper verifyRun = new ExecHelper(new OutputParser.Stub(), repoLoc);
-		verifyRun.run("hg", "verify");
-		errorCollector.assertEquals("hg verify", 0, verifyRun.getExitValue());
+	@Test
+	public void testUpdateActiveBookmark() throws Exception {
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-commit-bookmark-update", false);
+		ExecHelper eh = new ExecHelper(new OutputParser.Stub(), repoLoc);
+		String activeBookmark = "bm1";
+		eh.run("hg", "bookmarks", activeBookmark);
+
+		HgRepository hgRepo = new HgLookup().detect(repoLoc);
+		assertEquals("[sanity]", activeBookmark, hgRepo.getBookmarks().getActiveBookmarkName());
+		Nodeid activeBookmarkRevision = hgRepo.getBookmarks().getRevision(activeBookmark);
+		assertEquals("[sanity]", activeBookmarkRevision, hgRepo.getWorkingCopyParents().first());
+		
+		HgDataFile dfD = hgRepo.getFileNode("d");
+		File fileD = new File(repoLoc, "d");
+		assertTrue("[sanity]", dfD.exists());
+		assertTrue("[sanity]", fileD.canRead());
+
+		RepoUtils.modifyFileAppend(fileD, " 1 \n");
+		HgCommitCommand cmd = new HgCommitCommand(hgRepo).message("FIRST");
+		Outcome r = cmd.execute();
+		errorCollector.assertTrue(r.isOk());
+		Nodeid c = cmd.getCommittedRevision();
+		
+		errorCollector.assertEquals(activeBookmark, hgRepo.getBookmarks().getActiveBookmarkName());
+		errorCollector.assertEquals(c, hgRepo.getBookmarks().getRevision(activeBookmark));
+		// reload repo, and repeat the check
+		hgRepo = new HgLookup().detect(repoLoc);
+		errorCollector.assertEquals(activeBookmark, hgRepo.getBookmarks().getActiveBookmarkName());
+		errorCollector.assertEquals(c, hgRepo.getBookmarks().getRevision(activeBookmark));
+	}
+
+	/**
+	 * from the wiki:
+	 * "active bookmarks are automatically updated when committing to the changeset they are pointing to"
+	 * Synopsis: commit 1 (c1), hg bookmark active (points to commit1), make commit 2, hg bookmark -f -r c1 active, commit 3, check active still points to c1 
+	 */
+	@Test
+	public void testNoBookmarkUpdate() throws Exception {
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-no-bookmark-upd", false);
+		HgRepository hgRepo = new HgLookup().detect(repoLoc);
+		assertNull("[sanity]", hgRepo.getBookmarks().getActiveBookmarkName());
+		ExecHelper eh = new ExecHelper(new OutputParser.Stub(), repoLoc);
+		String activeBookmark = "bm1";
+		eh.run("hg", "bookmarks", activeBookmark);
+		assertEquals("Bookmarks has to reload", activeBookmark, hgRepo.getBookmarks().getActiveBookmarkName());
+		Nodeid initialBookmarkRevision = hgRepo.getBookmarks().getRevision(activeBookmark); // c1
+		assertEquals("[sanity]", initialBookmarkRevision, hgRepo.getWorkingCopyParents().first());
+
+		File fileD = new File(repoLoc, "d");
+		assertTrue("[sanity]", fileD.canRead());
+		RepoUtils.modifyFileAppend(fileD, " 1 \n");
+		HgCommitCommand cmd = new HgCommitCommand(hgRepo).message("FIRST");
+		Outcome r = cmd.execute();
+		errorCollector.assertTrue(r.isOk());
+		Nodeid c2 = cmd.getCommittedRevision();
+		errorCollector.assertEquals(c2, hgRepo.getBookmarks().getRevision(activeBookmark));
+		//
+		if (!Internals.runningOnWindows()) {
+			// need change to happen not the same moment as the last commit (and read of bookmark file)
+			Thread.sleep(1000); // XXX remove once better file change detection in place
+		}
+		eh.run("hg", "bookmark", activeBookmark, "--force", "--rev", initialBookmarkRevision.toString());
+		//
+		RepoUtils.modifyFileAppend(fileD, " 2 \n");
+		cmd = new HgCommitCommand(hgRepo).message("SECOND");
+		r = cmd.execute();
+		errorCollector.assertTrue(r.isOk());
+		//Nodeid c3 = cmd.getCommittedRevision();
+		errorCollector.assertEquals(initialBookmarkRevision, hgRepo.getBookmarks().getRevision(activeBookmark));
+	}
+
+	@Test
+	public void testRefreshTagsAndBranches() throws Exception {
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-branches", "test-refresh-after-commit", false);
+		final String tag = "tag.refresh", branch = "branch-refresh";
+		HgRepository hgRepo = new HgLookup().detect(repoLoc);
+		assertFalse(hgRepo.getTags().getAllTags().containsKey(tag));
+		assertNull(hgRepo.getBranches().getBranch(branch));
+		RepoUtils.modifyFileAppend(new File(repoLoc, "a"), "whatever");
+		//
+		final int parentCsetRevIndex = hgRepo.getChangelog().getLastRevision();
+		// HgCommitCommand can't do branch yet
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentCsetRevIndex);
+		cf.add(hgRepo.getFileNode("a"), new FileContentSupplier(hgRepo, new File(repoLoc, "a")));
+		cf.branch(branch);
+		Transaction tr = newTransaction(hgRepo);
+		Nodeid commit = cf.commit("FIRST",  tr);
+		tr.commit();
+		errorCollector.assertEquals("commit with branch shall update WC", branch, hgRepo.getWorkingCopyBranchName());
+		
+		ExecHelper eh = new ExecHelper(new OutputParser.Stub(), repoLoc);
+		eh.run("hg", "tag", tag);
+		assertEquals("[sanity]", 0, eh.getExitValue());
+		
+		errorCollector.assertTrue(hgRepo.getTags().getAllTags().containsKey(tag));
+		errorCollector.assertFalse(hgRepo.getBranches().getBranch(branch) == null);
+		errorCollector.assertTrue(hgRepo.getTags().tagged(tag).contains(commit));
+		errorCollector.assertTrue(hgRepo.getTags().tags(commit).contains(tag));
+	}
+	
+	@Test
+	public void testAddedFilesGetStream() throws Exception {
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-commit-addfile-stream", false);
+		final File newFile = new File(repoLoc, "xx");
+		final byte[] newFileContent = "xyz".getBytes();
+		RepoUtils.createFile(newFile, newFileContent);
+		HgRepository hgRepo = new HgLookup().detect(repoLoc);
+		new HgAddRemoveCommand(hgRepo).add(Path.create("xx")).execute();
+		// save the reference to HgDataFile without valid RevlogStream (entry in the dirstate
+		// doesn't make it valid)
+		final HgDataFile newFileNode = hgRepo.getFileNode("xx");
+		assertFalse(newFileNode.exists());
+		HgCommitCommand cmd = new HgCommitCommand(hgRepo).message("FIRST");
+		Outcome r = cmd.execute();
+		errorCollector.assertTrue(r.isOk());
+		TestStatus.StatusCollector status = new TestStatus.StatusCollector();
+		new HgStatusCommand(hgRepo).all().execute(status);
+		errorCollector.assertTrue(status.getErrors().isEmpty());
+		errorCollector.assertTrue(status.get(Kind.Added).isEmpty());
+		errorCollector.assertTrue(status.get(newFileNode.getPath()).contains(Kind.Clean));
+		//
+		errorCollector.assertTrue(newFileNode.exists());
+		final ByteArrayChannel read1 = new ByteArrayChannel();
+		newFileNode.content(0, read1);
+		errorCollector.assertEquals("Read from existing HgDataFile instance", newFileContent, read1.toArray());
+		final ByteArrayChannel read2 = new ByteArrayChannel();
+		hgRepo.getFileNode(newFileNode.getPath()).content(0, read2);
+		errorCollector.assertEquals("Read from fresh HgDataFile instance", newFileContent, read2.toArray());
+	}
+	
+	@Test
+	public void testRollback() throws Exception {
+		// Important: copy, not a clone of a repo to ensure old timestamps
+		// on repository files. Otherwise, there're chances transacition.rollback()
+		// would happen the very second (when fs timestamp granularity is second)
+		// repository got cloned, and RevlogChangeMonitor won't notice the file change
+		// (timestamp is the same, file size increased (CommitFacility) and decreased
+		// on rollback back to memorized value), and subsequent hgRepo access would fail
+		// trying to read more (due to Revlog#revisionAdded) revisions than there are in 
+		// the store file. 
+		// With copy and original timestamps we pretend commit happens to an existing repository
+		// in a regular manner (it's unlikely to have commits within the same second in a real life)
+		// XXX Note, once we have more robust method to detect file changes (e.g. Java7), this
+		// approach shall be abandoned.
+		File repoLoc = RepoUtils.copyRepoToTempLocation("log-1", "test-commit-rollback");
+		final Path newFilePath = Path.create("xx");
+		final File newFile = new File(repoLoc, newFilePath.toString());
+		RepoUtils.createFile(newFile, "xyz");
+		HgRepository hgRepo = new HgLookup().detect(repoLoc);
+		HgDataFile dfB = hgRepo.getFileNode("b");
+		HgDataFile dfD = hgRepo.getFileNode("d");
+		assertTrue("[sanity]", dfB.exists());
+		assertTrue("[sanity]", dfD.exists());
+		final File modifiedFile = new File(repoLoc, "b");
+		RepoUtils.modifyFileAppend(modifiedFile, " 1 \n");
+		//
+		new HgAddRemoveCommand(hgRepo).add(newFilePath).remove(dfD.getPath()).execute();
+		//
+		TestStatus.StatusCollector status = new TestStatus.StatusCollector();
+		new HgStatusCommand(hgRepo).all().execute(status);
+		assertTrue(status.getErrors().isEmpty());
+		assertTrue(status.get(Kind.Added).contains(newFilePath));
+		assertTrue(status.get(Kind.Modified).contains(dfB.getPath()));
+		assertTrue(status.get(Kind.Removed).contains(dfD.getPath()));
+		assertEquals(DEFAULT_BRANCH_NAME, hgRepo.getWorkingCopyBranchName());
+		//
+		final int lastClogRevision = hgRepo.getChangelog().getLastRevision();
+		final int lastManifestRev = hgRepo.getManifest().getLastRevision();
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), lastClogRevision);
+		cf.add(hgRepo.getFileNode("xx"), new FileContentSupplier(hgRepo, newFile));
+		cf.add(dfB, new FileContentSupplier(hgRepo, modifiedFile));
+		cf.forget(dfD);
+		cf.branch("another-branch");
+		Transaction tr = newTransaction(hgRepo);
+		Nodeid commitRev = cf.commit("Commit to fail",  tr);
+		tr.rollback();
+		//
+		errorCollector.assertEquals(lastClogRevision, hgRepo.getChangelog().getLastRevision());
+		errorCollector.assertEquals(lastManifestRev, hgRepo.getManifest().getLastRevision());
+		errorCollector.assertEquals(DEFAULT_BRANCH_NAME, DirstateReader.readBranch(Internals.getInstance(hgRepo)));
+		errorCollector.assertFalse(hgRepo.getChangelog().isKnown(commitRev));
+		errorCollector.assertFalse(hgRepo.getFileNode("xx").exists());
+		// check dirstate
+		status = new TestStatus.StatusCollector();
+		new HgStatusCommand(hgRepo).all().execute(status);
+		errorCollector.assertTrue(status.getErrors().isEmpty());
+		errorCollector.assertTrue(status.get(Kind.Added).contains(newFilePath));
+		errorCollector.assertTrue(status.get(Kind.Modified).contains(dfB.getPath()));
+		errorCollector.assertTrue(status.get(Kind.Removed).contains(dfD.getPath()));
+		
+		RepoUtils.assertHgVerifyOk(errorCollector, repoLoc);
+	}
+	
+	private Transaction newTransaction(SessionContext.Source ctxSource) {
+		return trFactory.create(ctxSource);
 	}
 
 	public static void main(String[] args) throws Exception {
 		new TestCommit().testCommitToEmpty();
-		if (Boolean.TRUE.booleanValue()) {
-			return;
-		}
-		String input = "abcdefghijklmnopqrstuvwxyz";
-		ByteArraySupplier bas = new ByteArraySupplier(input.getBytes());
-		ByteBuffer bb = ByteBuffer.allocate(7);
-		byte[] result = new byte[26];
-		int rpos = 0;
-		while (bas.read(bb) != -1) {
-			bb.flip();
-			bb.get(result, rpos, bb.limit());
-			rpos += bb.limit();
-			bb.clear();
-		}
-		if (input.length() != rpos) {
-			throw new AssertionError();
-		}
-		String output = new String(result);
-		if (!input.equals(output)) {
-			throw new AssertionError();
-		}
-		System.out.println(output);
-	}
-
-	static class ByteArraySupplier implements CommitFacility.ByteDataSupplier {
-
-		private final byte[] data;
-		private int pos = 0;
-
-		public ByteArraySupplier(byte[] source) {
-			data = source;
-		}
-
-		public int read(ByteBuffer buf) {
-			if (pos >= data.length) {
-				return -1;
-			}
-			int count = Math.min(buf.remaining(), data.length - pos);
-			buf.put(data, pos, count);
-			pos += count;
-			return count;
-		}
 	}
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/TestConfigFiles.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.tmatesoft.hg.internal.BasicSessionContext;
+import org.tmatesoft.hg.internal.ConfigFile;
+import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgRepoConfig;
+import org.tmatesoft.hg.repo.HgRepoConfig.ExtensionsSection;
+import org.tmatesoft.hg.repo.HgRepoConfig.PathsSection;
+import org.tmatesoft.hg.repo.HgRepoConfig.Section;
+import org.tmatesoft.hg.repo.HgRepository;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class TestConfigFiles {
+	
+	@Rule
+	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
+
+	@Test
+	public void testConfigFile() throws Exception {
+		ConfigFile configFile = new ConfigFile(new BasicSessionContext(null));
+		configFile.addLocation(new File(Configuration.get().getTestDataDir(), "sample.rc"));
+		// section1 has key1 unset, key2 overridden from included, key4 from second occurence
+		HashMap<String, String> section1 = new HashMap<String, String>();
+		section1.put("key2", "alternative value 2");
+		section1.put("key3", "value 3");
+		section1.put("key4", "value 4");
+		// section2 comes from included config
+		HashMap<String, String> section2 = new HashMap<String, String>();
+		section2.put("key1", "value 1-2");
+		HashMap<String, String> section3 = new HashMap<String, String>();
+		section3.put("key1", "value 1-3");
+		HashMap<String, HashMap<String,String>> sections = new HashMap<String, HashMap<String,String>>();
+		sections.put("section1", section1);
+		sections.put("section2", section2);
+		sections.put("section3", section3);
+		//
+		for (String s : configFile.getSectionNames()) {
+//			System.out.printf("[%s]\n", s);
+			final HashMap<String, String> m = sections.remove(s);
+			errorCollector.assertTrue(m != null);
+			for (Map.Entry<String, String> e : configFile.getSection(s).entrySet()) {
+//				System.out.printf("%s = %s\n", e.getKey(), e.getValue());
+				if (m.containsKey(e.getKey())) {
+					errorCollector.assertEquals(m.remove(e.getKey()), e.getValue());
+				} else {
+					errorCollector.fail("Unexpected key:" + e.getKey());
+				}
+			}
+		}
+		errorCollector.assertEquals(0, sections.size());
+		errorCollector.assertEquals(0, section1.size());
+		errorCollector.assertEquals(0, section2.size());
+		errorCollector.assertEquals(0, section3.size());
+	}
+
+	@Test
+	public void testRepositoryConfig() throws Exception {
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-repocfg", false);
+		File hgrc = new File(repoLoc, ".hg/hgrc");
+		String username = "John Q. Public <john.public@acme.com>";
+		String path1_key = "hg4j.gc";
+		String path1_value = "https://code.google.com/p/hg4j/";
+		String ext1_key = "ext.one";
+		String ext2_key = "ext.disabled"; // disabled
+		String ext3_key = "hgext.two"; // check if found by "two" key 
+		String hgrcContent = String.format("#comment\n[ui]\nusername = %s\n\n[paths]\n%s = %s\ndefault=%3$s\n\n[extensions]\n%s = \n%s = !\n%s=\n", username, path1_key, path1_value, ext1_key, ext2_key, ext3_key);
+		RepoUtils.createFile(hgrc, hgrcContent);
+		//
+		HgRepository repo = new HgLookup().detect(repoLoc);
+		final HgRepoConfig cfg = repo.getConfiguration();
+		assertNotNull(cfg.getPaths());
+		assertNotNull(cfg.getExtensions());
+		final Section dne = cfg.getSection("does-not-exist");
+		assertNotNull(dne);
+		assertFalse(dne.exists());
+		assertEquals(username, cfg.getSection("ui").getString("username", null));
+		final PathsSection p = cfg.getPaths();
+		assertTrue(p.getPathSymbolicNames().contains(path1_key));
+		assertEquals(path1_value, p.getString(path1_key, null));
+		assertTrue(p.hasDefault());
+		assertEquals(path1_value, p.getDefault());
+		assertFalse(p.hasDefault() ^ p.getDefault() != null);
+		assertFalse(p.hasDefaultPush() ^ p.getDefaultPush() != null);
+		final ExtensionsSection e = cfg.getExtensions();
+		assertTrue(e.isEnabled(ext1_key));
+		assertTrue(e.getString(ext2_key, null).length() > 0);
+		assertFalse(e.isEnabled(ext2_key));
+		assertNotNull(e.getString(ext3_key, null));
+		assertTrue(e.isEnabled(ext3_key.substring("hgext.".length())));
+		//
+		assertEquals(username, new HgInternals(repo).getNextCommitUsername());
+	}
+}
--- a/test/org/tmatesoft/hg/test/TestDiffHelper.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestDiffHelper.java	Wed Jul 10 11:48:55 2013 +0200
@@ -118,6 +118,42 @@
 		diff.findMatchingBlocks(mc = new MatchCollector<CharSequence>());
 		assertEquals(3, mc.matchCount()); // bc, e, g
 	}
+
+	@Test
+	public void testChangedEOL() {
+		DiffHelper<LineSequence> diffHelper = new DiffHelper<LineSequence>();
+		MatchCollector<LineSequence> mc; DeltaCollector dc;
+		// all lines changed
+		diffHelper.init(newlines("one\ntwo\nthree\n".getBytes()), newlines("one\r\ntwo\r\nthree\r\n".getBytes()));
+		diffHelper.findMatchingBlocks(mc = new MatchCollector<LineSequence>());
+		assertEquals(0, mc.matchCount());
+		diffHelper.findMatchingBlocks(dc = new DeltaCollector());
+		assertEquals(0, dc.unchangedCount());
+		assertEquals(1, dc.deletedCount());
+		assertTrue(dc.deletedLine(0));
+		assertTrue(dc.deletedLine(1));
+		assertTrue(dc.deletedLine(2));
+		assertEquals(1, dc.addedCount());
+		assertTrue(dc.addedLine(0));
+		assertTrue(dc.addedLine(1));
+		assertTrue(dc.addedLine(2));
+		// one line changed
+		diffHelper.init(newlines("one\ntwo\nthree\n".getBytes()), newlines("one\ntwo\r\nthree\n".getBytes()));
+		diffHelper.findMatchingBlocks(mc = new MatchCollector<LineSequence>());
+		assertEquals(2, mc.matchCount());
+		assertTrue(mc.originLineMatched(0));
+		assertTrue(mc.targetLineMatched(0));
+		assertFalse(mc.originLineMatched(1));
+		assertFalse(mc.targetLineMatched(1));
+		assertTrue(mc.originLineMatched(2));
+		assertTrue(mc.targetLineMatched(2));
+		diffHelper.findMatchingBlocks(dc = new DeltaCollector());
+		assertEquals(2, dc.unchangedCount());
+		assertEquals(1, dc.deletedCount());
+		assertTrue(dc.deletedLine(1));
+		assertEquals(1, dc.addedCount());
+		assertTrue(dc.addedLine(1));
+	}
 	
 	// range is comprised of 3 values, range length always last, range start comes at index o (either 0 or 1)
 	static boolean includes(IntVector ranges, int o, int ln) {
@@ -188,20 +224,24 @@
 			same.add(s1From, s2From, length);
 		}
 
+		// return number of regions that didn't change
 		int unchangedCount() {
 			return same.size() / 3;
 		}
 
+		// return number of added regions
 		int addedCount() {
 			return added.size() / 3;
 		}
-
+		// return number of deleted regions
 		int deletedCount() {
 			return deleted.size() / 3;
 		}
+		// answer if 0-based line is marked as added
 		boolean addedLine(int ln) {
 			return includes(added, 1, ln);
 		}
+		// answer if 0-based line is marked as deleted
 		boolean deletedLine(int ln) {
 			return includes(deleted, 1, ln);
 		}
--- a/test/org/tmatesoft/hg/test/TestDirstate.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestDirstate.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -17,8 +17,7 @@
 package org.tmatesoft.hg.test;
 
 import static java.lang.Character.*;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.Assert.*;
 
 import java.util.TreeSet;
 
@@ -46,10 +45,13 @@
 	public void testParents() throws Exception {
 		repo = Configuration.get().find("log-branches");
 		final Pair<Nodeid, Nodeid> wcParents = repo.getWorkingCopyParents();
-		Assert.assertEquals("5f24ef64e9dfb1540db524f88cb5c3d265e1a3b5", wcParents.first().toString());
-		Assert.assertTrue(wcParents.second().isNull());
+		assertEquals("5f24ef64e9dfb1540db524f88cb5c3d265e1a3b5", wcParents.first().toString());
+		assertTrue(wcParents.second().isNull());
 		//
-		// TODO same static and non-static
+		HgDirstate ds = new HgInternals(repo).getDirstate();
+		final Pair<Nodeid, Nodeid> wcParents2 = ds.parents();
+		assertEquals(wcParents.first(), wcParents2.first());
+		assertEquals(wcParents.second(), wcParents2.second());
 	}
 
 	@Test
--- a/test/org/tmatesoft/hg/test/TestFileFlags.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestFileFlags.java	Wed Jul 10 11:48:55 2013 +0200
@@ -30,6 +30,7 @@
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.FileInfo;
 import org.tmatesoft.hg.util.FileWalker;
 import org.tmatesoft.hg.util.Path;
@@ -52,7 +53,7 @@
 	private HgRepository repo;
 	
 	@Test
-	public void testFlagsInManifest() {
+	public void testFlagsInManifest() throws HgRuntimeException {
 		HgDataFile link = repo.getFileNode("file-link");
 		HgDataFile exec = repo.getFileNode("file-exec");
 		HgDataFile file = repo.getFileNode("regular-file");
@@ -65,7 +66,7 @@
 	public void testFlagsInWorkingCopy() throws Exception {
 		File repoRoot = repo.getWorkingDir();
 		Path.Source pathSrc = new Path.SimpleSource(new PathRewrite.Composite(new RelativePathRewrite(repoRoot), repo.getToRepoPathHelper()));
-		FileWalker fw = new FileWalker(repo.getSessionContext(), repoRoot, pathSrc);
+		FileWalker fw = new FileWalker(repo, repoRoot, pathSrc, null);
 		
 		if (Internals.runningOnWindows()) {
 			System.out.println("Executing tests on Windows, no actual file flags in working area are checked");
--- a/test/org/tmatesoft/hg/test/TestHistory.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestHistory.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -45,6 +45,7 @@
 import org.tmatesoft.hg.internal.AdapterPlug;
 import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.test.LogOutputParser.Record;
 import org.tmatesoft.hg.util.Adaptable;
 import org.tmatesoft.hg.util.CancelSupport;
@@ -626,7 +627,7 @@
 		}
 		
 
-		public void treeElement(TreeElement entry) throws HgCallbackTargetException {
+		public void treeElement(TreeElement entry) throws HgCallbackTargetException, HgRuntimeException {
 			// check consistency
 			Nodeid cset = entry.changeset().getNodeid();
 			errorCollector.assertEquals(entry.changesetRevision(), cset);
--- a/test/org/tmatesoft/hg/test/TestIgnore.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestIgnore.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -28,6 +28,9 @@
 import org.tmatesoft.hg.internal.WinToNixPathRewrite;
 import org.tmatesoft.hg.repo.HgIgnore;
 import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRepositoryFiles;
 import org.tmatesoft.hg.util.Path;
 
 /**
@@ -189,6 +192,26 @@
 		doAssert(hgIgnore, toIgnore, toPass);
 	}
 	
+	@Test
+	public void testRefreshOnChange() throws Exception {
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-refresh-hgignore", false);
+		File hgignoreFile = new File(repoLoc, HgRepositoryFiles.HgIgnore.getPath());
+		RepoUtils.createFile(hgignoreFile, "bin/");
+		HgRepository hgRepo = new HgLookup().detect(repoLoc);
+		final Path p1 = Path.create("bin/a/b/c");
+		final Path p2 = Path.create("src/a/b/c");
+		HgIgnore ignore = hgRepo.getIgnore();
+		errorCollector.assertTrue(ignore.isIgnored(p1));
+		errorCollector.assertFalse(ignore.isIgnored(p2));
+		Thread.sleep(1000); // Linux granularity for modification time is 1 second 
+		// file of the same length
+		RepoUtils.createFile(hgignoreFile, "src/");
+		ignore = hgRepo.getIgnore();
+		errorCollector.assertFalse(ignore.isIgnored(p1));
+		errorCollector.assertTrue(ignore.isIgnored(p2));
+		
+	}
+	
 	private void doAssert(HgIgnore hgIgnore, Path[] toIgnore, Path[] toPass) {
 		if (toIgnore == null && toPass == null) {
 			throw new IllegalArgumentException();
--- a/test/org/tmatesoft/hg/test/TestIncoming.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestIncoming.java	Wed Jul 10 11:48:55 2013 +0200
@@ -115,8 +115,8 @@
 		HashSet<Nodeid> set = new HashSet<Nodeid>(liteResult);
 		for (Nodeid nid : expected) {
 			boolean removed = set.remove(nid);
-			errorCollector.checkThat(what + " Missing " +  nid.shortNotation() + " in HgIncomingCommand.execLite result", removed, equalTo(true));
+			errorCollector.checkThat(what + " Missing " +  nid.shortNotation() + " in execLite result", removed, equalTo(true));
 		}
-		errorCollector.checkThat(what + " Superfluous cset reported by HgIncomingCommand.execLite", set.isEmpty(), equalTo(true));
+		errorCollector.checkThat(what + " Superfluous cset reported by execLite", set.isEmpty(), equalTo(true));
 	}
 }
--- a/test/org/tmatesoft/hg/test/TestInflaterDataAccess.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestInflaterDataAccess.java	Wed Jul 10 11:48:55 2013 +0200
@@ -51,7 +51,7 @@
 	@Test
 	public void testSeek() throws Exception {
 		DataAccess zip = zip(testContent1);
-		InflaterDataAccess ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25]);
+		InflaterDataAccess ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25], null);
 		ida.seek(20);
 		final int bufferCapacity = 10;
 		ByteBuffer chunk1 = ByteBuffer.allocate(bufferCapacity);
@@ -66,15 +66,15 @@
 	@Test
 	public void testLength() throws Exception {
 		DataAccess zip = zip(testContent1);
-		InflaterDataAccess ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25]);
+		InflaterDataAccess ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25], null);
 		errorCollector.assertEquals("Plain #length()", testContent1.length, ida.length());
 		//
-		ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25]);
+		ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25], null);
 		byte[] dummy = new byte[30];
 		ida.readBytes(dummy, 0, dummy.length);
 		errorCollector.assertEquals("#length() after readBytes()", testContent1.length, ida.length());
 		//
-		ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25]);
+		ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25], null);
 		// consume most of the stream, so that all original compressed data is already read
 		dummy = new byte[testContent1.length - 1];
 		ida.readBytes(dummy, 0, dummy.length);
@@ -86,7 +86,7 @@
 	@Test
 	public void testReadBytes() throws Exception {
 		DataAccess zip = zip(testContent1);
-		InflaterDataAccess ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25]);
+		InflaterDataAccess ida = new InflaterDataAccess(zip, 0, zip.length(), -1, new Inflater(), new byte[25], null);
 		ida.skip(10);
 		byte[] chunk1 = new byte[22];
 		ida.readBytes(chunk1, 0, 20);
--- a/test/org/tmatesoft/hg/test/TestOutgoing.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestOutgoing.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -16,19 +16,23 @@
  */
 package org.tmatesoft.hg.test;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
 import java.util.List;
 
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
+import org.tmatesoft.hg.core.HgCheckoutCommand;
+import org.tmatesoft.hg.core.HgCommitCommand;
 import org.tmatesoft.hg.core.HgLogCommand;
 import org.tmatesoft.hg.core.HgOutgoingCommand;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgRemoteRepository;
+import org.tmatesoft.hg.repo.HgRepository;
 
 /**
  *
@@ -71,10 +75,10 @@
 			TestIncoming.report(collector, outParser, liteResult, errorCollector);
 			//
 			File f = new File(dest, "Test.txt");
-			append(f, "1");
+			RepoUtils.createFile(f, "1");
 			eh0.run("hg", "add");
 			eh0.run("hg", "commit", "-m", "1");
-			append(f, "2");
+			RepoUtils.modifyFileAppend(f, "2");
 			eh0.run("hg", "commit", "-m", "2");
 			//
 			cmd = new HgOutgoingCommand(lookup.detect(dest)).against(hgRemote);
@@ -85,10 +89,41 @@
 			TestIncoming.report(collector, outParser, liteResult, errorCollector);
 		}
 	}
-
-	static void append(File f, String s) throws IOException {
-		FileWriter fw = new FileWriter(f);
-		fw.append(s);
-		fw.close();
+	
+	/**
+	 * Issue 47: Incorrect set of outgoing changes when revision spins off prior to common revision of local and remote repos
+	 */
+	@Test
+	public void testOutgoingPreceedsCommon() throws Exception {
+		File srcRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-outgoing-src", false);
+		File dstRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-outgoing-dst", false);
+		File f1 = new File(srcRepoLoc, "file1");
+		assertTrue("[sanity]", f1.canWrite());
+		HgServer server = new HgServer().start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			new HgCheckoutCommand(srcRepo).changeset(6).clean(true).execute();
+			assertEquals("[sanity]", "with-merge", srcRepo.getWorkingCopyBranchName());
+			RepoUtils.modifyFileAppend(f1, "change1");
+			new HgCommitCommand(srcRepo).message("Commit 1").execute();
+			new HgCheckoutCommand(srcRepo).changeset(5).clean(true).execute();
+			assertEquals("[sanity]", "no-merge", srcRepo.getWorkingCopyBranchName());
+			RepoUtils.modifyFileAppend(f1, "change2");
+			new HgCommitCommand(srcRepo).message("Commit 2").execute();
+			//
+			HgOutgoingCommand cmd = new HgOutgoingCommand(srcRepo).against(dstRemote);
+			LogOutputParser outParser = new LogOutputParser(true);
+			ExecHelper eh = new ExecHelper(outParser, srcRepoLoc);
+			HgLogCommand.CollectHandler collector = new HgLogCommand.CollectHandler();
+			//
+			List<Nodeid> liteResult = cmd.executeLite();
+			cmd.executeFull(collector);
+			eh.run("hg", "outgoing", "--debug", dstRemote.getLocation());
+			TestIncoming.report(collector, outParser, liteResult, errorCollector);
+		} finally {
+			server.stop();
+		}
 	}
 }
--- a/test/org/tmatesoft/hg/test/TestPhases.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestPhases.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -18,18 +18,23 @@
 
 import static org.junit.Assert.*;
 
+import java.util.ArrayList;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.junit.Rule;
 import org.junit.Test;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.PhasesHelper;
+import org.tmatesoft.hg.internal.RevisionSet;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgInternals;
 import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.repo.HgPhase;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 
 /**
  * {hg4j.tests.repos}/test-phases/
@@ -66,8 +71,43 @@
 		final long end = System.nanoTime();
 		System.out.printf("With ParentWalker(simulates log command for whole repo): %,d μs (pw init: %,d ns)\n", (end - start1)/1000, start2 - start1);
 	}
+	
+	@Test
+	public void testAllSecretAndDraft() throws Exception {
+		HgRepository repo = Configuration.get().find("test-phases");
+		Internals implRepo = HgInternals.getImplementationRepo(repo);
+		HgPhase[] expected = readPhases(repo);
+		ArrayList<Nodeid> secret = new ArrayList<Nodeid>();
+		ArrayList<Nodeid> draft = new ArrayList<Nodeid>();
+		ArrayList<Nodeid> pub = new ArrayList<Nodeid>();
+		for (int i = 0; i < expected.length; i++) {
+			Nodeid n = repo.getChangelog().getRevision(i);
+			switch (expected[i]) {
+			case Secret : secret.add(n); break; 
+			case Draft : draft.add(n); break;
+			case Public : pub.add(n); break;
+			default : throw new IllegalStateException();
+			}
+		}
+		final RevisionSet rsSecret = new RevisionSet(secret);
+		final RevisionSet rsDraft = new RevisionSet(draft);
+		assertFalse("[sanity]", rsSecret.isEmpty());
+		assertFalse("[sanity]", rsDraft.isEmpty());
+		HgParentChildMap<HgChangelog> pw = new HgParentChildMap<HgChangelog>(repo.getChangelog());
+		pw.init();
+		PhasesHelper ph1 = new PhasesHelper(implRepo, null);
+		PhasesHelper ph2 = new PhasesHelper(implRepo, pw);
+		RevisionSet s1 = ph1.allSecret().symmetricDifference(rsSecret);
+		RevisionSet s2 = ph2.allSecret().symmetricDifference(rsSecret);
+		errorCollector.assertTrue("Secret,no ParentChildMap:" + s1.toString(), s1.isEmpty());
+		errorCollector.assertTrue("Secret, with ParentChildMap:" + s2.toString(), s2.isEmpty());
+		RevisionSet s3 = ph1.allDraft().symmetricDifference(rsDraft);
+		RevisionSet s4 = ph2.allDraft().symmetricDifference(rsDraft);
+		errorCollector.assertTrue("Draft,no ParentChildMap:" + s3.toString(), s3.isEmpty());
+		errorCollector.assertTrue("Draft, with ParentChildMap:" + s4.toString(), s4.isEmpty());
+	}
 
-	private HgPhase[] initAndCheck(PhasesHelper ph, HgPhase[] expected) {
+	private HgPhase[] initAndCheck(PhasesHelper ph, HgPhase[] expected) throws HgRuntimeException {
 		HgChangelog clog = ph.getRepo().getChangelog();
 		HgPhase[] result = new HgPhase[clog.getRevisionCount()];
 		for (int i = 0, l = clog.getLastRevision(); i <= l; i++) {
@@ -85,6 +125,7 @@
 		OutputParser.Stub output = new OutputParser.Stub();
 		ExecHelper eh = new ExecHelper(output, repo.getWorkingDir());
 		eh.run("hg", "phase", "-r", "0:-1");
+		assertEquals("Perhaps, older Mercurial version, with no hg phase command support?", 0, eh.getExitValue());
 		Matcher m = Pattern.compile("(\\d+): (\\w+)$", Pattern.MULTILINE).matcher(output.result());
 		int i = 0;
 		while (m.find()) {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/TestPull.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
+
+import java.io.File;
+import java.util.List;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.tmatesoft.hg.core.HgIncomingCommand;
+import org.tmatesoft.hg.core.HgPullCommand;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgRemoteRepository;
+import org.tmatesoft.hg.repo.HgRepository;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class TestPull {
+
+	@Rule
+	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
+	
+	@Test
+	public void testPullToEmpty() throws Exception {
+		File srcRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-pull2empty-src", false);
+		File dstRepoLoc = RepoUtils.initEmptyTempRepo("test-pull2empty-dst");
+		HgServer server = new HgServer().start(srcRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRemoteRepository srcRemote = hgLookup.detect(server.getURL());
+			HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			HgPullCommand cmd = new HgPullCommand(dstRepo).source(srcRemote);
+			cmd.execute();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			checkRepositoriesAreSame(srcRepo, dstRepo);
+			final List<Nodeid> incoming = new HgIncomingCommand(dstRepo).against(srcRemote).executeLite();
+			errorCollector.assertTrue(incoming.toString(), incoming.isEmpty());
+			RepoUtils.assertHgVerifyOk(errorCollector, dstRepoLoc);
+		} finally {
+			server.stop();
+		}
+	}
+	
+	// test when pull comes with new file (if AddRevInspector/RevlogStreamWriter is ok with file that doesn't exist 
+
+	private void checkRepositoriesAreSame(HgRepository srcRepo, HgRepository dstRepo) {
+		// XXX copy of TestPush#checkRepositoriesAreSame
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevisionCount(), dstRepo.getChangelog().getRevisionCount());
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevision(0), dstRepo.getChangelog().getRevision(0));
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevision(TIP), dstRepo.getChangelog().getRevision(TIP));
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/TestPush.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,387 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import static org.junit.Assert.*;
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
+
+import java.io.File;
+import java.util.List;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.tmatesoft.hg.core.HgCheckoutCommand;
+import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgOutgoingCommand;
+import org.tmatesoft.hg.core.HgPushCommand;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.PhasesHelper;
+import org.tmatesoft.hg.internal.RevisionSet;
+import org.tmatesoft.hg.repo.HgBookmarks;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgPhase;
+import org.tmatesoft.hg.repo.HgRemoteRepository;
+import org.tmatesoft.hg.repo.HgRepository;
+
+/**
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class TestPush {
+
+	@Rule
+	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
+
+	@Test
+	public void testPushToEmpty() throws Exception {
+		File srcRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-push2empty-src", false);
+		File dstRepoLoc = RepoUtils.initEmptyTempRepo("test-push2empty-dst");
+		HgServer server = new HgServer().start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			HgPushCommand cmd = new HgPushCommand(srcRepo);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			cmd.destination(dstRemote);
+			cmd.execute();
+			final HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			checkRepositoriesAreSame(srcRepo, dstRepo);
+			final List<Nodeid> outgoing = new HgOutgoingCommand(srcRepo).against(dstRemote).executeLite();
+			errorCollector.assertTrue(outgoing.toString(), outgoing.isEmpty());
+		} finally {
+			server.stop();
+		}
+	}
+
+	@Test
+	public void testPushChanges() throws Exception {
+		File srcRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-push-src", false);
+		File dstRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-push-dst", false);
+		File f1 = new File(srcRepoLoc, "file1");
+		assertTrue("[sanity]", f1.canWrite());
+		HgServer server = new HgServer().start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			RepoUtils.modifyFileAppend(f1, "change1");
+			new HgCommitCommand(srcRepo).message("Commit 1").execute();
+			new HgCheckoutCommand(srcRepo).changeset(7).clean(true).execute();
+			assertEquals("[sanity]", "no-merge", srcRepo.getWorkingCopyBranchName());
+			RepoUtils.modifyFileAppend(f1, "change2");
+			new HgCommitCommand(srcRepo).message("Commit 2").execute();
+			//
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			checkRepositoriesAreSame(srcRepo, hgLookup.detect(dstRepoLoc));
+			final List<Nodeid> outgoing = new HgOutgoingCommand(srcRepo).against(dstRemote).executeLite();
+			errorCollector.assertTrue(outgoing.toString(), outgoing.isEmpty());
+		} finally {
+			server.stop();
+		}
+	}
+	
+	@Test
+	public void testPushToNonPublishingServer() throws Exception {
+		// check drafts are same as on server
+		// copy, not clone as latter updates phase information
+		File srcRepoLoc = RepoUtils.copyRepoToTempLocation("test-phases", "test-push-nopub-src");
+		File dstRepoLoc = RepoUtils.initEmptyTempRepo("test-push-nopub-dst");
+		File f1 = new File(srcRepoLoc, "hello.c");
+		assertTrue("[sanity]", f1.canWrite());
+		HgServer server = new HgServer().publishing(false).start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			PhasesHelper phaseHelper = new PhasesHelper(HgInternals.getImplementationRepo(srcRepo));
+			final RevisionSet allDraft = phaseHelper.allDraft();
+			assertFalse("[sanity]", allDraft.isEmpty());
+			final int publicCsetToBranchAt = 4;
+			assertEquals("[sanity]", HgPhase.Public, phaseHelper.getPhase(publicCsetToBranchAt, null));
+			// in addition to existing draft csets, add one more draft, branching at some other public revision
+			new HgCheckoutCommand(srcRepo).changeset(publicCsetToBranchAt).clean(true).execute();
+			RepoUtils.modifyFileAppend(f1, "// aaa");
+			final HgCommitCommand commitCmd = new HgCommitCommand(srcRepo).message("Commit aaa");
+			assertTrue(commitCmd.execute().isOk());
+			Nodeid newCommit = commitCmd.getCommittedRevision();
+			//
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			final HgChangelog srcClog = srcRepo.getChangelog();
+			final HgChangelog dstClog = dstRepo.getChangelog();
+			// refresh PhasesHelper
+			phaseHelper = new PhasesHelper(HgInternals.getImplementationRepo(srcRepo));
+			// check if phase didn't change
+			errorCollector.assertEquals(HgPhase.Draft, phaseHelper.getPhase(srcClog.getRevisionIndex(newCommit), newCommit));
+			for (Nodeid n : allDraft) {
+				// check drafts from src were actually pushed to dst 
+				errorCollector.assertTrue(dstClog.isKnown(n));
+				// check drafts didn't change their phase
+				errorCollector.assertEquals(HgPhase.Draft, phaseHelper.getPhase(srcClog.getRevisionIndex(n), n));
+			}
+		} finally {
+			server.stop();
+		}
+	}
+	
+	/**
+	 * If server lists revisions we know as drafts as public, update them locally
+	 */
+	@Test
+	public void testPushUpdatesPublishedDrafts() throws Exception {
+		/* o		r9, secret
+		 * |  o		r8, draft
+		 * |  |
+		 * |  o		r7, draft
+		 * o  |		r6, secret 
+		 * | /
+		 * o		r5, draft
+		 * |
+		 * o		r4, public
+		 */
+		// remote: r5 -> public, r6 -> draft, r8 -> secret
+		// local: new draft from r4, push
+		File srcRepoLoc = RepoUtils.copyRepoToTempLocation("test-phases", "test-push-phase-update-1-src");
+		File dstRepoLoc = RepoUtils.copyRepoToTempLocation("test-phases", "test-push-phase-update-1-dst");
+		File f1 = new File(srcRepoLoc, "hello.c");
+		assertTrue("[sanity]", f1.canWrite());
+		final HgLookup hgLookup = new HgLookup();
+		final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+		final ExecHelper dstRun = new ExecHelper(new OutputParser.Stub(), dstRepoLoc);
+		final int publicCsetToBranchAt = 4;
+		final int r5 = 5, r6 = 6, r8 = 8;
+		PhasesHelper srcPhase = new PhasesHelper(HgInternals.getImplementationRepo(srcRepo));
+		assertEquals("[sanity]", HgPhase.Draft, srcPhase.getPhase(r5, null));
+		assertEquals("[sanity]", HgPhase.Secret, srcPhase.getPhase(r6, null));
+		assertEquals("[sanity]", HgPhase.Draft, srcPhase.getPhase(r8, null));
+		// change phases in repository of remote server:
+		dstRun.exec("hg", "phase", "--public", String.valueOf(r5));
+		assertEquals(0, dstRun.getExitValue());
+		dstRun.exec("hg", "phase", "--draft", String.valueOf(r6));
+		assertEquals(0, dstRun.getExitValue());
+		dstRun.exec("hg", "phase", "--secret", "--force", String.valueOf(r8));
+		assertEquals(0, dstRun.getExitValue());
+		HgServer server = new HgServer().publishing(false).start(dstRepoLoc);
+		try {
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			// commit new draft head
+			new HgCheckoutCommand(srcRepo).changeset(publicCsetToBranchAt).clean(true).execute();
+			RepoUtils.modifyFileAppend(f1, "// aaa");
+			final HgCommitCommand commitCmd = new HgCommitCommand(srcRepo).message("Commit aaa");
+			assertTrue(commitCmd.execute().isOk());
+			final Nodeid newCommit = commitCmd.getCommittedRevision();
+			//
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			// refresh phase information
+			srcPhase = new PhasesHelper(HgInternals.getImplementationRepo(srcRepo));
+			// r5 and r6 are changed to match server phases (more exposed)
+			errorCollector.assertEquals(HgPhase.Public, srcPhase.getPhase(r5, null));
+			errorCollector.assertEquals(HgPhase.Draft, srcPhase.getPhase(r6, null));
+			// r8 is secret on server, locally can't make it less exposed though
+			errorCollector.assertEquals(HgPhase.Draft, srcPhase.getPhase(r8, null));
+			//
+			HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			final HgChangelog dstClog = dstRepo.getChangelog();
+			assertTrue(dstClog.isKnown(newCommit));
+			PhasesHelper dstPhase = new PhasesHelper(HgInternals.getImplementationRepo(dstRepo));
+			errorCollector.assertEquals(HgPhase.Draft, dstPhase.getPhase(dstClog.getRevisionIndex(newCommit), newCommit));
+			// the one that was secret is draft now
+			errorCollector.assertEquals(HgPhase.Draft, srcPhase.getPhase(r8, null));
+		} finally {
+			server.stop();
+		}
+	}
+	
+	/**
+	 * update phases of local revisions and push changes
+	 */
+	@Test
+	public void testPushPublishAndUpdates() throws Exception {
+		File srcRepoLoc = RepoUtils.copyRepoToTempLocation("test-phases", "test-push-phase-update-2-src");
+		File dstRepoLoc = RepoUtils.initEmptyTempRepo("test-push-phase-update-1-dst");
+		final int r4 = 4, r5 = 5, r6 = 6, r9 = 9;
+		HgServer server = new HgServer().publishing(false).start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			//
+			// make sure pushed repository got same draft root
+			final Nodeid r4PublicHead = srcRepo.getChangelog().getRevision(r4);
+			final Nodeid r5DraftRoot = srcRepo.getChangelog().getRevision(r5);
+			HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			final HgChangelog dstClog = dstRepo.getChangelog();
+			PhasesHelper dstPhase = new PhasesHelper(HgInternals.getImplementationRepo(dstRepo));
+			assertEquals(HgPhase.Public, dstPhase.getPhase(dstClog.getRevisionIndex(r4PublicHead), r4PublicHead));
+			assertEquals(HgPhase.Draft, dstPhase.getPhase(dstClog.getRevisionIndex(r5DraftRoot), r5DraftRoot));
+			//
+			// now, graduate some local revisions, r5:draft->public, r6:secret->public, r9: secret->draft
+			final ExecHelper srcRun = new ExecHelper(new OutputParser.Stub(), srcRepoLoc);
+			srcRun.exec("hg", "phase", "--public", String.valueOf(r5));
+			srcRun.exec("hg", "phase", "--public", String.valueOf(r6));
+			srcRun.exec("hg", "phase", "--draft", String.valueOf(r9));
+			// PhaseHelper shall be new for the command, and would pick up these external changes 
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			final Nodeid r6Nodeid = srcRepo.getChangelog().getRevision(r6);
+			final Nodeid r9Nodeid = srcRepo.getChangelog().getRevision(r9);
+			// refresh 
+			dstPhase = new PhasesHelper(HgInternals.getImplementationRepo(dstRepo));
+			// not errorCollector as subsequent code would fail if these secret revs didn't get into dst
+			assertTrue(dstClog.isKnown(r6Nodeid));
+			assertTrue(dstClog.isKnown(r9Nodeid));
+			errorCollector.assertEquals(HgPhase.Public, dstPhase.getPhase(dstClog.getRevisionIndex(r5DraftRoot), r5DraftRoot));
+			errorCollector.assertEquals(HgPhase.Public, dstPhase.getPhase(dstClog.getRevisionIndex(r6Nodeid), r6Nodeid));
+			errorCollector.assertEquals(HgPhase.Draft, dstPhase.getPhase(dstClog.getRevisionIndex(r9Nodeid), r9Nodeid));
+		} finally {
+			server.stop();
+		}
+	}
+
+	
+	@Test
+	public void testPushToPublishingServer() throws Exception {
+		// copy, not clone as latter updates phase information
+		File srcRepoLoc = RepoUtils.copyRepoToTempLocation("test-phases", "test-push-pub-src");
+		File dstRepoLoc = RepoUtils.initEmptyTempRepo("test-push-pub-dst");
+		HgServer server = new HgServer().publishing(true).start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			PhasesHelper phaseHelper = new PhasesHelper(HgInternals.getImplementationRepo(srcRepo));
+			final RevisionSet allDraft = phaseHelper.allDraft();
+			assertFalse("[sanity]", allDraft.isEmpty());
+			// push all changes
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			final HgChangelog srcClog = srcRepo.getChangelog();
+			final HgChangelog dstClog = dstRepo.getChangelog();
+			// refresh PhasesHelper
+			phaseHelper = new PhasesHelper(HgInternals.getImplementationRepo(srcRepo));
+			for (Nodeid n : allDraft) {
+				// check drafts from src were actually pushed to dst 
+				errorCollector.assertTrue(dstClog.isKnown(n));
+				// check drafts became public
+				errorCollector.assertEquals(HgPhase.Public, phaseHelper.getPhase(srcClog.getRevisionIndex(n), n));
+			}
+		} finally {
+			server.stop();
+		}
+	}
+
+	@Test
+	public void testPushSecretChangesets() throws Exception {
+		// copy, not clone as latter updates phase information
+		File srcRepoLoc = RepoUtils.copyRepoToTempLocation("test-phases", "test-push-no-secret-src");
+		File dstRepoLoc = RepoUtils.initEmptyTempRepo("test-push-no-secret-dst");
+		HgServer server = new HgServer().start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			PhasesHelper phaseHelper = new PhasesHelper(HgInternals.getImplementationRepo(srcRepo));
+			final RevisionSet allSecret = phaseHelper.allSecret();
+			assertFalse("[sanity]", allSecret.isEmpty());
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			final HgChangelog srcClog = srcRepo.getChangelog();
+			final HgChangelog dstClog = dstRepo.getChangelog();
+			errorCollector.assertEquals(srcClog.getRevisionCount() - allSecret.size(), dstClog.getRevisionCount());
+			for (Nodeid n : allSecret) {		
+				errorCollector.assertTrue(n.toString(), !dstClog.isKnown(n));
+			}
+		} finally {
+			server.stop();
+		}
+	}
+
+	@Test
+	public void testUpdateBookmarkOnPush() throws Exception {
+		File srcRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-push-src", false);
+		File dstRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-push-dst", false);
+		final ExecHelper srcRun = new ExecHelper(new OutputParser.Stub(), srcRepoLoc);
+		final ExecHelper dstRun = new ExecHelper(new OutputParser.Stub(), dstRepoLoc);
+		File f1 = new File(srcRepoLoc, "file1");
+		assertTrue("[sanity]", f1.canWrite());
+		//
+		final String bm1 = "mark1", bm2 = "mark2", bm3 = "mark3", bm4 = "mark4", bm5 = "mark5";
+		final int bm2Local = 1, bm2Remote = 6, bm3Local = 7, bm3Remote = 2, bm_4_5 = 3;
+		// 1) bm1 - local active bookmark, check that push updates in remote
+		srcRun.exec("hg", "bookmark", bm1);
+		dstRun.exec("hg", "bookmark", "-r", "8", bm1);
+		// 2) bm2 - local points to ancestor of revision remote points to
+		srcRun.exec("hg", "bookmark", "-r", String.valueOf(bm2Local), bm2);
+		dstRun.exec("hg", "bookmark", "-r", String.valueOf(bm2Remote), bm2);
+		// 3) bm3 - remote points to ancestor of revision local one points to   
+		srcRun.exec("hg", "bookmark", "-r", String.valueOf(bm3Local), bm3);
+		dstRun.exec("hg", "bookmark", "-r", String.valueOf(bm3Remote), bm3);
+		// 4) bm4 - remote bookmark, not known locally
+		dstRun.exec("hg", "bookmark", "-r", String.valueOf(bm_4_5), bm4);
+		// 5) bm5 - local bookmark, not known remotely
+		srcRun.exec("hg", "bookmark", "-r", String.valueOf(bm_4_5), bm5);
+		//
+		HgServer server = new HgServer().start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			RepoUtils.modifyFileAppend(f1, "change1");
+			final HgCommitCommand commitCmd = new HgCommitCommand(srcRepo).message("Commit 1");
+			assertTrue(commitCmd.execute().isOk());
+			assertEquals(bm1, srcRepo.getBookmarks().getActiveBookmarkName());
+			assertEquals(commitCmd.getCommittedRevision(), srcRepo.getBookmarks().getRevision(bm1));
+			//
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			Thread.sleep(300); // let the server perform the update
+			//
+			HgBookmarks srcBookmarks = srcRepo.getBookmarks();
+			final HgChangelog srcClog = srcRepo.getChangelog();
+			// first, check local bookmarks are intact
+			errorCollector.assertEquals(srcClog.getRevision(bm2Local), srcBookmarks.getRevision(bm2));
+			errorCollector.assertEquals(srcClog.getRevision(bm3Local), srcBookmarks.getRevision(bm3));
+			errorCollector.assertEquals(null, srcBookmarks.getRevision(bm4));
+			errorCollector.assertEquals(srcClog.getRevision(bm_4_5), srcBookmarks.getRevision(bm5));
+			// now, check remote bookmarks were touched
+			HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			HgBookmarks dstBookmarks = dstRepo.getBookmarks();
+			final HgChangelog dstClog = dstRepo.getChangelog();
+			// bm1 changed and points to newly pushed commit.
+			// if the test fails (bm1 points to r8), chances are server didn't manage to update
+			// bookmarks yet (there's Thread.sleep() above to give it a chance).
+			errorCollector.assertEquals(commitCmd.getCommittedRevision(), dstBookmarks.getRevision(bm1));
+			// bm2 didn't change
+			errorCollector.assertEquals(dstClog.getRevision(bm2Remote), dstBookmarks.getRevision(bm2));
+			// bm3 did change, now points to value we've got in srcRepo
+			errorCollector.assertEquals(srcClog.getRevision(bm3Local), dstBookmarks.getRevision(bm3));
+			// bm4 is not affected
+			errorCollector.assertEquals(dstClog.getRevision(bm_4_5), dstBookmarks.getRevision(bm4));
+			// bm5 is not known remotely
+			errorCollector.assertEquals(null, dstBookmarks.getRevision(bm5));
+		} finally {
+			server.stop();
+		}
+	}
+
+	private void checkRepositoriesAreSame(HgRepository srcRepo, HgRepository dstRepo) {
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevisionCount(), dstRepo.getChangelog().getRevisionCount());
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevision(0), dstRepo.getChangelog().getRevision(0));
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevision(TIP), dstRepo.getChangelog().getRevision(TIP));
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/TestRepositoryLock.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,178 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.tmatesoft.hg.core.HgStatusCommand;
+import org.tmatesoft.hg.internal.BasicSessionContext;
+import org.tmatesoft.hg.internal.DataAccessProvider;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRepositoryLock;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class TestRepositoryLock {
+
+	@Test
+	public void testWorkingDirLock() throws Exception {
+		File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-wc-lock", false);
+		// turn off lock timeout, to fail fast
+		File hgrc = new File(repoLoc, ".hg/hgrc");
+		RepoUtils.createFile(hgrc, "[ui]\ntimeout=0\n"); // or 1
+		final OutputParser.Stub p = new OutputParser.Stub();
+		ExecHelper eh = new ExecHelper(p, repoLoc);
+		HgRepository hgRepo = new HgLookup().detect(repoLoc);
+		final HgRepositoryLock wdLock = hgRepo.getWorkingDirLock();
+		try {
+			wdLock.acquire();
+			eh.run("hg", "tag", "tag-aaa");
+			Assert.assertNotSame(0 /*returns 0 on success*/, eh.getExitValue());
+			Assert.assertTrue(p.result().toString().contains("abort"));
+		} finally {
+			wdLock.release();
+		}
+	}
+
+	public static void main(String[] args) throws Exception {
+		Map<String, Object> po = new HashMap<String, Object>();
+		po.put(DataAccessProvider.CFG_PROPERTY_MAPIO_LIMIT, 0);
+		final HgLookup hgLookup = new HgLookup(new BasicSessionContext(po , null));
+		final File rebaseFromRepoLoc = RepoUtils.cloneRepoToTempLocation(new File("/temp/hg/junit-test-repos/test-annotate"), "repo-lock-remote", false, true);
+		final File rebaseToRepoLoc = RepoUtils.cloneRepoToTempLocation(rebaseFromRepoLoc, "repo-lock-local", false, true);
+		final File remoteChanges = new File(rebaseFromRepoLoc, "file1");
+		//
+		// create commit in the "local" repository that will be rebased on top of changes
+		// pulled from "remote repository"
+		File localChanges = new File(rebaseToRepoLoc, "file-new");
+		if (localChanges.exists()) {
+			RepoUtils.modifyFileAppend(localChanges, "whatever");
+		} else {
+			RepoUtils.createFile(localChanges, "whatever");
+		}
+		commit(rebaseToRepoLoc, "local change");
+		//
+		final int rebaseRevisionCount = 70;
+		final CountDownLatch latch = new CountDownLatch(2);
+		Runnable r1 = new Runnable() {
+			public void run() {
+				for (int i = 0; i < rebaseRevisionCount; i++) {
+					commitPullRebaseNative(rebaseFromRepoLoc, rebaseToRepoLoc, remoteChanges);
+					sleep(500, 1000);
+				}
+				latch.countDown();
+			}
+		};
+		Runnable r2 = new Runnable() {
+			public void run() {
+				for (int i = 0; i < 100; i++) {
+					readWithHg4J(hgLookup, rebaseToRepoLoc);
+					sleep(800, 400);
+				}
+				latch.countDown();
+			}
+		};
+		new Thread(r1, "pull-rebase-thread").start();
+		new Thread(r2, "hg4j-read-thread").start();
+		latch.await();
+		System.out.println("DONE.");
+		// now `hg log` in rebaseToRepoLoc shall show 
+		// all rebaseRevisionCount revisions from rebaseFromRepoLoc + 1 more, "local change", on top of them
+	}
+
+	private static int count = 0;
+
+	private static void commitPullRebaseNative(final File rebaseFromRepoLoc, final File rebaseToRepoLoc, final File rebaseFromChanges) {
+		try {
+			OutputParser.Stub p = new OutputParser.Stub();
+			final ExecHelper eh = new ExecHelper(p, rebaseToRepoLoc);
+			RepoUtils.modifyFileAppend(rebaseFromChanges, "Change #" + count++);
+			commit(rebaseFromRepoLoc, "remote change");
+			p.reset();
+			eh.run("hg", "--traceback", "pull", rebaseFromRepoLoc.toString());
+			if (eh.getExitValue() != 0) {
+				System.out.println(p.result());
+			}
+			Assert.assertEquals(0, eh.getExitValue());
+			p.reset();
+			eh.run("hg", "--traceback", "--config", "extensions.hgext.rebase=", "rebase");
+			if (eh.getExitValue() != 0) {
+				System.out.println(p.result());
+			}
+			System.out.print("X");
+			Assert.assertEquals(0, eh.getExitValue());
+		} catch (RuntimeException ex) {
+			throw ex;
+		} catch (Exception ex) {
+			ex.printStackTrace();
+			throw new RuntimeException(null, ex); 
+		}
+	}
+	
+	private static void readWithHg4J(final HgLookup hgLookup, final File repoLoc) {
+		try {
+			System.out.print("(");
+			final long start = System.nanoTime();
+			HgRepository hgRepo = hgLookup.detect(repoLoc);
+			final HgRepositoryLock wcLock = hgRepo.getWorkingDirLock();
+			final HgRepositoryLock storeLock = hgRepo.getStoreLock();
+			wcLock.acquire();
+			System.out.print(".");
+			storeLock.acquire();
+			System.out.print(".");
+			try {
+				new HgStatusCommand(hgRepo).execute(new TestStatus.StatusCollector());
+				System.out.printf("%d ms)\n", (System.nanoTime() - start) / 1000000);
+			} finally {
+				storeLock.release();
+				wcLock.release();
+			}
+		} catch (RuntimeException ex) {
+			throw ex;
+		} catch (Exception ex) {
+			ex.printStackTrace();
+			throw new RuntimeException(null, ex); 
+		}
+	}
+	
+	private static void commit(File repoLoc, String message) throws Exception {
+		OutputParser.Stub p = new OutputParser.Stub();
+		final ExecHelper eh = new ExecHelper(p, repoLoc);
+		eh.run("hg", "commit", "--addremove", "-m", "\"" + message + "\"");
+		if (eh.getExitValue() != 0) {
+			System.out.println(p.result());
+		}
+		Assert.assertEquals(0, eh.getExitValue());
+	}
+	
+	private static void sleep(int msBase, int msDelta) {
+		try {
+			Thread.sleep(msBase + Math.round(Math.random() * msDelta));
+		} catch (InterruptedException ex) {
+			// IGNORE
+		}
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/TestRevisionMaps.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.tmatesoft.hg.core.HgException;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgParentChildMap;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRevisionMap;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class TestRevisionMaps {
+
+	@Rule
+	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
+
+	@Test
+	public void testParentChildMap() throws HgException {
+		final HgRepository repo = Configuration.get().find("test-annotate");
+		Nodeid[] allRevs = RepoUtils.allRevisions(repo);
+		HgParentChildMap<HgChangelog> parentHelper = new HgParentChildMap<HgChangelog>(repo.getChangelog());
+		parentHelper.init();
+		errorCollector.assertEquals(Arrays.asList(allRevs), parentHelper.all());
+		for (Nodeid n : allRevs) {
+			errorCollector.assertTrue(parentHelper.knownNode(n));
+			// parents
+			final Nodeid p1 = parentHelper.safeFirstParent(n);
+			final Nodeid p2 = parentHelper.safeSecondParent(n);
+			errorCollector.assertFalse(p1 == null);
+			errorCollector.assertFalse(p2 == null);
+			errorCollector.assertEquals(p1.isNull() ? null : p1, parentHelper.firstParent(n));
+			errorCollector.assertEquals(p2.isNull() ? null : p2, parentHelper.secondParent(n));
+			HashSet<Nodeid> parents = new HashSet<Nodeid>();
+			boolean modified = parentHelper.appendParentsOf(n, parents);
+			errorCollector.assertEquals(p1.isNull() && p2.isNull(), !modified);
+			HashSet<Nodeid> cp = new HashSet<Nodeid>();
+			cp.add(parentHelper.firstParent(n));
+			cp.add(parentHelper.secondParent(n));
+			cp.remove(null);
+			errorCollector.assertEquals(cp, parents);
+			modified = parentHelper.appendParentsOf(n, parents);
+			errorCollector.assertFalse(modified);
+			//
+			// isChild, hasChildren, childrenOf, directChildren
+			if (!p1.isNull()) {
+				errorCollector.assertTrue(parentHelper.isChild(p1, n));
+				errorCollector.assertTrue(parentHelper.hasChildren(p1));
+				errorCollector.assertTrue(parentHelper.childrenOf(Collections.singleton(p1)).contains(n));
+				errorCollector.assertTrue(parentHelper.directChildren(p1).contains(n));
+			}
+			if (!p2.isNull()) {
+				errorCollector.assertTrue(parentHelper.isChild(p2, n));
+				errorCollector.assertTrue(parentHelper.hasChildren(p2));
+				errorCollector.assertTrue(parentHelper.childrenOf(Collections.singleton(p2)).contains(n));
+				errorCollector.assertTrue(parentHelper.directChildren(p2).contains(n));
+			}
+			errorCollector.assertFalse(parentHelper.isChild(n, p1));
+			errorCollector.assertFalse(parentHelper.isChild(n, p2));
+			//
+			
+		}
+		// heads
+		errorCollector.assertEquals(Arrays.asList(allRevs[7], allRevs[9]), new ArrayList<Nodeid>(parentHelper.heads()));
+		// isChild
+		errorCollector.assertTrue(parentHelper.isChild(allRevs[1], allRevs[9]));
+		errorCollector.assertTrue(parentHelper.isChild(allRevs[0], allRevs[7]));
+		errorCollector.assertFalse(parentHelper.isChild(allRevs[4], allRevs[7]));
+		errorCollector.assertFalse(parentHelper.isChild(allRevs[2], allRevs[6]));
+		// childrenOf
+		errorCollector.assertEquals(Arrays.asList(allRevs[7]), parentHelper.childrenOf(Collections.singleton(allRevs[5])));
+		errorCollector.assertEquals(Arrays.asList(allRevs[8], allRevs[9]), parentHelper.childrenOf(Arrays.asList(allRevs[4], allRevs[6])));
+		errorCollector.assertEquals(Arrays.asList(allRevs[6], allRevs[8], allRevs[9]), parentHelper.childrenOf(Collections.singleton(allRevs[3])));
+		// directChildren
+		errorCollector.assertEquals(Arrays.asList(allRevs[2], allRevs[3]), parentHelper.directChildren(allRevs[1]));
+		errorCollector.assertEquals(Arrays.asList(allRevs[8]), parentHelper.directChildren(allRevs[6]));
+		errorCollector.assertEquals(Collections.emptyList(), parentHelper.directChildren(allRevs[7]));
+	}
+
+	@Test
+	public void testRevisionMap() throws HgException {
+		// XXX this test may benefit from external huge repository
+		final HgRepository repo = Configuration.get().find("test-annotate");
+		Nodeid[] allRevs = RepoUtils.allRevisions(repo);
+		final HgChangelog clog = repo.getChangelog();
+		final HgRevisionMap<HgChangelog> rmap = new HgRevisionMap<HgChangelog>(clog).init();
+		doTestRevisionMap(allRevs, rmap);
+	}
+
+	@Test
+	public void testRevisionMapFromParentChildMap() throws HgException {
+		final HgRepository repo = Configuration.get().find("test-annotate");
+		Nodeid[] allRevs = RepoUtils.allRevisions(repo);
+		HgParentChildMap<HgChangelog> parentHelper = new HgParentChildMap<HgChangelog>(repo.getChangelog());
+		parentHelper.init();
+		doTestRevisionMap(allRevs, parentHelper.getRevisionMap());
+	}
+
+	private void doTestRevisionMap(Nodeid[] allRevs, HgRevisionMap<HgChangelog> rmap) {
+		for (int i = 0; i < allRevs.length; i++) {
+			errorCollector.assertEquals(i, rmap.revisionIndex(allRevs[i]));
+			errorCollector.assertEquals(allRevs[i], rmap.revision(i));
+		}
+	}
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/TestRevisionSet.java	Wed Jul 10 11:48:55 2013 +0200
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.RevisionSet;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgParentChildMap;
+import org.tmatesoft.hg.repo.HgRepository;
+
+/**
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class TestRevisionSet {
+	
+	@Rule
+	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
+	
+	@Test
+	public void testRegularSetOperations() {
+		Nodeid n1 = Nodeid.fromAscii("c75297c1786734589175c673db40e8ecaa032b09");
+		Nodeid n2 = Nodeid.fromAscii("3b7d51ed4c65082f9235e3459e282d7ff723aa97");
+		Nodeid n3 = Nodeid.fromAscii("14dac192aa262feb8ff6645a102648498483a188");
+		Nodeid n4 = Nodeid.fromAscii("1deea2f332183c947937f6df988c2c6417efc217");
+		Nodeid[] nodes = { n1, n2, n3 };
+		RevisionSet a = new RevisionSet(nodes);
+		Nodeid[] nodes1 = { n3, n4 };
+		RevisionSet b = new RevisionSet(nodes1);
+		Nodeid[] nodes2 = { n1, n2, n3, n4 };
+		RevisionSet union_ab = new RevisionSet(nodes2);
+		Nodeid[] nodes3 = { n3 };
+		RevisionSet intersect_ab = new RevisionSet(nodes3);
+		Nodeid[] nodes4 = { n1, n2 };
+		RevisionSet subtract_ab = new RevisionSet(nodes4);
+		Nodeid[] nodes5 = { n4 };
+		RevisionSet subtract_ba = new RevisionSet(nodes5);
+		Nodeid[] nodes6 = { n1, n2, n4 };
+		RevisionSet symDiff_ab = new RevisionSet(nodes6);
+		
+		errorCollector.assertEquals(union_ab, a.union(b));
+		errorCollector.assertEquals(union_ab, b.union(a));
+		errorCollector.assertEquals(intersect_ab, a.intersect(b));
+		errorCollector.assertEquals(intersect_ab, b.intersect(a));
+		errorCollector.assertEquals(subtract_ab, a.subtract(b));
+		errorCollector.assertEquals(subtract_ba, b.subtract(a));
+		errorCollector.assertEquals(symDiff_ab, a.symmetricDifference(b));
+		errorCollector.assertEquals(symDiff_ab, b.symmetricDifference(a));
+		Nodeid[] nodes7 = { n1, n2, n4 };
+		Nodeid[] nodes8 = { n4, n1, n2 };
+		errorCollector.assertTrue(new RevisionSet(nodes7).equals(new RevisionSet(nodes8)));
+		Nodeid[] nodes9 = {};
+		Nodeid[] nodes10 = {};
+		errorCollector.assertTrue(new RevisionSet(nodes9).equals(new RevisionSet(nodes10)));
+		Nodeid[] nodes11 = { n1 };
+		Nodeid[] nodes12 = { n2 };
+		errorCollector.assertFalse(new RevisionSet(nodes11).equals(new RevisionSet(nodes12)));
+	}
+	
+	@Test
+	public void testRootsAndHeads() throws Exception {
+		final HgRepository repo = Configuration.get().find("test-annotate");
+		Nodeid[] allRevs = RepoUtils.allRevisions(repo);
+		HgParentChildMap<HgChangelog> parentHelper = new HgParentChildMap<HgChangelog>(repo.getChangelog());
+		parentHelper.init();
+		final RevisionSet complete = new RevisionSet(allRevs);
+		Nodeid[] nodes = { allRevs[0] };
+		// roots
+		errorCollector.assertEquals(new RevisionSet(nodes), complete.roots(parentHelper));
+		Nodeid[] nodes1 = { allRevs[0], allRevs[1] };
+		RevisionSet fromR2 = complete.subtract(new RevisionSet(nodes1));
+		Nodeid[] nodes2 = { allRevs[0], allRevs[1], allRevs[2] };
+		RevisionSet fromR3 = complete.subtract(new RevisionSet(nodes2));
+		Nodeid[] nodes3 = { allRevs[2], allRevs[3] };
+		errorCollector.assertEquals(new RevisionSet(nodes3), fromR2.roots(parentHelper));
+		Nodeid[] nodes4 = { allRevs[3], allRevs[4], allRevs[5] };
+		errorCollector.assertEquals(new RevisionSet(nodes4), fromR3.roots(parentHelper));
+		Nodeid[] nodes5 = { allRevs[9], allRevs[7] };
+		// heads
+		errorCollector.assertEquals(new RevisionSet(nodes5), complete.heads(parentHelper));
+		Nodeid[] nodes6 = { allRevs[9], allRevs[8] };
+		RevisionSet toR7 = complete.subtract(new RevisionSet(nodes6));
+		Nodeid[] nodes7 = { allRevs[7], allRevs[6], allRevs[4] };
+		errorCollector.assertEquals(new RevisionSet(nodes7), toR7.heads(parentHelper));
+		Nodeid[] nodes8 = { allRevs[5], allRevs[7] };
+		RevisionSet withoutNoMergeBranch = toR7.subtract(new RevisionSet(nodes8));
+		Nodeid[] nodes9 = { allRevs[6], allRevs[4] };
+		errorCollector.assertEquals(new RevisionSet(nodes9), withoutNoMergeBranch.heads(parentHelper));
+		errorCollector.assertEquals(complete.heads(parentHelper), complete.heads(parentHelper).heads(parentHelper));
+	}
+	
+	@Test
+	public void testAncestorsAndChildren() throws Exception {
+		final HgRepository repo = Configuration.get().find("test-annotate");
+		Nodeid[] allRevs = RepoUtils.allRevisions(repo);
+		HgParentChildMap<HgChangelog> parentHelper = new HgParentChildMap<HgChangelog>(repo.getChangelog());
+		parentHelper.init();
+		final RevisionSet complete = new RevisionSet(allRevs);
+		Nodeid[] nodes = {};
+		// children
+		errorCollector.assertTrue(new RevisionSet(nodes).children(parentHelper).isEmpty());
+		Nodeid[] nodes1 = { allRevs[8], allRevs[9] };
+		Nodeid[] nodes2 = { allRevs[4] };
+		errorCollector.assertEquals(new RevisionSet(nodes1), new RevisionSet(nodes2).children(parentHelper));
+		Nodeid[] nodes3 = { allRevs[8], allRevs[9], allRevs[4], allRevs[5], allRevs[7] };
+		// default branch and no-merge branch both from r2 
+		RevisionSet s1 = new RevisionSet(nodes3);
+		Nodeid[] nodes4 = { allRevs[2] };
+		errorCollector.assertEquals(s1, new RevisionSet(nodes4).children(parentHelper));
+		Nodeid[] nodes5 = { allRevs[0], allRevs[1] };
+		// ancestors
+		RevisionSet fromR2 = complete.subtract(new RevisionSet(nodes5));
+		Nodeid[] nodes6 = { allRevs[9], allRevs[5], allRevs[7], allRevs[8] };
+		// no-merge branch and r9 are not in ancestors of r8 (as well as r8 itself)
+		RevisionSet s3 = fromR2.subtract(new RevisionSet(nodes6));
+		Nodeid[] nodes7 = { allRevs[8] };
+		errorCollector.assertEquals(s3, fromR2.ancestors(new RevisionSet(nodes7), parentHelper));
+		Nodeid[] nodes8 = { allRevs[5], allRevs[7] };
+		// ancestors of no-merge branch
+		RevisionSet branchNoMerge = new RevisionSet(nodes8);
+		Nodeid[] nodes9 = { allRevs[0], allRevs[1], allRevs[2] };
+		errorCollector.assertEquals(new RevisionSet(nodes9), complete.ancestors(branchNoMerge, parentHelper));
+		Nodeid[] nodes10 = { allRevs[2] };
+		errorCollector.assertEquals(new RevisionSet(nodes10), fromR2.ancestors(branchNoMerge, parentHelper));
+	}
+}
--- a/test/org/tmatesoft/hg/test/TestRevlog.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestRevlog.java	Wed Jul 10 11:48:55 2013 +0200
@@ -33,6 +33,7 @@
 import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgManifest.Flags;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgRuntimeException;
 import org.tmatesoft.hg.util.Path;
 
 /**
@@ -187,7 +188,7 @@
 		return patch1;
 	}
 	
-	private byte[] getRevisionTrueContent(File repoLoc, final int manifestRev, int clogRev) throws HgRepositoryNotFoundException {
+	private byte[] getRevisionTrueContent(File repoLoc, final int manifestRev, int clogRev) throws HgRepositoryNotFoundException, IllegalArgumentException, HgRuntimeException {
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
 		final ByteArrayOutputStream out = new ByteArrayOutputStream(1024 * 1000);
 		hgRepo.getManifest().walk(clogRev, clogRev, new HgManifest.Inspector() {
--- a/test/org/tmatesoft/hg/test/TestStorePath.java	Thu Jun 06 14:21:11 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestStorePath.java	Wed Jul 10 11:48:55 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011-2012 TMate Software Ltd
+ * Copyright (c) 2011-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -60,7 +60,7 @@
 	public TestStorePath() {
 		propertyOverrides.put("hg.consolelog.debug", true);
 		sessionCtx = new BasicSessionContext(propertyOverrides, null);
-		repoInit = new RepoInitializer().setRequires(STORE + FNCACHE + DOTENCODE);
+		repoInit = new RepoInitializer().setRequires(REVLOGV1 | STORE | FNCACHE | DOTENCODE);
 		storePathHelper = repoInit.buildDataFilesHelper(sessionCtx);
 	}