changeset 707:42b88709e41d

Merge: support 'unresolved' resolution with MergeStateBuilder
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Fri, 16 Aug 2013 19:22:59 +0200
parents cd5c87d96315
children 4ffc17c0b534
files src/org/tmatesoft/hg/core/HgMergeCommand.java src/org/tmatesoft/hg/internal/DirstateBuilder.java src/org/tmatesoft/hg/internal/FileUtils.java src/org/tmatesoft/hg/internal/MergeStateBuilder.java src/org/tmatesoft/hg/internal/WorkingDirFileWriter.java src/org/tmatesoft/hg/repo/HgManifest.java src/org/tmatesoft/hg/repo/HgMergeState.java src/org/tmatesoft/hg/repo/HgRepositoryFiles.java test/org/tmatesoft/hg/test/RepoUtils.java test/org/tmatesoft/hg/test/TestMerge.java
diffstat 10 files changed, 309 insertions(+), 53 deletions(-) [+]
line wrap: on
line diff
--- a/src/org/tmatesoft/hg/core/HgMergeCommand.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgMergeCommand.java	Fri Aug 16 19:22:59 2013 +0200
@@ -36,6 +36,7 @@
 import org.tmatesoft.hg.internal.Transaction;
 import org.tmatesoft.hg.internal.WorkingDirFileWriter;
 import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgParentChildMap;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgRepositoryLock;
@@ -86,9 +87,11 @@
 			final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo);
 			dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource(repo.getSessionContext().getPathFactory(), cacheFiles)));
 			final HgChangelog clog = repo.getChangelog();
-			dirstateBuilder.parents(clog.getRevision(firstCset), clog.getRevision(secondCset));
+			final Nodeid headCset1 = clog.getRevision(firstCset);
+			dirstateBuilder.parents(headCset1, clog.getRevision(secondCset));
 			//
 			MergeStateBuilder mergeStateBuilder = new MergeStateBuilder(implRepo);
+			mergeStateBuilder.prepare(headCset1);
 
 			ManifestRevision m1, m2, ma;
 			m1 = new ManifestRevision(cacheRevs, cacheFiles).init(repo, firstCset);
@@ -105,37 +108,38 @@
 						fileRevBase = ma.contains(f) ? ma.nodeid(f) : null;
 						if (fileRevA.equals(fileRevB)) {
 							HgFileRevision fr = new HgFileRevision(repo, fileRevA, m1.flags(f), f);
-							resolver.presentState(f, fr, fr);
+							resolver.presentState(f, fr, fr, null);
 							mediator.same(fr, resolver);
 						} else if (fileRevBase == fileRevA) {
 							assert fileRevBase != null;
 							HgFileRevision frBase = new HgFileRevision(repo, fileRevBase, ma.flags(f), f);
 							HgFileRevision frSecond= new HgFileRevision(repo, fileRevB, m2.flags(f), f);
-							resolver.presentState(f, frBase, frSecond);
+							resolver.presentState(f, frBase, frSecond, frBase);
 							mediator.fastForwardB(frBase, frSecond, resolver);
 						} else if (fileRevBase == fileRevB) {
 							assert fileRevBase != null;
 							HgFileRevision frBase = new HgFileRevision(repo, fileRevBase, ma.flags(f), f);
 							HgFileRevision frFirst = new HgFileRevision(repo, fileRevA, m1.flags(f), f);
-							resolver.presentState(f, frFirst, frBase);
+							resolver.presentState(f, frFirst, frBase, frBase);
 							mediator.fastForwardA(frBase, frFirst, resolver);
 						} else {
 							HgFileRevision frBase = fileRevBase == null ? null : new HgFileRevision(repo, fileRevBase, ma.flags(f), f);
 							HgFileRevision frFirst = new HgFileRevision(repo, fileRevA, m1.flags(f), f);
 							HgFileRevision frSecond= new HgFileRevision(repo, fileRevB, m2.flags(f), f);
-							resolver.presentState(f, frFirst, frSecond);
+							resolver.presentState(f, frFirst, frSecond, frBase);
 							mediator.resolve(frBase, frFirst, frSecond, resolver);
 						}
 					} else {
 						// m2 doesn't contain the file, either new in m1, or deleted in m2
 						HgFileRevision frFirst = new HgFileRevision(repo, m1.nodeid(f), m1.flags(f), f);
-						resolver.presentState(f, frFirst, null);
 						if (ma.contains(f)) {
 							// deleted in m2
 							HgFileRevision frBase = new HgFileRevision(repo, ma.nodeid(f), ma.flags(f), f);
+							resolver.presentState(f, frFirst, null, frBase);
 							mediator.onlyA(frBase, frFirst, resolver);
 						} else {
 							// new in m1
+							resolver.presentState(f, frFirst, null, null);
 							mediator.newInA(frFirst, resolver);
 						}
 					}
@@ -147,13 +151,14 @@
 					}
 					HgFileRevision frSecond= new HgFileRevision(repo, m2.nodeid(f), m2.flags(f), f);
 					// file in m2 is either new or deleted in m1
-					resolver.presentState(f, null, frSecond);
 					if (ma.contains(f)) {
 						// deleted in m1
 						HgFileRevision frBase = new HgFileRevision(repo, ma.nodeid(f), ma.flags(f), f);
+						resolver.presentState(f, null, frSecond, frBase);
 						mediator.onlyB(frBase, frSecond, resolver);
 					} else {
 						// new in m2
+						resolver.presentState(f, null, frSecond, null);
 						mediator.newInB(frSecond, resolver);
 					}
 					resolver.apply();
@@ -162,9 +167,11 @@
 				transaction.commit();
 			} catch (HgRuntimeException ex) {
 				transaction.rollback();
+				mergeStateBuilder.abandon();
 				throw ex;
 			} catch (HgIOException ex) {
 				transaction.rollback();
+				mergeStateBuilder.abandon();
 				throw ex;
 			}
 		} catch (HgRuntimeException ex) {
@@ -255,33 +262,70 @@
 		 * @throws IOException propagated exceptions from content
 		 */
 		public void use(InputStream content) throws IOException;
+		/**
+		 * Do not use this file for resolution. Marks the file for deletion, if appropriate.
+		 */
 		public void forget(HgFileRevision rev);
-		public void unresolved(); // record the file for later processing by 'hg resolve'
+		/**
+		 * Record the file for later processing by 'hg resolve'. It's required
+		 * that the processed file is present in both trunks. We need two file revisions
+		 * to put an entry into merge/state file.
+		 * 
+		 * XXX Perhaps, shall take two HgFileRevision arguments to facilitate
+		 * extra control over what goes into merge/state and to ensure this method
+		 * is not invoked when there are no conflicting revisions. 
+		 */
+		public void unresolved();
 	}
 
 	/**
-	 * Base mediator implementation, with regular resolution
+	 * Base mediator implementation, with regular resolution. 
+	 * Subclasses shall implement {@link #resolve(HgFileRevision, HgFileRevision, HgFileRevision, Resolver)} and
+	 * may optionally provide extra logic (e.g. ask user) for other cases.
 	 */
 	@Experimental(reason="Provisional API. Work in progress")
-	public abstract class MediatorBase implements Mediator {
+	public abstract static class MediatorBase implements Mediator {
+		/**
+		 * Implementation keeps this revision
+		 */
 		public void same(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException {
 			resolver.use(rev);
 		}
+		/**
+		 * Implementation keeps file revision from first/left/A trunk.
+		 * Subclasses may opt to {@link Resolver#forget(HgFileRevision) delete} it as it's done in second/right/B trunk.
+		 */
 		public void onlyA(HgFileRevision base, HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException {
 			resolver.use(rev);
 		}
+		/**
+		 * Implementation restores file from second/right/B trunk. 
+		 * Subclasses may ask user to decide if it's necessary to do that 
+		 */
 		public void onlyB(HgFileRevision base, HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException {
 			resolver.use(rev);
 		}
+		/**
+		 * Implementation keeps this revision
+		 */
 		public void newInA(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException {
 			resolver.use(rev);
 		}
+		/**
+		 * Implementation adds this revision. Subclasses may let user decide if it's necessary to add the file
+		 */
 		public void newInB(HgFileRevision rev, Resolver resolver) throws HgCallbackTargetException {
 			resolver.use(rev);
 		}
+		/**
+		 * Implementation keeps latest revision
+		 */
 		public void fastForwardA(HgFileRevision base, HgFileRevision first, Resolver resolver) throws HgCallbackTargetException {
 			resolver.use(first);
 		}
+		/**
+		 * Implementation keeps latest revision
+		 */
 		public void fastForwardB(HgFileRevision base, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException {
 			resolver.use(second);
 		}
@@ -295,6 +339,7 @@
 		private boolean changedDirstate;
 		private HgFileRevision revA;
 		private HgFileRevision revB;
+		private HgFileRevision revBase;
 		private Path file;
 		// resolutions:
 		private HgFileRevision resolveUse, resolveForget;
@@ -312,14 +357,15 @@
 			if (changedDirstate) {
 				dirstateBuilder.serialize(tr);
 			}
-			mergeStateBuilder.serialize(tr);
+			mergeStateBuilder.serialize();
 		}
 
-		void presentState(Path p, HgFileRevision revA, HgFileRevision revB) {
+		void presentState(Path p, HgFileRevision revA, HgFileRevision revB, HgFileRevision base) {
 			assert revA != null || revB != null;
 			file = p;
 			this.revA = revA;
 			this.revB = revB;
+			revBase = base;
 			resolveUse = resolveForget = null;
 			resolveContent = null;
 			resolveMarkUnresolved = false;
@@ -327,9 +373,18 @@
 
 		void apply() throws HgIOException, HgRuntimeException {
 			if (resolveMarkUnresolved) {
-				mergeStateBuilder.unresolved(file);
+				HgFileRevision c = revBase;
+				if (revBase == null) {
+					// fake revision, null parent
+					c = new HgFileRevision(repo.getRepo(), Nodeid.NULL, HgManifest.Flags.RegularFile, file);
+				}
+				mergeStateBuilder.unresolved(file, revA, revB, c, revA.getFileFlags());
+				changedDirstate = true;
+				dirstateBuilder.recordMergedExisting(file, revA.getPath());
 			} else if (resolveForget != null) {
-				if (resolveForget == revA) {
+				// if the revision to forget comes from second/B trunk, shall record it as removed
+				// only when corresponding file in first/A trunk is missing (merge:_forgetremoved())
+				if (resolveForget == revA || (resolveForget == revB && revA == null)) {
 					changedDirstate = true;
 					dirstateBuilder.recordRemoved(file);
 				}
@@ -381,10 +436,8 @@
 			assert resolveUse == null;
 			assert resolveForget == null;
 			try {
-				// cache new contents just to fail fast if there are troubles with content
-				final FileUtils fileUtils = new FileUtils(repo.getLog(), this);
-				resolveContent = fileUtils.createTempFile();
-				fileUtils.write(content, resolveContent);
+				resolveContent = FileUtils.createTempFile();
+				new FileUtils(repo.getLog(), this).write(content, resolveContent);
 			} finally {
 				content.close();
 			}
@@ -404,6 +457,9 @@
 		}
 
 		public void unresolved() {
+			if (revA == null || revB == null) {
+				throw new UnsupportedOperationException("To mark conflict as unresolved need two revisions");
+			}
 			resolveMarkUnresolved = true;
 		}
 	}
--- a/src/org/tmatesoft/hg/internal/DirstateBuilder.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DirstateBuilder.java	Fri Aug 16 19:22:59 2013 +0200
@@ -108,6 +108,22 @@
 		normal.put(fname, new HgDirstate.Record(0, -2, -1, fname, null));
 	}
 	
+	/**
+	 * Mark file from this dirstate as merged, reusing mode, size, modification time
+	 * and copy source from its existing record (looked up by knownInDirstate)
+	 */
+	public void recordMergedExisting(Path fname, Path knownInDirstate) {
+		HgDirstate.Record r = forget(knownInDirstate);
+		HgDirstate.Record n;
+		if (r == null) {
+			assert false;
+			n = new HgDirstate.Record(0, -1, -1, fname, null);
+		} else {
+			n = new HgDirstate.Record(r.mode(), r.size(), r.modificationTime(), fname, r.copySource());
+		}
+		merged.put(fname, n);
+	}
+
+	
 	private HgDirstate.Record forget(Path fname) {
 		HgDirstate.Record r;
 		if ((r = normal.remove(fname)) != null) {
--- a/src/org/tmatesoft/hg/internal/FileUtils.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FileUtils.java	Fri Aug 16 19:22:59 2013 +0200
@@ -25,6 +25,9 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.channels.FileChannel;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
 
 import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.util.LogFacility;
@@ -128,7 +131,29 @@
 	}
 
 	// nothing special, just a single place with common prefix
-	public File createTempFile() throws IOException {
+	public static File createTempFile() throws IOException {
 		return File.createTempFile("hg4j-", null);
 	}
-}
+
+	public static void rmdir(File dest) throws IOException {
+		if (!dest.isDirectory()) {
+			return;
+		}
+		LinkedList<File> queue = new LinkedList<File>();
+		queue.addAll(Arrays.asList(dest.listFiles()));
+		while (!queue.isEmpty()) {
+			File next = queue.removeFirst();
+			if (next.isDirectory()) {
+				List<File> files = Arrays.asList(next.listFiles());
+				if (!files.isEmpty()) {
+					queue.addAll(files);
+					queue.add(next);
+				}
+				// fall through
+			} 
+			next.delete();
+		}
+		dest.delete();
+	}
+
+}
\ No newline at end of file
--- a/src/org/tmatesoft/hg/internal/MergeStateBuilder.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/MergeStateBuilder.java	Fri Aug 16 19:22:59 2013 +0200
@@ -16,9 +16,24 @@
  */
 package org.tmatesoft.hg.internal;
 
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.tmatesoft.hg.core.HgFileRevision;
 import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgManifest;
 import org.tmatesoft.hg.repo.HgMergeState;
+import org.tmatesoft.hg.repo.HgRepositoryFiles;
+import org.tmatesoft.hg.util.ByteChannel;
+import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.Path;
+import org.tmatesoft.hg.util.LogFacility.Severity;
 
 /**
  * Constructs merge/state file
@@ -28,21 +43,139 @@
  * @author TMate Software Ltd.
  */
 public class MergeStateBuilder {
-	
+
 	private final Internals repo;
+	private final List<Record> unresolved = new ArrayList<Record>();
+	private Nodeid stateParent = Nodeid.NULL;
 
 	public MergeStateBuilder(Internals implRepo) {
 		repo = implRepo;
 	}
 	
+	public void prepare(Nodeid nodeid) {
+		assert nodeid != null;
+		unresolved.clear();
+		stateParent = nodeid;
+		abandon();
+	}
+	
 	public void resolved() {
 		throw Internals.notImplemented();
 	}
 
-	public void unresolved(Path file) {
-		throw Internals.notImplemented();
+	public void unresolved(Path file, HgFileRevision first, HgFileRevision second, HgFileRevision base, HgManifest.Flags flags) throws HgIOException {
+		Record r = new Record(file, first.getPath(), second.getPath(), base.getPath(), base.getRevision(), flags);
+		final File d = mergeStateDir();
+		d.mkdirs();
+		File f = new File(d, r.hash());
+		try {
+			FileOutputStream fos = new FileOutputStream(f);
+			first.putContentTo(new OutputStreamSink(fos));
+			fos.flush();
+			fos.close();
+			unresolved.add(r);
+		} catch (IOException ex) {
+			throw new HgIOException(String.format("Failed to write content of unresolved file %s to merge state at %s", file, f), f);
+		} catch (CancelledException ex) {
+			repo.getLog().dump(getClass(), Severity.Error, ex, "Our impl doesn't throw cancellation");
+		}
 	}
 
-	public void serialize(Transaction tr) throws HgIOException {
+	// merge/state serialization is not a part of a transaction
+	public void serialize() throws HgIOException {
+		if (unresolved.isEmpty()) {
+			return;
+		}
+		File mergeStateFile = repo.getRepositoryFile(HgRepositoryFiles.MergeState);
+		try {
+			final byte NL = '\n';
+			FileOutputStream fos = new FileOutputStream(mergeStateFile);
+			fos.write(stateParent.toString().getBytes());
+			fos.write(NL);
+			for(Record r : unresolved) {
+				fos.write(r.key.toString().getBytes());
+				fos.write(0);
+				fos.write('u');
+				fos.write(0);
+				fos.write(r.hash().toString().getBytes());
+				fos.write(0);
+				fos.write(r.fnameA.toString().getBytes());
+				fos.write(0);
+				fos.write(r.fnameAncestor.toString().getBytes());
+				fos.write(0);
+				fos.write(r.ancestorRev.toString().getBytes());
+				fos.write(0);
+				fos.write(r.fnameB.toString().getBytes());
+				fos.write(0);
+				fos.write(r.flags.mercurialString().getBytes());
+				fos.write(NL);
+			}
+			fos.flush();
+			fos.close();
+		} catch (IOException ex) {
+			throw new HgIOException("Failed to serialize merge state", mergeStateFile);
+		}
+	}
+	
+	public void abandon() {
+		File mergeStateDir = mergeStateDir();
+		try {
+			FileUtils.rmdir(mergeStateDir);
+		} catch (IOException ex) {
+			// ignore almost silently
+			repo.getLog().dump(getClass(), Severity.Warn, ex, String.format("Failed to delete merge state in %s", mergeStateDir));
+		}
+	}
+
+	private File mergeStateDir() {
+		return repo.getRepositoryFile(HgRepositoryFiles.MergeState).getParentFile();
+	}
+
+	private static class Record {
+		public final Path key;
+		public final Path fnameA, fnameB, fnameAncestor;
+		public final Nodeid ancestorRev;
+		public final HgManifest.Flags flags;
+		private String hash;
+
+		public Record(Path fname, Path a, Path b, Path ancestor, Nodeid rev, HgManifest.Flags f) {
+			key = fname;
+			fnameA = a;
+			fnameB = b;
+			fnameAncestor = ancestor;
+			ancestorRev = rev;
+			flags = f;
+		}
+		
+		public String hash() {
+			if (hash == null) {
+				hash = new DigestHelper().sha1(key).asHexString();
+			}
+			return hash;
+		}
+	}
+
+	private static class OutputStreamSink implements ByteChannel {
+		private final OutputStream out;
+
+		public OutputStreamSink(OutputStream outputStream) {
+			out = outputStream;
+		}
+
+		public int write(ByteBuffer buffer) throws IOException {
+			final int toWrite = buffer.remaining();
+			if (toWrite <= 0) {
+				return 0;
+			}
+			if (buffer.hasArray()) {
+				out.write(buffer.array(), buffer.arrayOffset(), toWrite);
+			} else {
+				while (buffer.hasRemaining()) {
+					out.write(buffer.get());
+				}
+			}
+			buffer.position(buffer.limit());
+			return toWrite;
+		}
 	}
 }
--- a/src/org/tmatesoft/hg/internal/WorkingDirFileWriter.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/WorkingDirFileWriter.java	Fri Aug 16 19:22:59 2013 +0200
@@ -217,6 +217,6 @@
 	}
 
 	private void handleUnexpectedCancel(CancelledException ex) {
-		hgRepo.getSessionContext().getLog().dump(WorkingDirFileWriter.class, Severity.Error, ex, "Our impl doesn't throw cancellation");
+		hgRepo.getLog().dump(WorkingDirFileWriter.class, Severity.Error, ex, "Our impl doesn't throw cancellation");
 	}
 }
--- a/src/org/tmatesoft/hg/repo/HgManifest.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgManifest.java	Fri Aug 16 19:22:59 2013 +0200
@@ -122,7 +122,10 @@
 			return RegularFile;
 		}
 		
-		String nativeString() {
+		/**
+		 * String representation of the flag as of native Mercurial client. 
+		 */
+		public String mercurialString() {
 			if (this == Exec) {
 				return "x";
 			}
--- a/src/org/tmatesoft/hg/repo/HgMergeState.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgMergeState.java	Fri Aug 16 19:22:59 2013 +0200
@@ -18,18 +18,18 @@
 
 import static org.tmatesoft.hg.core.Nodeid.NULL;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
 
 import org.tmatesoft.hg.core.HgFileRevision;
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.Internals;
+import org.tmatesoft.hg.internal.LineReader;
 import org.tmatesoft.hg.internal.ManifestRevision;
 import org.tmatesoft.hg.internal.Pool;
 import org.tmatesoft.hg.util.Pair;
@@ -105,7 +105,7 @@
 		Pool<Path> fnamePool = new Pool<Path>();
 		Pair<Nodeid, Nodeid> wcParents = hgRepo.getWorkingCopyParents();
 		wcp1 = nodeidPool.unify(wcParents.first()); wcp2 = nodeidPool.unify(wcParents.second());
-		final File f = repo.getFileFromRepoDir("merge/state");
+		final File f = repo.getRepositoryFile(HgRepositoryFiles.MergeState);
 		if (!f.canRead()) {
 			// empty state
 			return;
@@ -120,12 +120,13 @@
 				final int rp2 = hgRepo.getChangelog().getRevisionIndex(wcp2);
 				hgRepo.getManifest().walk(rp2, rp2, m2);
 			}
-			BufferedReader br = new BufferedReader(new FileReader(f));
-			String s = br.readLine();
+			LineReader lr = new LineReader(f, repo.getLog());
+			Iterator<String> lines = lr.read(new LineReader.SimpleLineCollector(), new ArrayList<String>()).iterator();
+			String s = lines.next();
 			stateParent = nodeidPool.unify(Nodeid.fromAscii(s));
 			final int rp1 = hgRepo.getChangelog().getRevisionIndex(stateParent);
 			hgRepo.getManifest().walk(rp1, rp1, m1);
-			while ((s = br.readLine()) != null) {
+			while (lines.hasNext()) {
 				String[] r = s.split("\\00");
 				Path p1fname = pathPool.path(r[3]);
 				Nodeid nidP1 = m1.nodeid(p1fname);
@@ -162,8 +163,7 @@
 				result.add(e);
 			}
 			entries = result.toArray(new Entry[result.size()]);
-			br.close();
-		} catch (IOException ex) {
+		} catch (HgIOException ex) {
 			throw new HgInvalidControlFileException("Merge state read failed", ex, f);
 		}
 	}
--- a/src/org/tmatesoft/hg/repo/HgRepositoryFiles.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRepositoryFiles.java	Fri Aug 16 19:22:59 2013 +0200
@@ -33,10 +33,16 @@
 	Branch(Home.Repo, "branch"), 
 	UndoBranch(Home.Repo, "undo.branch"), UndoDirstate(Home.Repo, "undo.dirstate"),
 	Phaseroots(Home.Store, "phaseroots"), FNCache(Home.Store, "fncache"),
-	WorkingCopyLock(Home.Repo, "wlock"), StoreLock(Home.Store, "lock");
+	WorkingCopyLock(Home.Repo, "wlock"), StoreLock(Home.Store, "lock"),
+	MergeState(Home.Repo, "merge/state");
 
 	/**
-	 * Possible file locations 
+	 * Possible file locations
+	 * <ul> 
+	 * <li>{@link #Root} lives in working copy
+	 * <li>{@link #Repo} files under <samp>.hg/</samp>
+	 * <li>{@link #Store} files in storage area, either <samp>.hg/store/</samp> or <samp>.hg/</samp>
+	 * </ul>
 	 */
 	public enum Home {
 		Root, Repo, Store
--- a/test/org/tmatesoft/hg/test/RepoUtils.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/test/org/tmatesoft/hg/test/RepoUtils.java	Fri Aug 16 19:22:59 2013 +0200
@@ -28,7 +28,6 @@
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.LinkedList;
-import java.util.List;
 
 import junit.framework.Assert;
 
@@ -175,21 +174,7 @@
 	}
 
 	static void rmdir(File dest) throws IOException {
-		LinkedList<File> queue = new LinkedList<File>();
-		queue.addAll(Arrays.asList(dest.listFiles()));
-		while (!queue.isEmpty()) {
-			File next = queue.removeFirst();
-			if (next.isDirectory()) {
-				List<File> files = Arrays.asList(next.listFiles());
-				if (!files.isEmpty()) {
-					queue.addAll(files);
-					queue.add(next);
-				}
-				// fall through
-			} 
-			next.delete();
-		}
-		dest.delete();
+		FileUtils.rmdir(dest);
 	}
 
 	static Nodeid[] allRevisions(HgRepository repo) {
--- a/test/org/tmatesoft/hg/test/TestMerge.java	Fri Aug 16 14:54:09 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestMerge.java	Fri Aug 16 19:22:59 2013 +0200
@@ -16,15 +16,18 @@
  */
 package org.tmatesoft.hg.test;
 
+import java.io.File;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
 import org.tmatesoft.hg.core.HgCallbackTargetException;
 import org.tmatesoft.hg.core.HgFileRevision;
 import org.tmatesoft.hg.core.HgMergeCommand;
 import org.tmatesoft.hg.core.HgMergeCommand.Resolver;
+import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgRepository;
 
 /**
@@ -40,6 +43,8 @@
 	@Test
 	public void testMediator() throws Exception {
 		HgRepository repo = Configuration.get().find("merge-1");
+		Assert.assertEquals("[sanity]", repo.getChangelog().getRevisionIndex(repo.getWorkingCopyParents().first()), 1);
+
 		HgMergeCommand cmd = new HgMergeCommand(repo);
 
 		MergeNotificationCollector c;
@@ -57,6 +62,33 @@
 		errorCollector.assertTrue("file3", c.onlyA.contains("file3"));
 		errorCollector.assertTrue("file4", c.same.contains("file4"));
 	}
+	
+	
+	@Test
+	public void testResolver() throws Exception {
+		File repoLoc1 = RepoUtils.copyRepoToTempLocation("merge-1", "test-merge-no-conflicts");
+		File repoLoc2 = RepoUtils.copyRepoToTempLocation("merge-1", "test-merge-with-conflicts");
+		HgRepository repo = new HgLookup().detect(repoLoc1);
+		Assert.assertEquals("[sanity]", repo.getChangelog().getRevisionIndex(repo.getWorkingCopyParents().first()), 1);
+
+		HgMergeCommand cmd = new HgMergeCommand(repo);
+		cmd.changeset(2).execute(new HgMergeCommand.MediatorBase() {
+			
+			public void resolve(HgFileRevision base, HgFileRevision first, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException {
+				errorCollector.fail("There's no conflict in changesets 1 and 2 merge");
+			}
+		});
+		// FIXME run hg status to see changes
+		repo = new HgLookup().detect(repoLoc2);
+		cmd = new HgMergeCommand(repo);
+		cmd.changeset(3).execute(new HgMergeCommand.MediatorBase() {
+			
+			public void resolve(HgFileRevision base, HgFileRevision first, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException {
+				resolver.unresolved();
+			}
+		});
+		// FIXME run hg status and hg resolve to see changes
+	}
 
 	private static class MergeNotificationCollector implements HgMergeCommand.Mediator {
 		public final List<String> same = new ArrayList<String>();