changeset 618:7c0d2ce340b8

Refactor the approach for how content finds its way down to a commit revision
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Thu, 16 May 2013 19:46:13 +0200
parents 65c01508f002
children 868b2ffdcd5c
files src/org/tmatesoft/hg/core/HgCloneCommand.java src/org/tmatesoft/hg/core/HgCommitCommand.java src/org/tmatesoft/hg/core/HgIOException.java src/org/tmatesoft/hg/internal/ChangelogEntryBuilder.java src/org/tmatesoft/hg/internal/CommitFacility.java src/org/tmatesoft/hg/internal/DataAccessProvider.java src/org/tmatesoft/hg/internal/DataSerializer.java src/org/tmatesoft/hg/internal/DeflaterDataSerializer.java src/org/tmatesoft/hg/internal/FileContentSupplier.java src/org/tmatesoft/hg/internal/FileUtils.java src/org/tmatesoft/hg/internal/ManifestEntryBuilder.java src/org/tmatesoft/hg/internal/Patch.java src/org/tmatesoft/hg/internal/RevlogCompressor.java src/org/tmatesoft/hg/internal/RevlogStreamWriter.java src/org/tmatesoft/hg/internal/WorkingCopyContent.java src/org/tmatesoft/hg/repo/HgLookup.java test/org/tmatesoft/hg/test/TestCommit.java
diffstat 17 files changed, 413 insertions(+), 348 deletions(-) [+]
line wrap: on
line diff
--- a/src/org/tmatesoft/hg/core/HgCloneCommand.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgCloneCommand.java	Thu May 16 19:46:13 2013 +0200
@@ -30,7 +30,6 @@
 
 import org.tmatesoft.hg.internal.ByteArrayDataAccess;
 import org.tmatesoft.hg.internal.DataAccess;
-import org.tmatesoft.hg.internal.DataAccessProvider;
 import org.tmatesoft.hg.internal.DataSerializer;
 import org.tmatesoft.hg.internal.DigestHelper;
 import org.tmatesoft.hg.internal.FNCacheFile;
@@ -148,6 +147,7 @@
 		private final SessionContext ctx;
 		private final Path.Source pathFactory;
 		private FileOutputStream indexFile;
+		private File currentFile;
 		private String filename; // human-readable name of the file being written, for log/exception purposes 
 
 		private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>();
@@ -199,7 +199,7 @@
 			try {
 				revlogHeader.offset(0).baseRevision(-1);
 				revisionSequence.clear();
-				indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i"));
+				indexFile = new FileOutputStream(currentFile = new File(hgDir, filename = "store/00changelog.i"));
 				collectChangelogIndexes = true;
 			} catch (IOException ex) {
 				throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(hgDir, filename));
@@ -223,7 +223,7 @@
 			try {
 				revlogHeader.offset(0).baseRevision(-1);
 				revisionSequence.clear();
-				indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i"));
+				indexFile = new FileOutputStream(currentFile = new File(hgDir, filename = "store/00manifest.i"));
 			} catch (IOException ex) {
 				throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(hgDir, filename));
 			}
@@ -247,7 +247,7 @@
 				revisionSequence.clear();
 				File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString());
 				file.getParentFile().mkdirs();
-				indexFile = new FileOutputStream(file);
+				indexFile = new FileOutputStream(currentFile = file);
 			} catch (IOException ex) {
 				String m = String.format("Failed to write file %s", filename);
 				throw new HgInvalidControlFileException(m, ex, new File(filename));
@@ -279,6 +279,7 @@
 			indexFile.close();
 			indexFile = null;
 			filename = null;
+			currentFile = null;
 		}
 
 		private int knownRevision(Nodeid p) {
@@ -367,11 +368,15 @@
 				revlogHeader.length(content.length, compressedLen);
 				
 				// XXX may be wise not to create DataSerializer for each revision, but for a file
-				DataAccessProvider.StreamDataSerializer sds = new DataAccessProvider.StreamDataSerializer(ctx.getLog(), indexFile) {
+				DataSerializer sds = new DataSerializer() {
 					@Override
-					public void done() {
-						// override parent behavior not to close stream in use
-					}
+						public void write(byte[] data, int offset, int length) throws HgIOException {
+							try {
+								indexFile.write(data, offset, length);
+							} catch (IOException ex) {
+								throw new HgIOException("Write failure", ex, currentFile);
+							}
+						}
 				};
 				revlogHeader.serialize(sds);
 
@@ -389,9 +394,12 @@
 				revisionSequence.add(node);
 				prevRevContent.done();
 				prevRevContent = new ByteArrayDataAccess(content);
+			} catch (HgIOException ex) {
+				String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename);
+				throw new HgInvalidControlFileException(m, ex, currentFile);
 			} catch (IOException ex) {
 				String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename);
-				throw new HgInvalidControlFileException(m, ex, new File(hgDir, filename));
+				throw new HgInvalidControlFileException(m, ex, currentFile);
 			}
 			return cancelException == null;
 		}
--- a/src/org/tmatesoft/hg/core/HgCommitCommand.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgCommitCommand.java	Thu May 16 19:46:13 2013 +0200
@@ -19,10 +19,7 @@
 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
 
-import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.internal.COWTransaction;
 import org.tmatesoft.hg.internal.CommitFacility;
 import org.tmatesoft.hg.internal.CompleteRepoLock;
@@ -30,6 +27,7 @@
 import org.tmatesoft.hg.internal.FileContentSupplier;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.Transaction;
+import org.tmatesoft.hg.internal.WorkingCopyContent;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgInternals;
@@ -116,16 +114,13 @@
 				HgDataFile df = repo.getFileNode(m);
 				cf.add(df, new WorkingCopyContent(df));
 			}
-			ArrayList<FileContentSupplier> toClear = new ArrayList<FileContentSupplier>();
 			for (Path a : status.getAdded()) {
 				HgDataFile df = repo.getFileNode(a); // TODO need smth explicit, like repo.createNewFileNode(Path) here
 				// XXX might be an interesting exercise not to demand a content supplier, but instead return a "DataRequester"
 				// object, that would indicate interest in data, and this code would "push" it to requester, so that any exception
 				// is handled here, right away, and won't need to travel supplier and CommitFacility. (although try/catch inside
 				// supplier.read (with empty throws declaration)
-				FileContentSupplier fcs = new FileContentSupplier(repo, a);
-				cf.add(df, fcs);
-				toClear.add(fcs);
+				cf.add(df, new FileContentSupplier(repo, a));
 			}
 			for (Path r : status.getRemoved()) {
 				HgDataFile df = repo.getFileNode(r); 
@@ -145,10 +140,6 @@
 				tr.rollback();
 				throw ex;
 			}
-			// TODO toClear list is awful
-			for (FileContentSupplier fcs : toClear) {
-				fcs.done();
-			}
 			return new Outcome(Kind.Success, "Commit ok");
 		} catch (HgRuntimeException ex) {
 			throw new HgLibraryFailureException(ex);
@@ -182,44 +173,4 @@
 		parents[0] = pn.first().isNull() ? NO_REVISION : clog.getRevisionIndex(pn.first());
 		parents[1] = pn.second().isNull() ? NO_REVISION : clog.getRevisionIndex(pn.second());
 	}
-
-	private static class WorkingCopyContent implements CommitFacility.ByteDataSupplier {
-		private final HgDataFile file;
-		private ByteBuffer fileContent; 
-
-		public WorkingCopyContent(HgDataFile dataFile) {
-			file = dataFile;
-			if (!dataFile.exists()) {
-				throw new IllegalArgumentException();
-			}
-		}
-
-		public int read(ByteBuffer dst) {
-			if (fileContent == null) {
-				try {
-					ByteArrayChannel sink = new ByteArrayChannel();
-					// TODO desperately need partial read here
-					file.workingCopy(sink);
-					fileContent = ByteBuffer.wrap(sink.toArray());
-				} catch (CancelledException ex) {
-					// ByteArrayChannel doesn't cancel, never happens
-					assert false;
-				}
-			}
-			if (fileContent.remaining() == 0) {
-				return -1;
-			}
-			int dstCap = dst.remaining();
-			if (fileContent.remaining() > dstCap) {
-				// save actual limit, and pretend we've got exactly desired amount of bytes
-				final int lim = fileContent.limit();
-				fileContent.limit(dstCap);
-				dst.put(fileContent);
-				fileContent.limit(lim);
-			} else {
-				dst.put(fileContent);
-			}
-			return dstCap - dst.remaining();
-		}
-	}
 }
--- a/src/org/tmatesoft/hg/core/HgIOException.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgIOException.java	Thu May 16 19:46:13 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -45,6 +45,9 @@
 		file = troubleFile;
 	}
 
+	/**
+	 * @return file that causes trouble, may be <code>null</code>
+	 */
 	public File getFile() {
 		return file;
 	}
--- a/src/org/tmatesoft/hg/internal/ChangelogEntryBuilder.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ChangelogEntryBuilder.java	Thu May 16 19:46:13 2013 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 TMate Software Ltd
+ * Copyright (c) 2012-2013 TMate Software Ltd
  *  
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -16,6 +16,7 @@
  */
 package org.tmatesoft.hg.internal;
 
+import java.io.ByteArrayOutputStream;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -23,24 +24,28 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.TimeZone;
 import java.util.Map.Entry;
+import java.util.TimeZone;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
 import org.tmatesoft.hg.util.Path;
 
 /**
- *
+ * Builds changelog entry
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class ChangelogEntryBuilder {
+public class ChangelogEntryBuilder implements DataSource {
 
 	private String user;
 	private List<Path> modifiedFiles;
 	private final Map<String, String> extrasMap = new LinkedHashMap<String, String>();
 	private Integer tzOffset;
 	private Long csetTime;
+	private Nodeid manifestRev;
+	private CharSequence comment;
 	
 	public ChangelogEntryBuilder user(String username) {
 		user = username;
@@ -89,6 +94,89 @@
 		return this;
 	}
 	
+	public ChangelogEntryBuilder manifest(Nodeid manifestRevision) {
+		manifestRev = manifestRevision;
+		return this;
+	}
+	
+	public ChangelogEntryBuilder comment(CharSequence commentString) {
+		comment = commentString;
+		return this;
+	}
+
+	public void serialize(DataSerializer out) throws HgIOException {
+		byte[] b = build();
+		out.write(b, 0, b.length);
+	}
+
+	public int serializeLength() {
+		return -1;
+	}
+
+	public byte[] build() {
+		ByteArrayOutputStream out = new ByteArrayOutputStream();
+		final int LF = '\n';
+		CharSequence extras = buildExtras();
+		CharSequence files = buildFiles();
+		byte[] manifestRevision = manifestRev.toString().getBytes();
+		byte[] username = user().getBytes(EncodingHelper.getUTF8());
+		out.write(manifestRevision, 0, manifestRevision.length);
+		out.write(LF);
+		out.write(username, 0, username.length);
+		out.write(LF);
+		final long csetDate = csetTime();
+		byte[] date = String.format("%d %d", csetDate, csetTimezone(csetDate)).getBytes();
+		out.write(date, 0, date.length);
+		if (extras.length() > 0) {
+			out.write(' ');
+			byte[] b = extras.toString().getBytes();
+			out.write(b, 0, b.length);
+		}
+		out.write(LF);
+		byte[] b = files.toString().getBytes();
+		out.write(b, 0, b.length);
+		out.write(LF);
+		out.write(LF);
+		byte[] cmt = comment.toString().getBytes(EncodingHelper.getUTF8());
+		out.write(cmt, 0, cmt.length);
+		return out.toByteArray();
+	}
+
+	private CharSequence buildExtras() {
+		StringBuilder extras = new StringBuilder();
+		for (Iterator<Entry<String, String>> it = extrasMap.entrySet().iterator(); it.hasNext();) {
+			final Entry<String, String> next = it.next();
+			extras.append(encodeExtrasPair(next.getKey()));
+			extras.append(':');
+			extras.append(encodeExtrasPair(next.getValue()));
+			if (it.hasNext()) {
+				extras.append('\00');
+			}
+		}
+		return extras;
+	}
+
+	private CharSequence buildFiles() {
+		StringBuilder files = new StringBuilder();
+		if (modifiedFiles != null) {
+			Collections.sort(modifiedFiles);
+			for (Iterator<Path> it = modifiedFiles.iterator(); it.hasNext(); ) {
+				files.append(it.next());
+				if (it.hasNext()) {
+					files.append('\n');
+				}
+			}
+		}
+		return files;
+	}
+
+	private final static CharSequence encodeExtrasPair(String s) {
+		if (s != null) {
+			return s.replace("\\", "\\\\").replace("\n", "\\n").replace("\r", "\\r").replace("\00", "\\0");
+		}
+		return s;
+	}
+
 	private long csetTime() {
 		if (csetTime != null) { 
 			return csetTime;
@@ -102,37 +190,4 @@
 		}
 		return -(TimeZone.getDefault().getOffset(time) / 1000);
 	}
-
-	public byte[] build(Nodeid manifestRevision, String comment) {
-		String f = "%s\n%s\n%d %d %s\n%s\n\n%s";
-		StringBuilder extras = new StringBuilder();
-		for (Iterator<Entry<String, String>> it = extrasMap.entrySet().iterator(); it.hasNext();) {
-			final Entry<String, String> next = it.next();
-			extras.append(encodeExtrasPair(next.getKey()));
-			extras.append(':');
-			extras.append(encodeExtrasPair(next.getValue()));
-			if (it.hasNext()) {
-				extras.append('\00');
-			}
-		}
-		StringBuilder files = new StringBuilder();
-		if (modifiedFiles != null) {
-			for (Iterator<Path> it = modifiedFiles.iterator(); it.hasNext(); ) {
-				files.append(it.next());
-				if (it.hasNext()) {
-					files.append('\n');
-				}
-			}
-		}
-		final long date = csetTime();
-		final int tz = csetTimezone(date);
-		return String.format(f, manifestRevision.toString(), user(), date, tz, extras, files, comment).getBytes();
-	}
-
-	private final static CharSequence encodeExtrasPair(String s) {
-		if (s != null) {
-			return s.replace("\\", "\\\\").replace("\n", "\\n").replace("\r", "\\r").replace("\00", "\\0");
-		}
-		return s;
-	}
 }
--- a/src/org/tmatesoft/hg/internal/CommitFacility.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/CommitFacility.java	Thu May 16 19:46:13 2013 +0200
@@ -25,7 +25,6 @@
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -38,6 +37,7 @@
 import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.HgRepositoryLockException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.util.Pair;
@@ -53,7 +53,7 @@
 public final class CommitFacility {
 	private final Internals repo;
 	private final int p1Commit, p2Commit;
-	private Map<Path, Pair<HgDataFile, ByteDataSupplier>> files = new LinkedHashMap<Path, Pair<HgDataFile, ByteDataSupplier>>();
+	private Map<Path, Pair<HgDataFile, DataSource>> files = new LinkedHashMap<Path, Pair<HgDataFile, DataSource>>();
 	private Set<Path> removals = new TreeSet<Path>();
 	private String branch, user;
 
@@ -74,12 +74,12 @@
 		return p1Commit != NO_REVISION && p2Commit != NO_REVISION;
 	}
 
-	public void add(HgDataFile dataFile, ByteDataSupplier content) {
+	public void add(HgDataFile dataFile, DataSource content) {
 		if (content == null) {
 			throw new IllegalArgumentException();
 		}
 		removals.remove(dataFile.getPath());
-		files.put(dataFile.getPath(), new Pair<HgDataFile, ByteDataSupplier>(dataFile, content));
+		files.put(dataFile.getPath(), new Pair<HgDataFile, DataSource>(dataFile, content));
 	}
 
 	public void forget(HgDataFile dataFile) {
@@ -138,22 +138,14 @@
 		// Register new/changed
 		LinkedHashMap<Path, RevlogStream> newlyAddedFiles = new LinkedHashMap<Path, RevlogStream>();
 		ArrayList<Path> touchInDirstate = new ArrayList<Path>();
-		for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
+		for (Pair<HgDataFile, DataSource> e : files.values()) {
 			HgDataFile df = e.first();
+			DataSource bds = e.second();
 			Pair<Integer, Integer> fp = fileParents.get(df.getPath());
 			if (fp == null) {
 				// NEW FILE
 				fp = new Pair<Integer, Integer>(NO_REVISION, NO_REVISION);
 			}
-			ByteDataSupplier bds = e.second();
-			// FIXME quickfix, instead, pass ByteDataSupplier directly to RevlogStreamWriter
-			ByteBuffer bb = ByteBuffer.allocate(2048);
-			ByteArrayChannel bac = new ByteArrayChannel();
-			while (bds.read(bb) != -1) {
-				bb.flip();
-				bac.write(bb);
-				bb.clear();
-			}
 			RevlogStream contentStream;
 			if (df.exists()) {
 				contentStream = repo.getImplAccess().getStream(df);
@@ -165,27 +157,27 @@
 				// (despite the fact the file is in there)
 			}
 			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo, contentStream, transaction);
-			Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second());
+			Nodeid fileRev = fileWriter.addRevision(bds, clogRevisionIndex, fp.first(), fp.second());
 			newManifestRevision.put(df.getPath(), fileRev);
 			touchInDirstate.add(df.getPath());
 		}
 		//
 		// Manifest
-		final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder();
+		final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder(repo.buildFileNameEncodingHelper());
 		for (Map.Entry<Path, Nodeid> me : newManifestRevision.entrySet()) {
 			manifestBuilder.add(me.getKey().toString(), me.getValue());
 		}
 		RevlogStreamWriter manifestWriter = new RevlogStreamWriter(repo, repo.getImplAccess().getManifestStream(), transaction);
-		Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder.build(), clogRevisionIndex, manifestParents.first(), manifestParents.second());
+		Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder, clogRevisionIndex, manifestParents.first(), manifestParents.second());
 		//
 		// Changelog
 		final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder();
 		changelogBuilder.setModified(files.keySet());
 		changelogBuilder.branch(branch == null ? DEFAULT_BRANCH_NAME : branch);
 		changelogBuilder.user(String.valueOf(user));
-		byte[] clogContent = changelogBuilder.build(manifestRev, message);
+		changelogBuilder.manifest(manifestRev).comment(message);
 		RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo, repo.getImplAccess().getChangelogStream(), transaction);
-		Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit);
+		Nodeid changesetRev = changelogWriter.addRevision(changelogBuilder, clogRevisionIndex, p1Commit, p2Commit);
 		// TODO move fncache update to an external facility, along with dirstate and bookmark update
 		if (!newlyAddedFiles.isEmpty() && repo.fncacheInUse()) {
 			FNCacheFile fncache = new FNCacheFile(repo);
@@ -265,18 +257,4 @@
 		return repo.getManifest().getRevisionIndex(manifestRev);
 	}
 */
-
-	// unlike DataAccess (which provides structured access), this one 
-	// deals with a sequence of bytes, when there's no need in structure of the data
-	// FIXME java.nio.ReadableByteChannel or ByteStream/ByteSequence(read, length, reset)
-	// SHALL be inline with util.ByteChannel, reading bytes from HgDataFile, preferably DataAccess#readBytes(BB) to match API,
-	// and a wrap for ByteVector
-	public interface ByteDataSupplier { // TODO look if can resolve DataAccess in HgCloneCommand visibility issue
-		// FIXME needs lifecycle, e.g. for supplier that reads from WC
-		int read(ByteBuffer buf);
-	}
-	
-	public interface ByteDataConsumer {
-		void write(ByteBuffer buf);
-	}
 }
--- a/src/org/tmatesoft/hg/internal/DataAccessProvider.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DataAccessProvider.java	Thu May 16 19:46:13 2013 +0200
@@ -21,10 +21,8 @@
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
@@ -101,34 +99,13 @@
 		return new DataAccess(); // non-null, empty.
 	}
 	
-	public DataSerializer createWriter(final Transaction tr, File f, boolean createNewIfDoesntExist) throws HgIOException {
+	public DataSerializer createWriter(final Transaction tr, File f, boolean createNewIfDoesntExist) {
 		if (!f.exists() && !createNewIfDoesntExist) {
 			return new DataSerializer();
 		}
-		try {
-			final File transactionFile = tr.prepare(f);
-			return new StreamDataSerializer(context.getLog(), new FileOutputStream(transactionFile, true)) {
-				@Override
-				public void done() {
-					super.done();
-					// FIXME invert RevlogStreamWriter to send DataSource here instead of grabbing DataSerializer
-					// besides, DataSerializer#done is invoked regardless of whether write was successful or not,
-					// while Transaction#done() assumes there's no error
-					try {
-						tr.done(transactionFile);
-					} catch (HgIOException ex) {
-						context.getLog().dump(DataAccessProvider.class, Error, ex, null);
-					}
-				}
-			};
-		} catch (final FileNotFoundException ex) {
-			context.getLog().dump(getClass(), Error, ex, null);
-			return new DataSerializer() {
-				public void write(byte[] data, int offset, int length) throws IOException {
-					throw ex;
-				}
-			};
-		}
+		// TODO invert RevlogStreamWriter to send DataSource here instead of grabbing DataSerializer
+		// to control the moment transaction gets into play and whether it fails or not
+		return new TransactionAwareFileSerializer(tr, f);
 	}
 
 	private static class MemoryMapFileAccess extends DataAccess {
@@ -408,56 +385,58 @@
 			}
 		}
 	}
+	
+	/**
+	 * Appends serialized changes to the end of the file
+	 */
+	private static class TransactionAwareFileSerializer extends DataSerializer {
+		
+		private final Transaction transaction;
+		private final File file;
+		private FileOutputStream fos;
+		private File transactionFile;
+		private boolean writeFailed = false;
 
-	public/*XXX, private, once HgCloneCommand stops using it */ static class StreamDataSerializer extends DataSerializer {
-		private final OutputStream out;
-		private final LogFacility log;
-		private byte[] buffer;
-	
-		public StreamDataSerializer(LogFacility logFacility, OutputStream os) {
-			assert os != null;
-			out = os;
-			log = logFacility;
+		public TransactionAwareFileSerializer(Transaction tr, File f) {
+			transaction = tr;
+			file = f;
 		}
 		
 		@Override
-		public void write(byte[] data, int offset, int length) throws IOException {
-			out.write(data, offset, length);
-		}
-	
-		@Override
-		public void writeInt(int... values) throws IOException {
-			ensureBufferSize(4*values.length); // sizeof(int)
-			int idx = 0;
-			for (int v : values) {
-				DataSerializer.bigEndian(v, buffer, idx);
-				idx += 4;
+		public void write(byte[] data, int offset, int length) throws HgIOException {
+			try {
+				if (fos == null) {
+					transactionFile = transaction.prepare(file);
+					fos = new FileOutputStream(transactionFile, true);
+				}
+				fos.write(data, offset, length);
+				fos.flush();
+			} catch (IOException ex) {
+				writeFailed = true;
+				transaction.failure(transactionFile, ex);
+				throw new HgIOException("Write failure", ex, transactionFile);
 			}
-			out.write(buffer, 0, idx);
 		}
 		
 		@Override
-		public void writeByte(byte... values) throws IOException {
-			if (values.length == 1) {
-				out.write(values[0]);
-			} else {
-				out.write(values, 0, values.length);
-			}
-		}
-		
-		private void ensureBufferSize(int bytesNeeded) {
-			if (buffer == null || buffer.length < bytesNeeded) {
-				buffer = new byte[bytesNeeded];
-			}
-		}
-	
-		@Override
-		public void done() {
-			try {
-				out.flush();
-				out.close();
-			} catch (IOException ex) {
-				log.dump(getClass(), Error, ex, "Failure to close stream");
+		public void done() throws HgIOException {
+			if (fos != null) {
+				assert transactionFile != null;
+				try {
+					fos.close();
+					if (!writeFailed) {
+						// XXX, Transaction#done() assumes there's no error , but perhaps it's easier to 
+						// rely on #failure(), and call #done() always (or change #done() to #success()
+						transaction.done(transactionFile);
+					}
+					fos = null;
+				} catch (IOException ex) {
+					if (!writeFailed) {
+						// do not eclipse original exception
+						transaction.failure(transactionFile, ex);
+					}
+					throw new HgIOException("Write failure", ex, transactionFile);
+				}
 			}
 		}
 	}
--- a/src/org/tmatesoft/hg/internal/DataSerializer.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DataSerializer.java	Thu May 16 19:46:13 2013 +0200
@@ -16,7 +16,9 @@
  */
 package org.tmatesoft.hg.internal;
 
-import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+
+import org.tmatesoft.hg.core.HgIOException;
 
 /**
  * Serialization friend of {@link DataAccess}
@@ -26,28 +28,36 @@
  */
 @Experimental(reason="Work in progress")
 public class DataSerializer {
+	private byte[] buffer;
 	
-	public void writeByte(byte... values) throws IOException {
+	public void writeByte(byte... values) throws HgIOException {
 		write(values, 0, values.length);
 	}
 
-	public void writeInt(int... values) throws IOException {
-		byte[] buf = new byte[4];
+	public void writeInt(int... values) throws HgIOException {
+		ensureBufferSize(4*values.length); // sizeof(int)
+		int idx = 0;
 		for (int v : values) {
-			bigEndian(v, buf, 0);
-			write(buf, 0, buf.length);
+			bigEndian(v, buffer, idx);
+			idx += 4;
+		}
+		write(buffer, 0, idx);
+	}
+
+	public void write(byte[] data, int offset, int length) throws HgIOException {
+		throw new HgIOException("Attempt to write to non-existent file", null);
+	}
+
+	public void done() throws HgIOException {
+		// no-op
+	}
+	
+	private void ensureBufferSize(int bytesNeeded) {
+		if (buffer == null || buffer.length < bytesNeeded) {
+			buffer = new byte[bytesNeeded];
 		}
 	}
 
-	public void write(byte[] data, int offset, int length) throws IOException {
-		throw new IOException("Attempt to write to non-existent file");
-	}
-
-	public void done() {
-		// FIXME perhaps, shall allow IOException, too
-		// no-op
-	}
-	
 	/**
 	 * Writes 4 bytes of supplied value into the buffer at given offset, big-endian. 
 	 */
@@ -64,7 +74,11 @@
 	 */
 	@Experimental(reason="Work in progress")
 	interface DataSource {
-		public void serialize(DataSerializer out) throws IOException;
+		/**
+		 * Invoked once for a single write operation, 
+		 * although the source itself may get serialized several times
+		 */
+		public void serialize(DataSerializer out) throws HgIOException;
 
 		/**
 		 * Hint of data length it would like to writes
@@ -81,7 +95,7 @@
 			data = bytes;
 		}
 
-		public void serialize(DataSerializer out) throws IOException {
+		public void serialize(DataSerializer out) throws HgIOException {
 			if (data != null) {
 				out.write(data, 0, data.length);
 			}
@@ -90,6 +104,18 @@
 		public int serializeLength() {
 			return data == null ? 0 : data.length;
 		}
+	}
+	
+	public static class ByteArrayDataSerializer extends DataSerializer {
+		private final ByteArrayOutputStream out = new ByteArrayOutputStream();
+
+		@Override
+		public void write(byte[] data, int offset, int length) {
+			out.write(data, offset, length);
+		}
 		
+		public byte[] toByteArray() {
+			return out.toByteArray();
+		}
 	}
 }
--- a/src/org/tmatesoft/hg/internal/DeflaterDataSerializer.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/DeflaterDataSerializer.java	Thu May 16 19:46:13 2013 +0200
@@ -16,10 +16,11 @@
  */
 package org.tmatesoft.hg.internal;
 
-import java.io.IOException;
 import java.util.zip.Deflater;
 import java.util.zip.DeflaterOutputStream;
 
+import org.tmatesoft.hg.core.HgIOException;
+
 /**
  * {@link DeflaterOutputStream} counterpart for {@link DataSerializer} API
  * 
@@ -43,7 +44,7 @@
 	}
 
 	@Override
-	public void writeInt(int... values) throws IOException {
+	public void writeInt(int... values) throws HgIOException {
 		for (int i = 0; i < values.length; i+= AUX_BUFFER_CAPACITY) {
 			int idx = 0;
 			for (int j = i, x = Math.min(values.length, i + AUX_BUFFER_CAPACITY); j < x; j++) {
@@ -58,7 +59,7 @@
 	}
 
 	@Override
-	public void write(byte[] data, int offset, int length) throws IOException {
+	public void write(byte[] data, int offset, int length) throws HgIOException {
 		// @see DeflaterOutputStream#write(byte[], int, int)
 		int stride = deflateOutBuffer.length;
 		for (int i = 0; i < length; i += stride) {
@@ -66,7 +67,7 @@
 		}
 	}
 	
-	private void internalWrite(byte[] data, int offset, int length) throws IOException {
+	private void internalWrite(byte[] data, int offset, int length) throws HgIOException {
 		deflater.setInput(data, offset, length);
 		while (!deflater.needsInput()) {
 			deflate();
@@ -74,11 +75,11 @@
 	}
 
 	@Override
-	public void done() {
+	public void done() throws HgIOException {
 		delegate.done();
 	}
 
-	public void finish() throws IOException {
+	public void finish() throws HgIOException {
 		if (!deflater.finished()) {
 			deflater.finish();
 			while (!deflater.finished()) {
@@ -87,7 +88,7 @@
 		}
 	}
 
-	protected void deflate() throws IOException {
+	protected void deflate() throws HgIOException {
 		int len = deflater.deflate(deflateOutBuffer, 0, deflateOutBuffer.length);
 		if (len > 0) {
 			delegate.write(deflateOutBuffer, 0, len);
--- a/src/org/tmatesoft/hg/internal/FileContentSupplier.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FileContentSupplier.java	Thu May 16 19:46:13 2013 +0200
@@ -18,56 +18,55 @@
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 
 import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.util.Path;
 
 /**
- * FIXME files are opened at the moment of instantiation, though the moment the data is requested might be distant
+ * {@link DataSource} that reads from regular files
  * 
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class FileContentSupplier implements CommitFacility.ByteDataSupplier {
-	private final FileChannel channel;
-	private IOException error;
+public class FileContentSupplier implements DataSource {
+	private final File file;
+	private final SessionContext ctx;
 	
-	public FileContentSupplier(HgRepository repo, Path file) throws HgIOException {
-		this(new File(repo.getWorkingDir(), file.toString()));
-	}
-
-	public FileContentSupplier(File f) throws HgIOException {
-		if (!f.canRead()) {
-			throw new HgIOException(String.format("Can't read file %s", f), f);
-		}
-		try {
-			channel = new FileInputStream(f).getChannel();
-		} catch (FileNotFoundException ex) {
-			throw new HgIOException("Can't open file", ex, f);
-		}
+	public FileContentSupplier(HgRepository repo, Path file) {
+		this(repo, new File(repo.getWorkingDir(), file.toString()));
 	}
 
-	public int read(ByteBuffer buf) {
-		if (error != null) {
-			return -1;
-		}
-		try {
-			return channel.read(buf);
-		} catch (IOException ex) {
-			error = ex;
-		}
-		return -1;
+	public FileContentSupplier(SessionContext.Source ctxSource, File f) {
+		ctx = ctxSource.getSessionContext();
+		file = f;
 	}
 	
-	public void done() throws IOException {
-		channel.close();
-		if (error != null) {
-			throw error;
+	public void serialize(DataSerializer out) throws HgIOException {
+		FileInputStream fis = null;
+		try {
+			fis = new FileInputStream(file);
+			FileChannel fc = fis.getChannel();
+			ByteBuffer buffer = ByteBuffer.allocate((int) Math.min(100*1024, fc.size()));
+			while (fc.read(buffer) != -1) {
+				buffer.flip();
+				// #allocate() above ensures backing array
+				out.write(buffer.array(), 0, buffer.limit());
+				buffer.clear();
+			}
+		} catch (IOException ex) {
+			throw new HgIOException("Failed to get content of the file", ex, file);
+		} finally {
+			new FileUtils(ctx.getLog()).closeQuietly(fis);
 		}
 	}
+	
+	public int serializeLength() {
+		return Internals.ltoi(file.length());
+	}
 }
\ No newline at end of file
--- a/src/org/tmatesoft/hg/internal/FileUtils.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FileUtils.java	Thu May 16 19:46:13 2013 +0200
@@ -95,4 +95,11 @@
 		}
 	}
 
+	public static void main(String[] args) throws Exception {
+		final long start = System.nanoTime();
+		final File src = new File(".../hg/cpython/.hg/store/00changelog.d");
+		copyFile(src, new File("/tmp/zxczxczxc234"));
+		final long end = System.nanoTime();
+		System.out.printf("Copy of %,d bytes took %d ms", src.length(), (end-start)/1000000);
+	}
 }
--- a/src/org/tmatesoft/hg/internal/ManifestEntryBuilder.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/ManifestEntryBuilder.java	Thu May 16 19:46:13 2013 +0200
@@ -18,7 +18,9 @@
 
 import java.io.ByteArrayOutputStream;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
 
 /**
  * Create binary manifest entry ready to write down into 00manifest.i
@@ -36,16 +38,20 @@
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class ManifestEntryBuilder {
-	private ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+public class ManifestEntryBuilder implements DataSource {
+	private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+	private final EncodingHelper encHelper;
 
+	public ManifestEntryBuilder(EncodingHelper encodingHelper) {
+		encHelper = encodingHelper;
+	}
 	
 	public ManifestEntryBuilder reset() {
 		buffer.reset();
 		return this;
 	}
 	public ManifestEntryBuilder add(String fname, Nodeid revision) {
-		byte[] b = fname.getBytes();
+		byte[] b = encHelper.toManifest(fname);
 		buffer.write(b, 0, b.length);
 		buffer.write('\0');
 		b = revision.toString().getBytes();
@@ -58,4 +64,13 @@
 		return buffer.toByteArray();
 	}
 
+	public void serialize(DataSerializer out) throws HgIOException {
+		byte[] r = build();
+		out.write(r, 0 , r.length);
+	}
+
+	public int serializeLength() {
+		return buffer.size();
+	}
+
 }
--- a/src/org/tmatesoft/hg/internal/Patch.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/Patch.java	Thu May 16 19:46:13 2013 +0200
@@ -20,6 +20,8 @@
 import java.util.ArrayList;
 import java.util.Formatter;
 
+import org.tmatesoft.hg.core.HgIOException;
+
 /**
  * @see http://mercurial.selenic.com/wiki/BundleFormat
  * in Changelog group description
@@ -177,7 +179,7 @@
 		return prefix + totalDataLen;
 	}
 	
-	/*package-local*/ void serialize(DataSerializer out) throws IOException {
+	/*package-local*/ void serialize(DataSerializer out) throws HgIOException {
 		for (int i = 0, x = data.size(); i < x; i++) {
 			final int start = starts.get(i);
 			final int end = ends.get(i);
@@ -462,7 +464,7 @@
 
 	public class PatchDataSource implements DataSerializer.DataSource {
 
-		public void serialize(DataSerializer out) throws IOException {
+		public void serialize(DataSerializer out) throws HgIOException {
 			Patch.this.serialize(out);
 		}
 
--- a/src/org/tmatesoft/hg/internal/RevlogCompressor.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevlogCompressor.java	Thu May 16 19:46:13 2013 +0200
@@ -16,9 +16,9 @@
  */
 package org.tmatesoft.hg.internal;
 
-import java.io.IOException;
 import java.util.zip.Deflater;
 
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.util.LogFacility.Severity;
 
@@ -44,7 +44,7 @@
 	}
 	
 	// out stream is not closed!
-	public int writeCompressedData(DataSerializer out) throws IOException {
+	public int writeCompressedData(DataSerializer out) throws HgIOException {
 		zip.reset();
 		DeflaterDataSerializer dds = new DeflaterDataSerializer(out, zip, sourceData.serializeLength());
 		sourceData.serialize(dds);
@@ -61,7 +61,7 @@
 			compressedLen = writeCompressedData(counter);
 			assert counter.totalWritten == compressedLen;
 	        return compressedLen;
-		} catch (IOException ex) {
+		} catch (HgIOException ex) {
 			// can't happen provided we write to our stream that does nothing but byte counting
 			ctx.getLog().dump(getClass(), Severity.Error, ex, "Failed estimating compressed length of revlog data");
 			return counter.totalWritten; // use best known value so far
@@ -71,15 +71,15 @@
 	private static class Counter extends DataSerializer {
 		public int totalWritten = 0;
 
-		public void writeByte(byte... values) throws IOException {
+		public void writeByte(byte... values) throws HgIOException {
 			totalWritten += values.length;
 		}
 
-		public void writeInt(int... values) throws IOException {
+		public void writeInt(int... values) throws HgIOException {
 			totalWritten += 4 * values.length;
 		}
 
-		public void write(byte[] data, int offset, int length) throws IOException {
+		public void write(byte[] data, int offset, int length) throws HgIOException {
 			totalWritten += length;
 		}
 	}
--- a/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Thu May 16 19:46:13 2013 +0200
@@ -19,13 +19,15 @@
 import static org.tmatesoft.hg.internal.Internals.REVLOGV1_RECORD_SIZE;
 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
 
-import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
 import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.core.SessionContext;
+import org.tmatesoft.hg.internal.DataSerializer.ByteArrayDataSerializer;
+import org.tmatesoft.hg.internal.DataSerializer.ByteArrayDataSource;
+import org.tmatesoft.hg.internal.DataSerializer.DataSource;
 import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgInvalidStateException;
 
@@ -60,17 +62,18 @@
 	/**
 	 * @return nodeid of added revision
 	 */
-	public Nodeid addRevision(byte[] content, int linkRevision, int p1, int p2) throws HgIOException {
+	public Nodeid addRevision(DataSource content, int linkRevision, int p1, int p2) throws HgIOException {
 		lastEntryRevision = Nodeid.NULL;
 		int revCount = revlogStream.revisionCount();
 		lastEntryIndex = revCount == 0 ? NO_REVISION : revCount - 1;
 		populateLastEntry();
 		//
-		Patch patch = GeneratePatchInspector.delta(lastEntryContent, content);
+		byte[] contentByteArray = toByteArray(content);
+		Patch patch = GeneratePatchInspector.delta(lastEntryContent, contentByteArray);
 		int patchSerializedLength = patch.serializedLength();
 		
-		final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, content.length);
-		DataSerializer.DataSource dataSource = writeComplete ? new DataSerializer.ByteArrayDataSource(content) : patch.new PatchDataSource();
+		final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, contentByteArray.length);
+		DataSerializer.DataSource dataSource = writeComplete ? new ByteArrayDataSource(contentByteArray) : patch.new PatchDataSource();
 		revlogDataZip.reset(dataSource);
 		final int compressedLen;
 		final boolean useCompressedData = preferCompressedOverComplete(revlogDataZip.getCompressedLength(), dataSource.serializeLength());
@@ -83,17 +86,17 @@
 		//
 		Nodeid p1Rev = revision(p1);
 		Nodeid p2Rev = revision(p2);
-		byte[] revisionNodeidBytes = dh.sha1(p1Rev, p2Rev, content).asBinary();
+		byte[] revisionNodeidBytes = dh.sha1(p1Rev, p2Rev, contentByteArray).asBinary();
 		//
 
-		DataSerializer indexFile, dataFile, activeFile;
-		indexFile = dataFile = activeFile = null;
+		DataSerializer indexFile, dataFile;
+		indexFile = dataFile = null;
 		try {
 			//
-			activeFile = indexFile = revlogStream.getIndexStreamWriter(transaction);
+			indexFile = revlogStream.getIndexStreamWriter(transaction);
 			final boolean isInlineData = revlogStream.isInlineData();
 			HeaderWriter revlogHeader = new HeaderWriter(isInlineData);
-			revlogHeader.length(content.length, compressedLen);
+			revlogHeader.length(contentByteArray.length, compressedLen);
 			revlogHeader.nodeid(revisionNodeidBytes);
 			revlogHeader.linkRevision(linkRevision);
 			revlogHeader.parents(p1, p2);
@@ -108,7 +111,6 @@
 			} else {
 				dataFile = revlogStream.getDataStreamWriter(transaction);
 			}
-			activeFile = dataFile;
 			if (useCompressedData) {
 				int actualCompressedLenWritten = revlogDataZip.writeCompressedData(dataFile);
 				if (actualCompressedLenWritten != compressedLen) {
@@ -120,17 +122,13 @@
 			}
 			
 			
-			lastEntryContent = content;
+			lastEntryContent = contentByteArray;
 			lastEntryBase = revlogHeader.baseRevision();
 			lastEntryIndex++;
 			lastEntryRevision = Nodeid.fromBinary(revisionNodeidBytes, 0);
 			revisionCache.put(lastEntryIndex, lastEntryRevision);
 
 			revlogStream.revisionAdded(lastEntryIndex, lastEntryRevision, lastEntryBase, lastEntryOffset);
-		} catch (IOException ex) {
-			String m = String.format("Failed to write revision %d", lastEntryIndex+1, null);
-			// FIXME proper file in the exception based on activeFile == dataFile || indexFile 
-			throw new HgIOException(m, ex, new File(revlogStream.getDataFileName()));
 		} finally {
 			indexFile.done();
 			if (dataFile != null && dataFile != indexFile) {
@@ -140,6 +138,12 @@
 		return lastEntryRevision;
 	}
 	
+	private byte[] toByteArray(DataSource content) throws HgIOException {
+		ByteArrayDataSerializer ba = new ByteArrayDataSerializer();
+		content.serialize(ba);
+		return ba.toByteArray();
+	}
+
 	private Nodeid revision(int revisionIndex) {
 		if (revisionIndex == NO_REVISION) {
 			return Nodeid.NULL;
@@ -251,7 +255,7 @@
 			return this;
 		}
 		
-		public void serialize(DataSerializer out) throws IOException {
+		public void serialize(DataSerializer out) throws HgIOException {
 			header.clear();
 			if (offset == 0) {
 				int version = 1 /* RevlogNG */;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/WorkingCopyContent.java	Thu May 16 19:46:13 2013 +0200
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgInvalidFileException;
+import org.tmatesoft.hg.repo.HgInvalidStateException;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.ByteChannel;
+import org.tmatesoft.hg.util.CancelledException;
+
+/**
+ * Access content of the working copy. Unlike {@link FileContentSupplier}, this class doesn't need a {@link File}
+ * in the working directory. However, since this class is used from {@link HgCommitCommand} when a "modified" file was detected,
+ * it would be odd for the file to be absent from the working dir.
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class WorkingCopyContent implements DataSerializer.DataSource {
+	private final HgDataFile file;
+
+	public WorkingCopyContent(HgDataFile dataFile) {
+		file = dataFile;
+		if (!dataFile.exists()) {
+			throw new IllegalArgumentException();
+		}
+	}
+
+	public void serialize(final DataSerializer out) throws HgIOException {
+		final HgIOException failure[] = new HgIOException[1];
+		try {
+			// TODO #workingCopy API is very limiting, CancelledException is inconvenient, 
+			// and absence of HgIOException is very uncomfortable
+			file.workingCopy(new ByteChannel() {
+				
+				public int write(ByteBuffer buffer) throws IOException {
+					try {
+						if (buffer.hasArray()) {
+							out.write(buffer.array(), buffer.position(), buffer.remaining());
+						}
+						int rv = buffer.remaining();
+						buffer.position(buffer.limit()); // pretend we've consumed the data
+						return rv;
+					} catch (HgIOException ex) {
+						failure[0] = ex;
+						throw new IOException(ex);
+					}
+				}
+			});
+		} catch (HgInvalidFileException ex) {
+			if (failure[0] != null) {
+				throw failure[0];
+			}
+			throw new HgIOException("Write failure", ex, new File(file.getRepo().getWorkingDir(), file.getPath().toString()));
+		} catch (CancelledException ex) {
+			throw new HgInvalidStateException("Our channel doesn't cancel here");
+		}
+	}
+
+	public int serializeLength() {
+		return file.getLength(HgRepository.WORKING_COPY);
+	}
+}
\ No newline at end of file
--- a/src/org/tmatesoft/hg/repo/HgLookup.java	Wed May 15 20:10:09 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgLookup.java	Thu May 16 19:46:13 2013 +0200
@@ -38,7 +38,7 @@
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
-public class HgLookup {
+public class HgLookup implements SessionContext.Source {
 
 	private ConfigFile globalCfg;
 	private SessionContext sessionContext;
@@ -81,7 +81,7 @@
 			throw new HgRepositoryNotFoundException(String.format("Can't locate .hg/ directory of Mercurial repository in %s nor in parent dirs", location)).setLocation(location.getPath());
 		}
 		String repoPath = repository.getParentFile().getAbsolutePath();
-		HgRepository rv = new HgRepository(getContext(), repoPath, repository);
+		HgRepository rv = new HgRepository(getSessionContext(), repoPath, repository);
 		int requiresFlags = rv.getImplHelper().getRequiresFlags();
 		if ((requiresFlags & RequiresFile.REVLOGV1) == 0) {
 			throw new HgRepositoryNotFoundException(String.format("%s: repository version is not supported (Mercurial <0.9?)", repoPath)).setLocation(location.getPath());
@@ -93,7 +93,7 @@
 		if (location == null || !location.canRead()) {
 			throw new HgRepositoryNotFoundException(String.format("Can't read file %s", location)).setLocation(String.valueOf(location));
 		}
-		return new HgBundle(getContext(), new DataAccessProvider(getContext()), location).link();
+		return new HgBundle(getSessionContext(), new DataAccessProvider(getSessionContext()), location).link();
 	}
 	
 	/**
@@ -134,7 +134,7 @@
 				throw new HgBadArgumentException(String.format("Found %s server spec in the config, but failed to initialize with it", key), ex);
 			}
 		}
-		return new HgRemoteRepository(getContext(), url);
+		return new HgRemoteRepository(getSessionContext(), url);
 	}
 	
 	public HgRemoteRepository detect(URL url) throws HgBadArgumentException {
@@ -144,23 +144,23 @@
 		if (Boolean.FALSE.booleanValue()) {
 			throw Internals.notImplemented();
 		}
-		return new HgRemoteRepository(getContext(), url);
+		return new HgRemoteRepository(getSessionContext(), url);
 	}
 
 	private ConfigFile getGlobalConfig() {
 		if (globalCfg == null) {
-			globalCfg = new ConfigFile(getContext());
+			globalCfg = new ConfigFile(getSessionContext());
 			try {
 				globalCfg.addLocation(new File(System.getProperty("user.home"), ".hgrc"));
 			} catch (HgInvalidFileException ex) {
 				// XXX perhaps, makes sense to let caller/client know that we've failed to read global config? 
-				getContext().getLog().dump(getClass(), Warn, ex, null);
+				getSessionContext().getLog().dump(getClass(), Warn, ex, null);
 			}
 		}
 		return globalCfg;
 	}
 
-	private SessionContext getContext() {
+	public SessionContext getSessionContext() {
 		if (sessionContext == null) {
 			sessionContext = new BasicSessionContext(null);
 		}
--- a/test/org/tmatesoft/hg/test/TestCommit.java	Wed May 15 20:10:09 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestCommit.java	Thu May 16 19:46:13 2013 +0200
@@ -21,7 +21,6 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.List;
 
 import org.junit.Rule;
@@ -38,6 +37,7 @@
 import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.internal.COWTransaction;
 import org.tmatesoft.hg.internal.CommitFacility;
+import org.tmatesoft.hg.internal.DataSerializer.ByteArrayDataSource;
 import org.tmatesoft.hg.internal.FileContentSupplier;
 import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.Transaction;
@@ -77,7 +77,7 @@
 		// FIXME test diff for processing changed newlines (ie \r\n -> \n or vice verse) - if a whole line or 
 		// just changed endings are in the patch!
 		HgDataFile df = hgRepo.getFileNode("file1");
-		cf.add(df, new ByteArraySupplier("hello\nworld".getBytes()));
+		cf.add(df, new ByteArrayDataSource("hello\nworld".getBytes()));
 		Transaction tr = newTransaction(hgRepo);
 		Nodeid secondRev = cf.commit("SECOND", tr);
 		tr.commit();
@@ -106,7 +106,7 @@
 		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), NO_REVISION);
 		HgDataFile df = hgRepo.getFileNode(fname);
 		final byte[] initialContent = "hello\nworld".getBytes();
-		cf.add(df, new ByteArraySupplier(initialContent));
+		cf.add(df, new ByteArrayDataSource(initialContent));
 		String comment = "commit 1";
 		Transaction tr = newTransaction(hgRepo);
 		Nodeid c1Rev = cf.commit(comment,  tr);
@@ -141,13 +141,12 @@
 		//
 		RepoUtils.modifyFileAppend(fileD, "A CHANGE\n");
 		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentCsetRevIndex);
-		FileContentSupplier contentProvider = new FileContentSupplier(fileD);
+		FileContentSupplier contentProvider = new FileContentSupplier(hgRepo, fileD);
 		cf.add(dfD, contentProvider);
 		cf.branch("branch1");
 		Transaction tr = newTransaction(hgRepo);
 		Nodeid commitRev1 = cf.commit("FIRST",  tr);
 		tr.commit();
-		contentProvider.done();
 		//
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).range(parentCsetRevIndex+1, TIP).execute();
 		assertEquals(1, commits.size());
@@ -171,13 +170,12 @@
 		RepoUtils.createFile(new File(repoLoc, "xx"), "xyz");
 		new HgAddRemoveCommand(hgRepo).add(Path.create("xx")).remove(Path.create("d")).execute();
 		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), hgRepo.getChangelog().getLastRevision());
-		FileContentSupplier contentProvider = new FileContentSupplier(new File(repoLoc, "xx"));
+		FileContentSupplier contentProvider = new FileContentSupplier(hgRepo, new File(repoLoc, "xx"));
 		cf.add(hgRepo.getFileNode("xx"), contentProvider);
 		cf.forget(hgRepo.getFileNode("d"));
 		Transaction tr = newTransaction(hgRepo);
 		Nodeid commitRev = cf.commit("Commit with add/remove cmd",  tr);
 		tr.commit();
-		contentProvider.done();
 		//
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).changeset(commitRev).execute();
 		HgChangeset cmt = commits.get(0);
@@ -206,24 +204,21 @@
 		RepoUtils.modifyFileAppend(fileD, " 1 \n");
 		final int parentCsetRevIndex = hgRepo.getChangelog().getLastRevision();
 		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentCsetRevIndex);
-		FileContentSupplier contentProvider = new FileContentSupplier(fileD);
+		FileContentSupplier contentProvider = new FileContentSupplier(hgRepo, fileD);
 		cf.add(dfD, contentProvider);
 		cf.branch("branch1");
 		Transaction tr = newTransaction(hgRepo);
 		Nodeid commitRev1 = cf.commit("FIRST",  tr);
-		contentProvider.done();
 		//
 		RepoUtils.modifyFileAppend(fileD, " 2 \n");
-		cf.add(dfD, contentProvider = new FileContentSupplier(fileD));
+		cf.add(dfD, contentProvider = new FileContentSupplier(hgRepo, fileD));
 		cf.branch("branch2");
 		Nodeid commitRev2 = cf.commit("SECOND",  tr);
-		contentProvider.done();
 		//
 		RepoUtils.modifyFileAppend(fileD, " 2 \n");
-		cf.add(dfD, contentProvider = new FileContentSupplier(fileD));
+		cf.add(dfD, contentProvider = new FileContentSupplier(hgRepo, fileD));
 		cf.branch(DEFAULT_BRANCH_NAME);
 		Nodeid commitRev3 = cf.commit("THIRD",  tr);
-		contentProvider.done();
 		tr.commit();
 		//
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).range(parentCsetRevIndex+1, TIP).execute();
@@ -331,7 +326,7 @@
 		final int parentCsetRevIndex = hgRepo.getChangelog().getLastRevision();
 		// HgCommitCommand can't do branch yet
 		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentCsetRevIndex);
-		cf.add(hgRepo.getFileNode("a"), new FileContentSupplier(new File(repoLoc, "a")));
+		cf.add(hgRepo.getFileNode("a"), new FileContentSupplier(hgRepo, new File(repoLoc, "a")));
 		cf.branch(branch);
 		Transaction tr = newTransaction(hgRepo);
 		Nodeid commit = cf.commit("FIRST",  tr);
@@ -360,47 +355,5 @@
 
 	public static void main(String[] args) throws Exception {
 		new TestCommit().testCommitToEmpty();
-		if (Boolean.TRUE.booleanValue()) {
-			return;
-		}
-		String input = "abcdefghijklmnopqrstuvwxyz";
-		ByteArraySupplier bas = new ByteArraySupplier(input.getBytes());
-		ByteBuffer bb = ByteBuffer.allocate(7);
-		byte[] result = new byte[26];
-		int rpos = 0;
-		while (bas.read(bb) != -1) {
-			bb.flip();
-			bb.get(result, rpos, bb.limit());
-			rpos += bb.limit();
-			bb.clear();
-		}
-		if (input.length() != rpos) {
-			throw new AssertionError();
-		}
-		String output = new String(result);
-		if (!input.equals(output)) {
-			throw new AssertionError();
-		}
-		System.out.println(output);
-	}
-
-	static class ByteArraySupplier implements CommitFacility.ByteDataSupplier {
-
-		private final byte[] data;
-		private int pos = 0;
-
-		public ByteArraySupplier(byte[] source) {
-			data = source;
-		}
-
-		public int read(ByteBuffer buf) {
-			if (pos >= data.length) {
-				return -1;
-			}
-			int count = Math.min(buf.remaining(), data.length - pos);
-			buf.put(data, pos, count);
-			pos += count;
-			return count;
-		}
 	}
 }