changeset 591:e447384f3771

CommitFacility as internal class; refactored infrastructure around internals (access to RevlogStream)
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Tue, 30 Apr 2013 18:55:42 +0200
parents 8cbc2a883d95
children b12cc3d64a35
files src/org/tmatesoft/hg/core/HgCommitCommand.java src/org/tmatesoft/hg/internal/CommitFacility.java src/org/tmatesoft/hg/internal/FileContentSupplier.java src/org/tmatesoft/hg/internal/Internals.java src/org/tmatesoft/hg/internal/RevlogStreamFactory.java src/org/tmatesoft/hg/internal/RevlogStreamWriter.java src/org/tmatesoft/hg/repo/CommitFacility.java src/org/tmatesoft/hg/repo/HgRepository.java test/org/tmatesoft/hg/test/TestCommit.java
diffstat 9 files changed, 403 insertions(+), 323 deletions(-)
--- a/src/org/tmatesoft/hg/core/HgCommitCommand.java	Mon Apr 29 17:04:51 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgCommitCommand.java	Tue Apr 30 18:55:42 2013 +0200
@@ -23,9 +23,10 @@
 import java.util.ArrayList;
 
 import org.tmatesoft.hg.internal.ByteArrayChannel;
+import org.tmatesoft.hg.internal.CommitFacility;
 import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.FileContentSupplier;
-import org.tmatesoft.hg.repo.CommitFacility;
+import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgInternals;
@@ -104,7 +105,7 @@
 				newRevision = Nodeid.NULL;
 				return new Outcome(Kind.Failure, "nothing to add");
 			}
-			CommitFacility cf = new CommitFacility(repo, parentRevs[0], parentRevs[1]);
+			CommitFacility cf = new CommitFacility(Internals.getInstance(repo), parentRevs[0], parentRevs[1]);
 			for (Path m : status.getModified()) {
 				HgDataFile df = repo.getFileNode(m);
 				cf.add(df, new WorkingCopyContent(df));
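
With CommitFacility now an internal class, callers construct it from the package's Internals handle rather than from HgRepository directly. A minimal sketch of the new wiring (a fragment, not a complete program; it reuses the Internals.getInstance(), getFileNode() and FileContentSupplier calls visible elsewhere in this changeset, and assumes the caller handles the HgIOException/HgRepositoryLockException declared by commit()):

	// locate the repository and pick the parent revision to commit against
	HgRepository hgRepo = new HgLookup().detect(repoLoc);
	int parentRevIndex = hgRepo.getChangelog().getLastRevision();
	// the facility is built from Internals now, not from HgRepository
	CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentRevIndex);
	HgDataFile df = hgRepo.getFileNode("file1");
	cf.add(df, new FileContentSupplier(new File(repoLoc, "file1")));
	Nodeid committed = cf.commit("sample commit message");
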
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/CommitFacility.java	Tue Apr 30 18:55:42 2013 +0200
@@ -0,0 +1,240 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgIOException;
+import org.tmatesoft.hg.core.HgRepositoryLockException;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgDataFile;
+import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.Pair;
+import org.tmatesoft.hg.util.Path;
+import org.tmatesoft.hg.util.LogFacility.Severity;
+
+/**
+ * WORK IN PROGRESS
+ * Name candidates: CommitObject, FutureCommit or PendingCommit
+ * The only public entry point for now is {@link HgCommitCommand}.
+ * 
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+@Experimental(reason="Work in progress")
+public final class CommitFacility {
+	private final Internals repo;
+	private final int p1Commit, p2Commit;
+	private Map<Path, Pair<HgDataFile, ByteDataSupplier>> files = new LinkedHashMap<Path, Pair<HgDataFile, ByteDataSupplier>>();
+	private Set<Path> removals = new TreeSet<Path>();
+	private String branch, user;
+
+	public CommitFacility(Internals hgRepo, int parentCommit) {
+		this(hgRepo, parentCommit, NO_REVISION);
+	}
+	
+	public CommitFacility(Internals hgRepo, int parent1Commit, int parent2Commit) {
+		repo = hgRepo;
+		p1Commit = parent1Commit;
+		p2Commit = parent2Commit;
+		if (parent1Commit != NO_REVISION && parent1Commit == parent2Commit) {
+			throw new IllegalArgumentException("Merging same revision is dubious");
+		}
+	}
+
+	public boolean isMerge() {
+		return p1Commit != NO_REVISION && p2Commit != NO_REVISION;
+	}
+
+	public void add(HgDataFile dataFile, ByteDataSupplier content) {
+		if (content == null) {
+			throw new IllegalArgumentException();
+		}
+		removals.remove(dataFile.getPath());
+		files.put(dataFile.getPath(), new Pair<HgDataFile, ByteDataSupplier>(dataFile, content));
+	}
+
+	public void forget(HgDataFile dataFile) {
+		files.remove(dataFile.getPath());
+		removals.add(dataFile.getPath());
+	}
+	
+	public void branch(String branchName) {
+		branch = branchName;
+	}
+	
+	public void user(String userName) {
+		user = userName;
+	}
+	
+	public Nodeid commit(String message) throws HgIOException, HgRepositoryLockException {
+		final HgChangelog clog = repo.getRepo().getChangelog();
+		final int clogRevisionIndex = clog.getRevisionCount();
+		ManifestRevision c1Manifest = new ManifestRevision(null, null);
+		ManifestRevision c2Manifest = new ManifestRevision(null, null);
+		if (p1Commit != NO_REVISION) {
+			repo.getRepo().getManifest().walk(p1Commit, p1Commit, c1Manifest);
+		}
+		if (p2Commit != NO_REVISION) {
+			repo.getRepo().getManifest().walk(p2Commit, p2Commit, c2Manifest);
+		}
+//		Pair<Integer, Integer> manifestParents = getManifestParents();
+		Pair<Integer, Integer> manifestParents = new Pair<Integer, Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
+		TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
+		HashMap<Path, Pair<Integer, Integer>> fileParents = new HashMap<Path, Pair<Integer,Integer>>();
+		for (Path f : c1Manifest.files()) {
+			HgDataFile df = repo.getRepo().getFileNode(f);
+			Nodeid fileKnownRev1 = c1Manifest.nodeid(f), fileKnownRev2;
+			final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev1);
+			final int fileRevIndex2;
+			if ((fileKnownRev2 = c2Manifest.nodeid(f)) != null) {
+				// merged files
+				fileRevIndex2 = df.getRevisionIndex(fileKnownRev2);
+			} else {
+				fileRevIndex2 = NO_REVISION;
+			}
+				
+			fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex1, fileRevIndex2));
+			newManifestRevision.put(f, fileKnownRev1);
+		}
+		//
+		// Forget removed
+		for (Path p : removals) {
+			newManifestRevision.remove(p);
+		}
+		//
+		// Register new/changed
+		ArrayList<Path> newlyAddedFiles = new ArrayList<Path>();
+		ArrayList<Path> touchInDirstate = new ArrayList<Path>();
+		for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
+			HgDataFile df = e.first();
+			Pair<Integer, Integer> fp = fileParents.get(df.getPath());
+			if (fp == null) {
+				// NEW FILE
+				fp = new Pair<Integer, Integer>(NO_REVISION, NO_REVISION);
+			}
+			ByteDataSupplier bds = e.second();
+			// FIXME quick fix; instead, pass the ByteDataSupplier directly to RevlogStreamWriter
+			ByteBuffer bb = ByteBuffer.allocate(2048);
+			ByteArrayChannel bac = new ByteArrayChannel();
+			while (bds.read(bb) != -1) {
+				bb.flip();
+				bac.write(bb);
+				bb.clear();
+			}
+			RevlogStream contentStream;
+			if (df.exists()) {
+				contentStream = repo.getImplAccess().getStream(df);
+			} else {
+				contentStream = repo.createStoreFile(df.getPath());
+				newlyAddedFiles.add(df.getPath());
+				// FIXME df doesn't get df.content updated, so clients that attempt
+				// to access the newly added file right after the commit would fail
+				// (even though the file is present in the store)
+			}
+			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo, contentStream);
+			Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second());
+			newManifestRevision.put(df.getPath(), fileRev);
+			touchInDirstate.add(df.getPath());
+		}
+		//
+		// Manifest
+		final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder();
+		for (Map.Entry<Path, Nodeid> me : newManifestRevision.entrySet()) {
+			manifestBuilder.add(me.getKey().toString(), me.getValue());
+		}
+		RevlogStreamWriter manifestWriter = new RevlogStreamWriter(repo, repo.getImplAccess().getManifestStream());
+		Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder.build(), clogRevisionIndex, manifestParents.first(), manifestParents.second());
+		//
+		// Changelog
+		final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder();
+		changelogBuilder.setModified(files.keySet());
+		changelogBuilder.branch(branch == null ? HgRepository.DEFAULT_BRANCH_NAME : branch);
+		changelogBuilder.user(String.valueOf(user));
+		byte[] clogContent = changelogBuilder.build(manifestRev, message);
+		RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo, repo.getImplAccess().getChangelogStream());
+		Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit);
+		// FIXME move fncache update to an external facility, along with dirstate update
+		if (!newlyAddedFiles.isEmpty() && repo.fncacheInUse()) {
+			FNCacheFile fncache = new FNCacheFile(repo);
+			for (Path p : newlyAddedFiles) {
+				fncache.add(p);
+			}
+			try {
+				fncache.write();
+			} catch (IOException ex) {
+				// see comment above for fncache.read()
+				repo.getSessionContext().getLog().dump(getClass(), Severity.Error, ex, "Failed to write fncache, error ignored");
+			}
+		}
+		// bring dirstate up to commit state
+		final DirstateBuilder dirstateBuilder = new DirstateBuilder(repo);
+		dirstateBuilder.fillFrom(new DirstateReader(repo, new Path.SimpleSource()));
+		for (Path p : removals) {
+			dirstateBuilder.recordRemoved(p);
+		}
+		for (Path p : touchInDirstate) {
+			dirstateBuilder.recordUncertain(p);
+		}
+		dirstateBuilder.parents(changesetRev, Nodeid.NULL);
+		dirstateBuilder.serialize();
+		return changesetRev;
+	}
+/*
+	private Pair<Integer, Integer> getManifestParents() {
+		return new Pair<Integer, Integer>(extractManifestRevisionIndex(p1Commit), extractManifestRevisionIndex(p2Commit));
+	}
+
+	private int extractManifestRevisionIndex(int clogRevIndex) {
+		if (clogRevIndex == NO_REVISION) {
+			return NO_REVISION;
+		}
+		RawChangeset commitObject = repo.getChangelog().range(clogRevIndex, clogRevIndex).get(0);
+		Nodeid manifestRev = commitObject.manifest();
+		if (manifestRev.isNull()) {
+			return NO_REVISION;
+		}
+		return repo.getManifest().getRevisionIndex(manifestRev);
+	}
+*/
+
+	// unlike DataAccess (which provides structured access), this one
+	// deals with a plain sequence of bytes, for cases when no structure of the data is needed
+	// FIXME consider java.nio.ReadableByteChannel or a ByteStream/ByteSequence(read, length, reset)
+	// SHALL be in line with util.ByteChannel, reading bytes from HgDataFile, preferably DataAccess#readBytes(BB) to match the API,
+	// and a wrapper for ByteVector
+	public interface ByteDataSupplier { // TODO look if can resolve DataAccess in HgCloneCommand visibility issue
+		// FIXME needs lifecycle, e.g. for supplier that reads from WC
+		int read(ByteBuffer buf);
+	}
+	
+	public interface ByteDataConsumer {
+		void write(ByteBuffer buf);
+	}
+}
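
The ByteDataSupplier interface above is deliberately minimal: read() fills the buffer and returns -1 once the content is exhausted, which is how commit() drains it into a ByteArrayChannel. A possible in-memory implementation, given here only as a sketch (the ByteArraySupplier referenced by TestCommit below is the project's own helper and may differ):

	// Sketch: serves a byte[] through CommitFacility.ByteDataSupplier, returning -1 once exhausted
	class ByteArrayDataSupplier implements CommitFacility.ByteDataSupplier {
		private final byte[] data;
		private int pos = 0;

		ByteArrayDataSupplier(byte[] data) {
			this.data = data;
		}

		public int read(ByteBuffer buf) {
			if (pos >= data.length) {
				return -1; // end of data
			}
			int chunk = Math.min(buf.remaining(), data.length - pos);
			buf.put(data, pos, chunk);
			pos += chunk;
			return chunk;
		}
	}
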
--- a/src/org/tmatesoft/hg/internal/FileContentSupplier.java	Mon Apr 29 17:04:51 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/FileContentSupplier.java	Tue Apr 30 18:55:42 2013 +0200
@@ -24,7 +24,6 @@
 import java.nio.channels.FileChannel;
 
 import org.tmatesoft.hg.core.HgIOException;
-import org.tmatesoft.hg.repo.CommitFacility;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.util.Path;
 
--- a/src/org/tmatesoft/hg/internal/Internals.java	Mon Apr 29 17:04:51 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/Internals.java	Tue Apr 30 18:55:42 2013 +0200
@@ -32,11 +32,13 @@
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgRepoConfig.ExtensionsSection;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgRepositoryFiles;
 import org.tmatesoft.hg.repo.HgRepositoryLock;
 import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.PathRewrite;
 
 /**
@@ -115,25 +117,29 @@
 	private final HgRepository repo;
 	private final File repoDir;
 	private final boolean isCaseSensitiveFileSystem;
-	private final boolean shallCacheRevlogsInRepo;
 	private final DataAccessProvider dataAccess;
+	private final ImplAccess implAccess;
 	
 	private final int requiresFlags;
 
 	private final PathRewrite dataPathHelper; // access to file storage area (usually under .hg/store/data/), with filenames mangled  
 	private final PathRewrite repoPathHelper; // access to system files (under .hg/store if requires has 'store' flag)
 
-	public Internals(HgRepository hgRepo, File hgDir) throws HgRuntimeException {
+	private final RevlogStreamFactory streamProvider;
+
+	public Internals(HgRepository hgRepo, File hgDir, ImplAccess implementationAccess) throws HgRuntimeException {
 		repo = hgRepo;
 		repoDir = hgDir;
+		implAccess = implementationAccess;
 		isCaseSensitiveFileSystem = !runningOnWindows();
 		SessionContext ctx = repo.getSessionContext();
-		shallCacheRevlogsInRepo = new PropertyMarshal(ctx).getBoolean(CFG_PROPERTY_REVLOG_STREAM_CACHE, true);
 		dataAccess = new DataAccessProvider(ctx);
 		RepoInitializer repoInit = new RepoInitializer().initRequiresFromFile(repoDir);
 		requiresFlags = repoInit.getRequires();
 		dataPathHelper = repoInit.buildDataFilesHelper(getSessionContext());
 		repoPathHelper = repoInit.buildStoreFilesHelper();
+		boolean shallCacheRevlogsInRepo = new PropertyMarshal(ctx).getBoolean(CFG_PROPERTY_REVLOG_STREAM_CACHE, true);
+		streamProvider = new RevlogStreamFactory(this, shallCacheRevlogsInRepo); 
 	}
 	
 	public boolean isInvalid() {
@@ -371,6 +377,9 @@
 		return configFile;
 	}
 
+	/*package-local*/ImplAccess getImplAccess() {
+		return implAccess;
+	}
 	
 	private static List<File> getWindowsConfigFilesPerInstall(File hgInstallDir) {
 		File f = new File(hgInstallDir, "Mercurial.ini");
@@ -454,11 +463,25 @@
 		// fallback to default, let calling code fail with Exception if can't write
 		return new File(System.getProperty("user.home"), ".hgrc");
 	}
+	
+	public RevlogStream createManifestStream() {
+		File manifestFile = getFileFromStoreDir("00manifest.i");
+		return streamProvider.create(manifestFile);
+	}
 
-	public boolean shallCacheRevlogs() {
-		return shallCacheRevlogsInRepo;
+	public RevlogStream createChangelogStream() {
+		File chlogFile = getFileFromStoreDir("00changelog.i");
+		return streamProvider.create(chlogFile);
+	}
+
+	public RevlogStream resolveStoreFile(Path path) {
+		return streamProvider.resolveStoreFile(path);
 	}
 	
+	/*package-local*/ RevlogStream createStoreFile(Path path) throws HgInvalidControlFileException {
+		return streamProvider.createStoreFile(path);
+	}
+
 	// marker method
 	public static IllegalStateException notImplemented() {
 		return new IllegalStateException("Not implemented");
@@ -496,4 +519,11 @@
 		assert ((long) i) == l : "Loss of data!";
 		return i;
 	}
+
+	// access to implementation details (fields, methods) of the org.tmatesoft.hg.repo package
+	public interface ImplAccess {
+		public RevlogStream getStream(HgDataFile df);
+		public RevlogStream getManifestStream();
+		public RevlogStream getChangelogStream();
+	}
 }
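
After this change Internals owns revlog stream acquisition, delegating to the RevlogStreamFactory introduced below, while the ImplAccess callback lets it reach the package-private streams HgRepository already holds. A sketch of how internal code obtains streams now, assuming an Internals instance (e.g. via Internals.getInstance(hgRepo)):

	Internals impl = Internals.getInstance(hgRepo);
	RevlogStream changelog = impl.createChangelogStream();  // backed by .hg/store/00changelog.i
	RevlogStream manifest = impl.createManifestStream();    // backed by .hg/store/00manifest.i
	// resolveStoreFile() yields null when the file has no revlog in the store yet
	RevlogStream existing = impl.resolveStoreFile(Path.create("dir/file.txt"));
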
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/org/tmatesoft/hg/internal/RevlogStreamFactory.java	Tue Apr 30 18:55:42 2013 +0200
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *  
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.internal;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.ref.SoftReference;
+import java.util.HashMap;
+
+import org.tmatesoft.hg.repo.HgInvalidControlFileException;
+import org.tmatesoft.hg.util.Path;
+
+/**
+ * Factory to create {@link RevlogStream RevlogStreams}, cache-capable.
+ *   
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public final class RevlogStreamFactory {
+	
+	private final Internals repo;
+	private final HashMap<Path, SoftReference<RevlogStream>> streamsCache;
+
+
+	public RevlogStreamFactory(Internals hgRepo, boolean shallCacheRevlogs) {
+		repo = hgRepo;
+		if (shallCacheRevlogs) {
+			streamsCache = new HashMap<Path, SoftReference<RevlogStream>>();
+		} else {
+			streamsCache = null;
+		}
+	}
+	
+	/**
+	 * Creates a stream for the specified file; the stream is not cached.
+	 */
+	/*package-local*/ RevlogStream create(File f) {
+		return new RevlogStream(repo.getDataAccess(), f);
+	}
+
+	/**
+	 * Perhaps this should be a separate interface, like ContentLookup
+	 * @param path - normalized file name
+	 * @return <code>null</code> if the path doesn't resolve to an existing file
+	 */
+	/*package-local*/ RevlogStream resolveStoreFile(Path path) {
+		final SoftReference<RevlogStream> ref = shallCacheRevlogs() ? streamsCache.get(path) : null;
+		RevlogStream cached = ref == null ? null : ref.get();
+		if (cached != null) {
+			return cached;
+		}
+		File f = repo.getFileFromDataDir(path);
+		if (f.exists()) {
+			RevlogStream s = create(f);
+			if (shallCacheRevlogs()) {
+				streamsCache.put(path, new SoftReference<RevlogStream>(s));
+			}
+			return s;
+		}
+		return null;
+	}
+	
+	/*package-local*/ RevlogStream createStoreFile(Path path) throws HgInvalidControlFileException {
+		File f = repo.getFileFromDataDir(path);
+		try {
+			if (!f.exists()) {
+				f.getParentFile().mkdirs();
+				f.createNewFile();
+			}
+			RevlogStream s = create(f);
+			if (shallCacheRevlogs()) {
+				streamsCache.put(path, new SoftReference<RevlogStream>(s));
+			}
+			return s;
+		} catch (IOException ex) {
+			throw new HgInvalidControlFileException("Can't create a file in the storage", ex, f);
+		}
+	}
+
+	private boolean shallCacheRevlogs() {
+		return streamsCache != null;
+	}
+}
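
Revlog caching is now a property of this factory rather than of HgRepository: when shallCacheRevlogs is false the SoftReference map is never allocated, and createStoreFile() materializes the backing file on demand before handing out a stream. The methods are package-local and normally reached through the Internals wrappers above; a sketch of the intended behaviour under that assumption:

	RevlogStreamFactory streams = new RevlogStreamFactory(internals, true); // caching enabled
	RevlogStream a = streams.resolveStoreFile(Path.create("a.txt")); // null if a.txt has no revlog yet
	RevlogStream b = streams.createStoreFile(Path.create("b.txt"));  // creates the backing file if missing
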
--- a/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Mon Apr 29 17:04:51 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Tue Apr 30 18:55:42 2013 +0200
@@ -44,11 +44,11 @@
 	private IntMap<Nodeid> revisionCache = new IntMap<Nodeid>(32);
 	private RevlogStream revlogStream;
 	
-	public RevlogStreamWriter(SessionContext ctx, RevlogStream stream) {
-		assert ctx != null;
+	public RevlogStreamWriter(SessionContext.Source ctxSource, RevlogStream stream) {
+		assert ctxSource != null;
 		assert stream != null;
 				
-		revlogDataZip = new RevlogCompressor(ctx);
+		revlogDataZip = new RevlogCompressor(ctxSource.getSessionContext());
 		revlogStream = stream;
 	}
 	
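
RevlogStreamWriter now takes a SessionContext.Source instead of a bare SessionContext, so anything that can hand out a session context can be passed as-is; the rewritten CommitFacility above passes its Internals instance directly. A sketch under that assumption:

	// before: new RevlogStreamWriter(repo.getSessionContext(), stream)
	// after: any SessionContext.Source will do, e.g. the Internals instance itself
	RevlogStreamWriter writer = new RevlogStreamWriter(internals, stream);
	Nodeid newRev = writer.addRevision(content, changelogRevIndex, parentRevIndex1, parentRevIndex2);
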
--- a/src/org/tmatesoft/hg/repo/CommitFacility.java	Mon Apr 29 17:04:51 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,250 +0,0 @@
-/*
- * Copyright (c) 2013 TMate Software Ltd
- *  
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; version 2 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * For information on how to redistribute this software under
- * the terms of a license other than GNU General Public License
- * contact TMate Software at support@hg4j.com
- */
-package org.tmatesoft.hg.repo;
-
-import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.tmatesoft.hg.core.HgCommitCommand;
-import org.tmatesoft.hg.core.HgIOException;
-import org.tmatesoft.hg.core.HgRepositoryLockException;
-import org.tmatesoft.hg.core.Nodeid;
-import org.tmatesoft.hg.internal.ByteArrayChannel;
-import org.tmatesoft.hg.internal.ChangelogEntryBuilder;
-import org.tmatesoft.hg.internal.DirstateBuilder;
-import org.tmatesoft.hg.internal.DirstateReader;
-import org.tmatesoft.hg.internal.Experimental;
-import org.tmatesoft.hg.internal.FNCacheFile;
-import org.tmatesoft.hg.internal.Internals;
-import org.tmatesoft.hg.internal.ManifestEntryBuilder;
-import org.tmatesoft.hg.internal.ManifestRevision;
-import org.tmatesoft.hg.internal.RevlogStream;
-import org.tmatesoft.hg.internal.RevlogStreamWriter;
-import org.tmatesoft.hg.util.Pair;
-import org.tmatesoft.hg.util.Path;
-import org.tmatesoft.hg.util.LogFacility.Severity;
-
-/**
- * WORK IN PROGRESS
- * Name: CommitObject, FutureCommit or PendingCommit
- * Only public API now: {@link HgCommitCommand}. TEMPORARILY lives in the oth.repo public packages, until code interdependencies are resolved
- * 
- * @author Artem Tikhomirov
- * @author TMate Software Ltd.
- */
-@Experimental(reason="Work in progress")
-public final class CommitFacility {
-	private final HgRepository repo;
-	private final int p1Commit, p2Commit;
-	private Map<Path, Pair<HgDataFile, ByteDataSupplier>> files = new LinkedHashMap<Path, Pair<HgDataFile, ByteDataSupplier>>();
-	private Set<Path> removals = new TreeSet<Path>();
-	private String branch, user;
-
-	public CommitFacility(HgRepository hgRepo, int parentCommit) {
-		this(hgRepo, parentCommit, NO_REVISION);
-	}
-	
-	public CommitFacility(HgRepository hgRepo, int parent1Commit, int parent2Commit) {
-		repo = hgRepo;
-		p1Commit = parent1Commit;
-		p2Commit = parent2Commit;
-		if (parent1Commit != NO_REVISION && parent1Commit == parent2Commit) {
-			throw new IllegalArgumentException("Merging same revision is dubious");
-		}
-	}
-
-	public boolean isMerge() {
-		return p1Commit != NO_REVISION && p2Commit != NO_REVISION;
-	}
-
-	public void add(HgDataFile dataFile, ByteDataSupplier content) {
-		if (content == null) {
-			throw new IllegalArgumentException();
-		}
-		removals.remove(dataFile.getPath());
-		files.put(dataFile.getPath(), new Pair<HgDataFile, ByteDataSupplier>(dataFile, content));
-	}
-
-	public void forget(HgDataFile dataFile) {
-		files.remove(dataFile.getPath());
-		removals.add(dataFile.getPath());
-	}
-	
-	public void branch(String branchName) {
-		branch = branchName;
-	}
-	
-	public void user(String userName) {
-		user = userName;
-	}
-	
-	public Nodeid commit(String message) throws HgIOException, HgRepositoryLockException {
-		
-		final HgChangelog clog = repo.getChangelog();
-		final int clogRevisionIndex = clog.getRevisionCount();
-		ManifestRevision c1Manifest = new ManifestRevision(null, null);
-		ManifestRevision c2Manifest = new ManifestRevision(null, null);
-		if (p1Commit != NO_REVISION) {
-			repo.getManifest().walk(p1Commit, p1Commit, c1Manifest);
-		}
-		if (p2Commit != NO_REVISION) {
-			repo.getManifest().walk(p2Commit, p2Commit, c2Manifest);
-		}
-//		Pair<Integer, Integer> manifestParents = getManifestParents();
-		Pair<Integer, Integer> manifestParents = new Pair<Integer, Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
-		TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
-		HashMap<Path, Pair<Integer, Integer>> fileParents = new HashMap<Path, Pair<Integer,Integer>>();
-		for (Path f : c1Manifest.files()) {
-			HgDataFile df = repo.getFileNode(f);
-			Nodeid fileKnownRev1 = c1Manifest.nodeid(f), fileKnownRev2;
-			final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev1);
-			final int fileRevIndex2;
-			if ((fileKnownRev2 = c2Manifest.nodeid(f)) != null) {
-				// merged files
-				fileRevIndex2 = df.getRevisionIndex(fileKnownRev2);
-			} else {
-				fileRevIndex2 = NO_REVISION;
-			}
-				
-			fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex1, fileRevIndex2));
-			newManifestRevision.put(f, fileKnownRev1);
-		}
-		//
-		// Forget removed
-		for (Path p : removals) {
-			newManifestRevision.remove(p);
-		}
-		//
-		// Register new/changed
-		ArrayList<Path> newlyAddedFiles = new ArrayList<Path>();
-		ArrayList<Path> touchInDirstate = new ArrayList<Path>();
-		for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
-			HgDataFile df = e.first();
-			Pair<Integer, Integer> fp = fileParents.get(df.getPath());
-			if (fp == null) {
-				// NEW FILE
-				fp = new Pair<Integer, Integer>(NO_REVISION, NO_REVISION);
-			}
-			ByteDataSupplier bds = e.second();
-			// FIXME quickfix, instead, pass ByteDataSupplier directly to RevlogStreamWriter
-			ByteBuffer bb = ByteBuffer.allocate(2048);
-			ByteArrayChannel bac = new ByteArrayChannel();
-			while (bds.read(bb) != -1) {
-				bb.flip();
-				bac.write(bb);
-				bb.clear();
-			}
-			RevlogStream contentStream;
-			if (df.exists()) {
-				contentStream = df.content;
-			} else {
-				contentStream = repo.createStoreFile(df.getPath());
-				newlyAddedFiles.add(df.getPath());
-				// FIXME df doesn't get df.content updated, and clients
-				// that would attempt to access newly added file after commit would fail
-				// (despite the fact the file is in there)
-			}
-			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo.getSessionContext(), contentStream);
-			Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second());
-			newManifestRevision.put(df.getPath(), fileRev);
-			touchInDirstate.add(df.getPath());
-		}
-		//
-		// Manifest
-		final ManifestEntryBuilder manifestBuilder = new ManifestEntryBuilder();
-		for (Map.Entry<Path, Nodeid> me : newManifestRevision.entrySet()) {
-			manifestBuilder.add(me.getKey().toString(), me.getValue());
-		}
-		RevlogStreamWriter manifestWriter = new RevlogStreamWriter(repo.getSessionContext(), repo.getManifest().content);
-		Nodeid manifestRev = manifestWriter.addRevision(manifestBuilder.build(), clogRevisionIndex, manifestParents.first(), manifestParents.second());
-		//
-		// Changelog
-		final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder();
-		changelogBuilder.setModified(files.keySet());
-		changelogBuilder.branch(branch == null ? HgRepository.DEFAULT_BRANCH_NAME : branch);
-		changelogBuilder.user(String.valueOf(user));
-		byte[] clogContent = changelogBuilder.build(manifestRev, message);
-		RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo.getSessionContext(), clog.content);
-		Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit);
-		// FIXME move fncache update to an external facility, along with dirstate update
-		if (!newlyAddedFiles.isEmpty() && repo.getImplHelper().fncacheInUse()) {
-			FNCacheFile fncache = new FNCacheFile(repo.getImplHelper());
-			for (Path p : newlyAddedFiles) {
-				fncache.add(p);
-			}
-			try {
-				fncache.write();
-			} catch (IOException ex) {
-				// see comment above for fnchache.read()
-				repo.getSessionContext().getLog().dump(getClass(), Severity.Error, ex, "Failed to write fncache, error ignored");
-			}
-		}
-		// bring dirstate up to commit state
-		Internals implRepo = Internals.getInstance(repo);
-		final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo);
-		dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource()));
-		for (Path p : removals) {
-			dirstateBuilder.recordRemoved(p);
-		}
-		for (Path p : touchInDirstate) {
-			dirstateBuilder.recordUncertain(p);
-		}
-		dirstateBuilder.parents(changesetRev, Nodeid.NULL);
-		dirstateBuilder.serialize();
-		return changesetRev;
-	}
-/*
-	private Pair<Integer, Integer> getManifestParents() {
-		return new Pair<Integer, Integer>(extractManifestRevisionIndex(p1Commit), extractManifestRevisionIndex(p2Commit));
-	}
-
-	private int extractManifestRevisionIndex(int clogRevIndex) {
-		if (clogRevIndex == NO_REVISION) {
-			return NO_REVISION;
-		}
-		RawChangeset commitObject = repo.getChangelog().range(clogRevIndex, clogRevIndex).get(0);
-		Nodeid manifestRev = commitObject.manifest();
-		if (manifestRev.isNull()) {
-			return NO_REVISION;
-		}
-		return repo.getManifest().getRevisionIndex(manifestRev);
-	}
-*/
-
-	// unlike DataAccess (which provides structured access), this one 
-	// deals with a sequence of bytes, when there's no need in structure of the data
-	// FIXME java.nio.ReadableByteChannel or ByteStream/ByteSequence(read, length, reset)
-	// SHALL be inline with util.ByteChannel, reading bytes from HgDataFile, preferably DataAccess#readBytes(BB) to match API,
-	// and a wrap for ByteVector
-	public interface ByteDataSupplier { // TODO look if can resolve DataAccess in HgCloneCommand visibility issue
-		// FIXME needs lifecycle, e.g. for supplier that reads from WC
-		int read(ByteBuffer buf);
-	}
-	
-	public interface ByteDataConsumer {
-		void write(ByteBuffer buf);
-	}
-}
--- a/src/org/tmatesoft/hg/repo/HgRepository.java	Mon Apr 29 17:04:51 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRepository.java	Tue Apr 30 18:55:42 2013 +0200
@@ -23,11 +23,9 @@
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.StringReader;
-import java.lang.ref.SoftReference;
 import java.nio.CharBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 
 import org.tmatesoft.hg.core.Nodeid;
@@ -94,7 +92,6 @@
 	 */
 	public static final String DEFAULT_BRANCH_NAME = "default";
 
-	private final File repoDir; // .hg folder
 	private final File workingDir; // .hg/../
 	private final String repoLocation;
 	/*
@@ -114,9 +111,6 @@
 	private HgBookmarks bookmarks;
 	private HgExtensionsManager extManager;
 
-	// XXX perhaps, shall enable caching explicitly
-	private final HashMap<Path, SoftReference<RevlogStream>> streamsCache = new HashMap<Path, SoftReference<RevlogStream>>();
-	
 	private final org.tmatesoft.hg.internal.Internals impl;
 	private HgIgnore ignore;
 	private HgRepoConfig repoConfig;
@@ -130,7 +124,6 @@
 
 	
 	HgRepository(String repositoryPath) {
-		repoDir = null;
 		workingDir = null;
 		repoLocation = repositoryPath;
 		normalizePath = null;
@@ -146,14 +139,24 @@
 		assert repositoryPath != null; 
 		assert repositoryRoot != null;
 		assert ctx != null;
-		repoDir = repositoryRoot;
-		workingDir = repoDir.getParentFile();
+		workingDir = repositoryRoot.getParentFile();
 		if (workingDir == null) {
-			throw new IllegalArgumentException(repoDir.toString());
+			throw new IllegalArgumentException(repositoryRoot.toString());
 		}
 		repoLocation = repositoryPath;
 		sessionContext = ctx;
-		impl = new org.tmatesoft.hg.internal.Internals(this, repositoryRoot);
+		impl = new Internals(this, repositoryRoot, new Internals.ImplAccess() {
+			
+			public RevlogStream getStream(HgDataFile df) {
+				return df.content;
+			}
+			public RevlogStream getManifestStream() {
+				return HgRepository.this.getManifest().content;
+			}
+			public RevlogStream getChangelogStream() {
+				return HgRepository.this.getChangelog().content;
+			}
+		});
 		normalizePath = impl.buildNormalizePathRewrite(); 
 	}
 
@@ -174,7 +177,7 @@
 	 * @return repository location information, never <code>null</code>
 	 */
 	public String getLocation() {
-		return repoLocation;
+		return repoLocation; // XXX a dedicated field to keep this is a bit too much
 	}
 
 	public boolean isInvalid() {
@@ -183,8 +186,7 @@
 	
 	public HgChangelog getChangelog() {
 		if (changelog == null) {
-			File chlogFile = impl.getFileFromStoreDir("00changelog.i");
-			RevlogStream content = new RevlogStream(impl.getDataAccess(), chlogFile);
+			RevlogStream content = impl.createChangelogStream();
 			changelog = new HgChangelog(this, content);
 		}
 		return changelog;
@@ -192,8 +194,7 @@
 	
 	public HgManifest getManifest() {
 		if (manifest == null) {
-			File manifestFile = impl.getFileFromStoreDir("00manifest.i");
-			RevlogStream content = new RevlogStream(impl.getDataAccess(), manifestFile);
+			RevlogStream content = impl.createManifestStream();
 			manifest = new HgManifest(this, content, impl.buildFileNameEncodingHelper());
 		}
 		return manifest;
@@ -271,7 +272,7 @@
 	}
 
 	public HgDataFile getFileNode(Path path) {
-		RevlogStream content = resolveStoreFile(path);
+		RevlogStream content = impl.resolveStoreFile(path);
 		if (content == null) {
 			return new HgDataFile(this, path);
 		}
@@ -486,45 +487,6 @@
 	public SessionContext getSessionContext() {
 		return sessionContext;
 	}
-
-	/**
-	 * Perhaps, should be separate interface, like ContentLookup
-	 * @param path - normalized file name
-	 * @return <code>null</code> if path doesn't resolve to a existing file
-	 */
-	/*package-local*/ RevlogStream resolveStoreFile(Path path) {
-		final SoftReference<RevlogStream> ref = streamsCache.get(path);
-		RevlogStream cached = ref == null ? null : ref.get();
-		if (cached != null) {
-			return cached;
-		}
-		File f = impl.getFileFromDataDir(path);
-		if (f.exists()) {
-			RevlogStream s = new RevlogStream(impl.getDataAccess(), f);
-			if (impl.shallCacheRevlogs()) {
-				streamsCache.put(path, new SoftReference<RevlogStream>(s));
-			}
-			return s;
-		}
-		return null;
-	}
-	
-	/*package-local*/ RevlogStream createStoreFile(Path path) throws HgInvalidControlFileException {
-		File f = impl.getFileFromDataDir(path);
-		try {
-			if (!f.exists()) {
-				f.getParentFile().mkdirs();
-				f.createNewFile();
-			}
-			RevlogStream s = new RevlogStream(impl.getDataAccess(), f);
-			if (impl.shallCacheRevlogs()) {
-				streamsCache.put(path, new SoftReference<RevlogStream>(s));
-			}
-			return s;
-		} catch (IOException ex) {
-			throw new HgInvalidControlFileException("Can't create a file in the storage", ex, f);
-		}
-	}
 	
 	/*package-local*/ List<Filter> getFiltersFromRepoToWorkingDir(Path p) {
 		return instantiateFilters(p, new Filter.Options(Filter.Direction.FromRepo));
--- a/test/org/tmatesoft/hg/test/TestCommit.java	Mon Apr 29 17:04:51 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestCommit.java	Tue Apr 30 18:55:42 2013 +0200
@@ -35,8 +35,9 @@
 import org.tmatesoft.hg.core.HgStatusCommand;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.ByteArrayChannel;
+import org.tmatesoft.hg.internal.CommitFacility;
 import org.tmatesoft.hg.internal.FileContentSupplier;
-import org.tmatesoft.hg.repo.CommitFacility;
+import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgRepository;
@@ -62,7 +63,7 @@
 		new ExecHelper(new OutputParser.Stub(), repoLoc).run("hg", "commit", "--addremove", "-m", "FIRST");
 		//
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
-		CommitFacility cf = new CommitFacility(hgRepo, 0);
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), 0);
 		// FIXME test diff for processing changed newlines (ie \r\n -> \n or vice verse) - if a whole line or 
 		// just changed endings are in the patch!
 		HgDataFile df = hgRepo.getFileNode("file1");
@@ -90,7 +91,7 @@
 		//
 		HgRepository hgRepo = new HgLookup().detect(repoLoc);
 		assertEquals("[sanity]", 0, new HgLogCommand(hgRepo).execute().size());
-		CommitFacility cf = new CommitFacility(hgRepo, NO_REVISION);
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), NO_REVISION);
 		HgDataFile df = hgRepo.getFileNode(fname);
 		final byte[] initialContent = "hello\nworld".getBytes();
 		cf.add(df, new ByteArraySupplier(initialContent));
@@ -125,7 +126,7 @@
 		assertEquals("[sanity]", DEFAULT_BRANCH_NAME, parentCset.getBranch());
 		//
 		RepoUtils.modifyFileAppend(fileD, "A CHANGE\n");
-		CommitFacility cf = new CommitFacility(hgRepo, parentCsetRevIndex);
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentCsetRevIndex);
 		FileContentSupplier contentProvider = new FileContentSupplier(fileD);
 		cf.add(dfD, contentProvider);
 		cf.branch("branch1");
@@ -155,7 +156,7 @@
 		assertTrue("[sanity]", new File(repoLoc, "d").canRead());
 		RepoUtils.createFile(new File(repoLoc, "xx"), "xyz");
 		new HgAddRemoveCommand(hgRepo).add(Path.create("xx")).remove(Path.create("d")).execute();
-		CommitFacility cf = new CommitFacility(hgRepo, hgRepo.getChangelog().getLastRevision());
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), hgRepo.getChangelog().getLastRevision());
 		FileContentSupplier contentProvider = new FileContentSupplier(new File(repoLoc, "xx"));
 		cf.add(hgRepo.getFileNode("xx"), contentProvider);
 		cf.forget(hgRepo.getFileNode("d"));
@@ -191,7 +192,7 @@
 		//
 		RepoUtils.modifyFileAppend(fileD, " 1 \n");
 		final int parentCsetRevIndex = hgRepo.getChangelog().getLastRevision();
-		CommitFacility cf = new CommitFacility(hgRepo, parentCsetRevIndex);
+		CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), parentCsetRevIndex);
 		FileContentSupplier contentProvider = new FileContentSupplier(fileD);
 		cf.add(dfD, contentProvider);
 		cf.branch("branch1");