# HG changeset patch
# User Artem Tikhomirov
# Date 1361818100 -3600
# Node ID 6ca3d0c5b4bc64ec66f723a255fa87133c8a026e
# Parent  154718ae23edb4714c07a10cda4ae5145e32d119
Commit: tests and fixes for defects discovered

diff -r 154718ae23ed -r 6ca3d0c5b4bc src/org/tmatesoft/hg/core/HgLogCommand.java
--- a/src/org/tmatesoft/hg/core/HgLogCommand.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/src/org/tmatesoft/hg/core/HgLogCommand.java	Mon Feb 25 19:48:20 2013 +0100
@@ -298,10 +298,13 @@
         if (csetTransform != null) {
             throw new ConcurrentModificationException();
         }
+        if (repo.getChangelog().getRevisionCount() == 0) {
+            return;
+        }
         final int lastCset = endRev == TIP ? repo.getChangelog().getLastRevision() : endRev;
         // XXX pretty much like HgInternals.checkRevlogRange
         if (lastCset < 0 || lastCset > repo.getChangelog().getLastRevision()) {
-            throw new HgBadArgumentException(String.format("Bad value %d for end revision", endRev), null);
+            throw new HgBadArgumentException(String.format("Bad value %d for end revision", lastCset), null);
         }
         if (startRev < 0 || startRev > lastCset) {
             throw new HgBadArgumentException(String.format("Bad value %d for start revision for range [%1$d..%d]", startRev, lastCset), null);
diff -r 154718ae23ed -r 6ca3d0c5b4bc src/org/tmatesoft/hg/internal/FNCacheFile.java
--- a/src/org/tmatesoft/hg/internal/FNCacheFile.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/src/org/tmatesoft/hg/internal/FNCacheFile.java	Mon Feb 25 19:48:20 2013 +0100
@@ -21,27 +21,35 @@
 import java.io.IOException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
+import java.util.List;
 
 import org.tmatesoft.hg.util.Path;
 
 /**
+ * Append-only fncache support
+ *
  * <blockquote>
  * The fncache file contains the paths of all filelog files in the store as encoded by mercurial.filelog.encodedir. The paths are separated by '\n' (LF).
  * </blockquote>
  * @see http://mercurial.selenic.com/wiki/fncacheRepoFormat
+ *
+ *
  * @author Artem Tikhomirov
  * @author TMate Software Ltd.
  */
 public class FNCacheFile {
 
     private final Internals repo;
-    private final ArrayList<Path> files;
+//    private final List<Path> files;
+    private List<Path> added;
 
     public FNCacheFile(Internals internalRepo) {
         repo = internalRepo;
-        files = new ArrayList<Path>();
+//        files = new ArrayList<Path>();
     }
 
+    /*
+     * For append-only option, we don't care reading the original content
     public void read(Path.Source pathFactory) throws IOException {
         File f = fncacheFile();
         files.clear();
@@ -52,20 +60,22 @@
         // names in fncache are in local encoding, shall translate to unicode
         new LineReader(f, repo.getSessionContext().getLog(), repo.getFilenameEncoding()).read(new LineReader.SimpleLineCollector(), entries);
         for (String e : entries) {
+            // FIXME plain wrong, need either to decode paths and strip off .i/.d or (if keep names as is) change write()
             files.add(pathFactory.path(e));
         }
     }
+    */
 
     public void write() throws IOException {
-        if (files.isEmpty()) {
+        if (added == null || added.isEmpty()) {
             return;
         }
         File f = fncacheFile();
         f.getParentFile().mkdirs();
         final Charset filenameEncoding = repo.getFilenameEncoding();
-        FileOutputStream fncacheFile = new FileOutputStream(f);
-        for (Path p : files) {
-            String s = "data/" + p.toString() + ".i"; // TODO post-1.0 this is plain wrong. (a) likely need .d files, too; (b) what about dh/ location?
+        FileOutputStream fncacheFile = new FileOutputStream(f, true);
+        for (Path p : added) {
+            String s = "data/" + p.toString() + ".i"; // TODO post-1.0 this is plain wrong. (a) need .d files, too; (b) what about dh/ location?
             fncacheFile.write(s.getBytes(filenameEncoding));
             fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
         }
@@ -73,7 +83,10 @@
     }
 
     public void add(Path p) {
-        files.add(p);
+        if (added == null) {
+            added = new ArrayList<Path>();
+        }
+        added.add(p);
     }
 
     private File fncacheFile() {
diff -r 154718ae23ed -r 6ca3d0c5b4bc src/org/tmatesoft/hg/internal/Internals.java
--- a/src/org/tmatesoft/hg/internal/Internals.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/src/org/tmatesoft/hg/internal/Internals.java	Mon Feb 25 19:48:20 2013 +0100
@@ -202,6 +202,10 @@
         return new EncodingHelper(getFilenameEncoding(), repo.getSessionContext());
     }
 
+    public boolean fncacheInUse() {
+        return (getRequiresFlags() & RequiresFile.FNCACHE) != 0;
+    }
+
     /*package-local*/ Charset getFilenameEncoding() {
         return getFileEncoding(getSessionContext());
     }
diff -r 154718ae23ed -r 6ca3d0c5b4bc src/org/tmatesoft/hg/internal/RevlogStream.java
--- a/src/org/tmatesoft/hg/internal/RevlogStream.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/src/org/tmatesoft/hg/internal/RevlogStream.java	Mon Feb 25 19:48:20 2013 +0100
@@ -348,6 +348,7 @@
         if (inline && indexRecordOffset != null) {
             assert indexRecordOffset.length == revisionIndex;
             int[] indexRecordOffsetCopy = new int[indexRecordOffset.length + 1];
+            System.arraycopy(indexRecordOffset, 0, indexRecordOffsetCopy, 0, indexRecordOffset.length);
             indexRecordOffsetCopy[indexRecordOffset.length] = offsetFieldToInlineFileOffset(revisionOffset, revisionIndex);
             indexRecordOffset = indexRecordOffsetCopy;
         }
diff -r 154718ae23ed -r 6ca3d0c5b4bc src/org/tmatesoft/hg/repo/CommitFacility.java
--- a/src/org/tmatesoft/hg/repo/CommitFacility.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/src/org/tmatesoft/hg/repo/CommitFacility.java	Mon Feb 25 19:48:20 2013 +0100
@@ -24,7 +24,9 @@
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.Set;
 import java.util.TreeMap;
+import java.util.TreeSet;
 
 import org.tmatesoft.hg.core.HgRepositoryLockException;
 import org.tmatesoft.hg.core.Nodeid;
@@ -48,11 +50,12 @@
  * @author TMate Software Ltd.
  */
 @Experimental(reason="Work in progress")
-public class CommitFacility {
+public final class CommitFacility {
     private final HgRepository repo;
     private final int p1Commit, p2Commit;
     private Map<Path, Pair<HgDataFile, ByteDataSupplier>> files = new LinkedHashMap<Path, Pair<HgDataFile, ByteDataSupplier>>();
-
+    private Set<Path> removals = new TreeSet<Path>();
+    private String branch;
 
     public CommitFacility(HgRepository hgRepo, int parentCommit) {
         this(hgRepo, parentCommit, NO_REVISION);
@@ -72,8 +75,21 @@
     }
 
     public void add(HgDataFile dataFile, ByteDataSupplier content) {
+        if (content == null) {
+            throw new IllegalArgumentException();
+        }
+        removals.remove(dataFile.getPath());
         files.put(dataFile.getPath(), new Pair<HgDataFile, ByteDataSupplier>(dataFile, content));
     }
+
+    public void forget(HgDataFile dataFile) {
+        files.remove(dataFile.getPath());
+        removals.add(dataFile.getPath());
+    }
+
+    public void branch(String branchName) {
+        branch = branchName;
+    }
 
     public Nodeid commit(String message) throws HgRepositoryLockException {
@@ -87,37 +103,32 @@
         if (p2Commit != NO_REVISION) {
             repo.getManifest().walk(p2Commit, p2Commit, c2Manifest);
         }
-        FNCacheFile fncache = null;
-        if ((repo.getImplHelper().getRequiresFlags() & RequiresFile.FNCACHE) != 0) {
-            fncache = new FNCacheFile(repo.getImplHelper());
-            try {
-                fncache.read(new Path.SimpleSource());
-            } catch (IOException ex) {
-                // fncache may be restored using native client, so do not treat failure to read it as severe enough to stop
-                repo.getSessionContext().getLog().dump(getClass(), Severity.Error, ex, "Failed to read fncache, attempt commit nevertheless");
-            }
-        }
         // Pair<Integer,Integer> manifestParents = getManifestParents();
         Pair<Integer,Integer> manifestParents = new Pair<Integer,Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
         TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
         HashMap<Path, Pair<Integer, Integer>> fileParents = new HashMap<Path, Pair<Integer, Integer>>();
         for (Path f : c1Manifest.files()) {
             HgDataFile df = repo.getFileNode(f);
-            Nodeid fileKnownRev = c1Manifest.nodeid(f);
-            final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev);
+            Nodeid fileKnownRev1 = c1Manifest.nodeid(f), fileKnownRev2;
+            final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev1);
             final int fileRevIndex2;
-            if ((fileKnownRev = c2Manifest.nodeid(f)) != null) {
+            if ((fileKnownRev2 = c2Manifest.nodeid(f)) != null) {
                 // merged files
-                fileRevIndex2 = df.getRevisionIndex(fileKnownRev);
+                fileRevIndex2 = df.getRevisionIndex(fileKnownRev2);
             } else {
                 fileRevIndex2 = NO_REVISION;
             }
             fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex1, fileRevIndex2));
-            newManifestRevision.put(f, fileKnownRev);
+            newManifestRevision.put(f, fileKnownRev1);
         }
         //
-        // Files
+        // Forget removed
+        for (Path p : removals) {
+            newManifestRevision.remove(p);
+        }
+        //
+        // Register new/changed
         ArrayList<Path> newlyAddedFiles = new ArrayList<Path>();
         for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
             HgDataFile df = e.first();
@@ -161,10 +172,13 @@
         // Changelog
         final ChangelogEntryBuilder changelogBuilder = new ChangelogEntryBuilder();
         changelogBuilder.setModified(files.keySet());
+        changelogBuilder.branch(branch == null ? HgRepository.DEFAULT_BRANCH_NAME : branch);
         byte[] clogContent = changelogBuilder.build(manifestRev, message);
         RevlogStreamWriter changelogWriter = new RevlogStreamWriter(repo.getSessionContext(), clog.content);
         Nodeid changesetRev = changelogWriter.addRevision(clogContent, clogRevisionIndex, p1Commit, p2Commit);
-        if (!newlyAddedFiles.isEmpty() && fncache != null) {
+        // FIXME move fncache update to an external facility, along with dirstate update
+        if (!newlyAddedFiles.isEmpty() && repo.getImplHelper().fncacheInUse()) {
+            FNCacheFile fncache = new FNCacheFile(repo.getImplHelper());
             for (Path p : newlyAddedFiles) {
                 fncache.add(p);
             }
@@ -198,6 +212,7 @@
     // unlike DataAccess (which provides structured access), this one
     // deals with a sequence of bytes, when there's no need in structure of the data
     public interface ByteDataSupplier { // TODO look if can resolve DataAccess in HgCloneCommand visibility issue
+        // FIXME needs lifecycle, e.g. for supplier that reads from WC
         int read(ByteBuffer buf);
     }
diff -r 154718ae23ed -r 6ca3d0c5b4bc test/org/tmatesoft/hg/test/RepoUtils.java
--- a/test/org/tmatesoft/hg/test/RepoUtils.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/test/org/tmatesoft/hg/test/RepoUtils.java	Mon Feb 25 19:48:20 2013 +0100
@@ -22,6 +22,7 @@
 
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.util.ArrayList;
 
@@ -60,7 +61,8 @@
         File testRepoLoc = createEmptyDir(name);
         ExecHelper eh = new ExecHelper(new OutputParser.Stub(), testRepoLoc.getParentFile());
         ArrayList<String> cmd = new ArrayList<String>();
-        cmd.add("hg"); cmd.add("clone");
+        cmd.add("hg");
+        cmd.add("clone");
         if (noupdate) {
             cmd.add("--noupdate");
         }
@@ -71,11 +73,33 @@
         return testRepoLoc;
     }
 
-    static void modifyFileAppend(File f) throws IOException {
+    static void modifyFileAppend(File f, Object content) throws IOException {
         assertTrue(f.isFile());
         FileOutputStream fos = new FileOutputStream(f, true);
-        fos.write("XXX".getBytes());
+        if (content == null) {
+            content = "XXX".getBytes();
+        }
+        if (content instanceof byte[]) {
+            fos.write((byte[]) content);
+        } else {
+            fos.write(String.valueOf(content).getBytes());
+        }
         fos.close();
     }
 
+    static void createFile(File f, Object content) throws IOException {
+        if (content == null) {
+            f.createNewFile();
+            return;
+        }
+        if (content instanceof byte[]) {
+            FileOutputStream fos = new FileOutputStream(f);
+            fos.write((byte[]) content);
+            fos.close();
+        } else {
+            FileWriter fw = new FileWriter(f);
+            fw.write(String.valueOf(content));
+            fw.close();
+        }
+    }
 }
diff -r 154718ae23ed -r 6ca3d0c5b4bc test/org/tmatesoft/hg/test/TestAddRemove.java
--- a/test/org/tmatesoft/hg/test/TestAddRemove.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/test/org/tmatesoft/hg/test/TestAddRemove.java	Mon Feb 25 19:48:20 2013 +0100
@@ -17,10 +17,9 @@
 package org.tmatesoft.hg.test;
 
 import static org.junit.Assert.assertEquals;
+import static org.tmatesoft.hg.test.RepoUtils.createFile;
 
 import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
 
 import org.junit.Rule;
 import org.junit.Test;
@@ -86,11 +85,4 @@
         eh.run("hg", "status", "-A");
         assertEquals(2, statusParser.getRemoved().size());
     }
-
-    private static void createFile(File f, Object content) throws IOException {
-        FileOutputStream fos = new FileOutputStream(f, true);
-        fos.write(String.valueOf(content).getBytes());
-        fos.close();
-    }
-
 }
diff -r 154718ae23ed -r 6ca3d0c5b4bc test/org/tmatesoft/hg/test/TestCommit.java
--- a/test/org/tmatesoft/hg/test/TestCommit.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/test/org/tmatesoft/hg/test/TestCommit.java	Mon Feb 25 19:48:20 2013 +0100
@@ -16,18 +16,36 @@
  */
 package org.tmatesoft.hg.test;
 
+import static org.junit.Assert.*;
+import static org.tmatesoft.hg.repo.HgRepository.*;
+import static org.tmatesoft.hg.repo.HgRepository.DEFAULT_BRANCH_NAME;
 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileWriter;
+import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.util.List;
 
+import org.hamcrest.CoreMatchers;
 import org.junit.Test;
+import org.tmatesoft.hg.core.HgAddRemoveCommand;
+import org.tmatesoft.hg.core.HgCatCommand;
+import org.tmatesoft.hg.core.HgChangeset;
+import org.tmatesoft.hg.core.HgLogCommand;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.repo.CommitFacility;
+import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgLookup;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.Path;
 
 /**
+ * Handy for debug to see patch content:
+ * ...RevlogDump /tmp/test-commit2non-empty/.hg/ store/data/file1.i dumpData
  *
  * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
@@ -37,31 +55,181 @@
     @Test
     public void testCommitToNonEmpty() throws Exception {
         File repoLoc = RepoUtils.initEmptyTempRepo("test-commit2non-empty");
-        FileWriter fw = new FileWriter(new File(repoLoc, "file1"));
-        fw.write("hello");
-        fw.close();
-        new ExecHelper(new OutputParser.Stub(true), repoLoc).run("hg", "commit", "--addremove", "-m", "FIRST");
+        RepoUtils.createFile(new File(repoLoc, "file1"), "hello\n");
+        new ExecHelper(new OutputParser.Stub(), repoLoc).run("hg", "commit", "--addremove", "-m", "FIRST");
         //
         HgRepository hgRepo = new HgLookup().detect(repoLoc);
         CommitFacility cf = new CommitFacility(hgRepo, 0);
-        // FIXME test diff for processing changed newlines - if a whole line or just changed endings are in the patch!
-        cf.add(hgRepo.getFileNode("file1"), new ByteArraySupplier("hello\nworld".getBytes()));
-        cf.commit("SECOND");
-        // /tmp/test-commit2non-empty/.hg/ store/data/file1.i dumpData
+        // FIXME test diff for processing changed newlines (ie \r\n -> \n or vice verse) - if a whole line or
+        // just changed endings are in the patch!
+        HgDataFile df = hgRepo.getFileNode("file1");
+        cf.add(df, new ByteArraySupplier("hello\nworld".getBytes()));
+        Nodeid secondRev = cf.commit("SECOND");
+        //
+        List<HgChangeset> commits = new HgLogCommand(hgRepo).execute();
+        assertEquals(2, commits.size());
+        HgChangeset c1 = commits.get(0);
+        HgChangeset c2 = commits.get(1);
+        assertEquals("FIRST", c1.getComment());
+        assertEquals("SECOND", c2.getComment());
+        assertEquals(df.getPath(), c2.getAffectedFiles().get(0));
+        assertEquals(c1.getNodeid(), c2.getFirstParentRevision());
+        assertEquals(Nodeid.NULL, c2.getSecondParentRevision());
+        assertEquals(secondRev, c2.getNodeid());
     }
 
     @Test
     public void testCommitToEmpty() throws Exception {
         File repoLoc = RepoUtils.initEmptyTempRepo("test-commit2empty");
-        FileWriter fw = new FileWriter(new File(repoLoc, "file1"));
-        fw.write("hello");
-        fw.close();
+        String fname = "file1";
+        RepoUtils.createFile(new File(repoLoc, fname), null);
+        new ExecHelper(new OutputParser.Stub(), repoLoc).run("hg", "add", fname);
         //
         HgRepository hgRepo = new HgLookup().detect(repoLoc);
+        assertEquals("[sanity]", 0, new HgLogCommand(hgRepo).execute().size());
         CommitFacility cf = new CommitFacility(hgRepo, NO_REVISION);
-        // FIXME test diff for processing changed newlines - if a whole line or just changed endings are in the patch!
-        cf.add(hgRepo.getFileNode("file1"), new ByteArraySupplier("hello\nworld".getBytes()));
-        cf.commit("commit 1");
+        HgDataFile df = hgRepo.getFileNode(fname);
+        final byte[] initialContent = "hello\nworld".getBytes();
+        cf.add(df, new ByteArraySupplier(initialContent));
+        String comment = "commit 1";
+        Nodeid c1Rev = cf.commit(comment);
+        List<HgChangeset> commits = new HgLogCommand(hgRepo).execute();
+        assertEquals(1, commits.size());
+        HgChangeset c1 = commits.get(0);
+        assertEquals(1, c1.getAffectedFiles().size());
+        assertEquals(df.getPath(), c1.getAffectedFiles().get(0));
+        assertEquals(0, c1.getRevisionIndex());
+        assertEquals(Nodeid.NULL, c1.getFirstParentRevision());
+        assertEquals(Nodeid.NULL, c1.getSecondParentRevision());
+        assertEquals(HgRepository.DEFAULT_BRANCH_NAME, c1.getBranch());
+        assertEquals(comment, c1.getComment());
+        assertEquals(c1Rev, c1.getNodeid());
+        ByteArrayChannel bac = new ByteArrayChannel();
+        new HgCatCommand(hgRepo).file(df.getPath()).execute(bac);
+        assertArrayEquals(initialContent, bac.toArray());
+    }
+
+    @Test
+    public void testCommitIntoBranch() throws Exception {
+        File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-add-remove-commit", false);
+        HgRepository hgRepo = new HgLookup().detect(repoLoc);
+        HgDataFile dfD = hgRepo.getFileNode("d");
+        assertTrue("[sanity]", dfD.exists());
+        File fileD = new File(repoLoc, "d");
+        assertTrue("[sanity]", fileD.canRead());
+        final int parentCsetRevIndex = hgRepo.getChangelog().getLastRevision();
+        HgChangeset parentCset = new HgLogCommand(hgRepo).range(parentCsetRevIndex, parentCsetRevIndex).execute().get(0);
+        assertEquals("[sanity]", DEFAULT_BRANCH_NAME, parentCset.getBranch());
+        //
+        RepoUtils.modifyFileAppend(fileD, "A CHANGE\n");
+        CommitFacility cf = new CommitFacility(hgRepo, parentCsetRevIndex);
+        FileContentSupplier contentProvider = new FileContentSupplier(fileD);
+        cf.add(dfD, contentProvider);
+        cf.branch("branch1");
+        Nodeid commitRev1 = cf.commit("FIRST");
+        contentProvider.done();
+        //
+        // FIXME requirement to reload repository is disgusting
+        hgRepo = new HgLookup().detect(repoLoc);
+        List<HgChangeset> commits = new HgLogCommand(hgRepo).range(parentCsetRevIndex+1, TIP).execute();
+        assertEquals(1, commits.size());
+        HgChangeset c1 = commits.get(0);
+        assertEquals(c1.getNodeid(), commitRev1);
+        assertEquals("branch1", c1.getBranch());
+        assertEquals("FIRST", c1.getComment());
+        //
+        assertHgVerifyOk(repoLoc);
+    }
+
+    /**
+     * use own add and remove commands and then commit
+     */
+    @Test
+    public void testCommitWithAddRemove() throws Exception {
+        File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-add-remove-commit", false);
+        HgRepository hgRepo = new HgLookup().detect(repoLoc);
+        assertTrue("[sanity]", hgRepo.getFileNode("d").exists());
+        assertTrue("[sanity]", new File(repoLoc, "d").canRead());
+        RepoUtils.createFile(new File(repoLoc, "xx"), "xyz");
+        new HgAddRemoveCommand(hgRepo).add(Path.create("xx")).remove(Path.create("d")).execute();
+        CommitFacility cf = new CommitFacility(hgRepo, hgRepo.getChangelog().getLastRevision());
+        FileContentSupplier contentProvider = new FileContentSupplier(new File(repoLoc, "xx"));
+        cf.add(hgRepo.getFileNode("xx"), contentProvider);
+        cf.forget(hgRepo.getFileNode("d"));
+        Nodeid commitRev = cf.commit("Commit with add/remove cmd");
+        contentProvider.done();
+        // Note, working directory still points to original revision, CommitFacility doesn't update dirstate
+        //
+        // FIXME requirement to reload repository is disgusting
+        hgRepo = new HgLookup().detect(repoLoc);
+        List<HgChangeset> commits = new HgLogCommand(hgRepo).changeset(commitRev).execute();
+        HgChangeset cmt = commits.get(0);
+        assertEquals(1, cmt.getAddedFiles().size());
+        assertEquals("xx", cmt.getAddedFiles().get(0).getPath().toString());
+        assertEquals(1, cmt.getRemovedFiles().size());
+        assertEquals("d", cmt.getRemovedFiles().get(0).toString());
+        ByteArrayChannel sink = new ByteArrayChannel();
+        new HgCatCommand(hgRepo).file(Path.create("xx")).changeset(commitRev).execute(sink);
+        assertArrayEquals("xyz".getBytes(), sink.toArray());
+        //
+        assertHgVerifyOk(repoLoc);
+    }
+    /**
+     * perform few commits one by one, into different branches
+     */
+    @Test
+    public void testSequentialCommits() throws Exception {
+        File repoLoc = RepoUtils.cloneRepoToTempLocation("log-1", "test-add-remove-commit", false);
+        HgRepository hgRepo = new HgLookup().detect(repoLoc);
+        HgDataFile dfD = hgRepo.getFileNode("d");
+        assertTrue("[sanity]", dfD.exists());
+        File fileD = new File(repoLoc, "d");
+        assertTrue("[sanity]", fileD.canRead());
+        //
+        RepoUtils.modifyFileAppend(fileD, " 1 \n");
+        final int parentCsetRevIndex = hgRepo.getChangelog().getLastRevision();
+        CommitFacility cf = new CommitFacility(hgRepo, parentCsetRevIndex);
+        FileContentSupplier contentProvider = new FileContentSupplier(fileD);
+        cf.add(dfD, contentProvider);
+        cf.branch("branch1");
+        Nodeid commitRev1 = cf.commit("FIRST");
+        contentProvider.done();
+        //
+        RepoUtils.modifyFileAppend(fileD, " 2 \n");
+        cf.add(dfD, contentProvider = new FileContentSupplier(fileD));
+        cf.branch("branch2");
+        Nodeid commitRev2 = cf.commit("SECOND");
+        contentProvider.done();
+        //
+        RepoUtils.modifyFileAppend(fileD, " 2 \n");
+        cf.add(dfD, contentProvider = new FileContentSupplier(fileD));
+        cf.branch(DEFAULT_BRANCH_NAME);
+        Nodeid commitRev3 = cf.commit("THIRD");
+        contentProvider.done();
+        //
+        // FIXME requirement to reload repository is disgusting
+        hgRepo = new HgLookup().detect(repoLoc);
+        List<HgChangeset> commits = new HgLogCommand(hgRepo).range(parentCsetRevIndex+1, TIP).execute();
+        assertEquals(3, commits.size());
+        HgChangeset c1 = commits.get(0);
+        HgChangeset c2 = commits.get(1);
+        HgChangeset c3 = commits.get(2);
+        assertEquals(c1.getNodeid(), commitRev1);
+        assertEquals(c2.getNodeid(), commitRev2);
+        assertEquals(c3.getNodeid(), commitRev3);
+        assertEquals("branch1", c1.getBranch());
+        assertEquals("branch2", c2.getBranch());
+        assertEquals(DEFAULT_BRANCH_NAME, c3.getBranch());
+        assertEquals("FIRST", c1.getComment());
+        assertEquals("SECOND", c2.getComment());
+        assertEquals("THIRD", c3.getComment());
+        assertHgVerifyOk(repoLoc);
+    }
+
+    private void assertHgVerifyOk(File repoLoc) throws InterruptedException, IOException {
+        ExecHelper verifyRun = new ExecHelper(new OutputParser.Stub(), repoLoc);
+        verifyRun.run("hg", "verify");
+        assertEquals("hg verify", 0, verifyRun.getExitValue());
     }
 
     public static void main(String[] args) throws Exception {
@@ -109,4 +277,35 @@
             return count;
         }
     }
+
+    static class FileContentSupplier implements CommitFacility.ByteDataSupplier {
+        private final FileChannel channel;
+        private IOException error;
+
+        public FileContentSupplier(File f) throws IOException {
+            if (!f.canRead()) {
+                throw new IOException(String.format("Can't read file %s", f));
+            }
+            channel = new FileInputStream(f).getChannel();
+        }
+
+        public int read(ByteBuffer buf) {
+            if (error != null) {
+                return -1;
+            }
+            try {
+                return channel.read(buf);
+            } catch (IOException ex) {
+                error = ex;
+            }
+            return -1;
+        }
+
+        public void done() throws IOException {
+            channel.close();
+            if (error != null) {
+                throw error;
+            }
+        }
+    }
 }
diff -r 154718ae23ed -r 6ca3d0c5b4bc test/org/tmatesoft/hg/test/TestRevert.java
--- a/test/org/tmatesoft/hg/test/TestRevert.java	Mon Feb 25 18:41:44 2013 +0100
+++ b/test/org/tmatesoft/hg/test/TestRevert.java	Mon Feb 25 19:48:20 2013 +0100
@@ -52,7 +52,7 @@
         repo = new HgLookup().detect(testRepoLoc);
 
         Path targetFile = Path.create("b");
-        RepoUtils.modifyFileAppend(new File(testRepoLoc, targetFile.toString()));
+        RepoUtils.modifyFileAppend(new File(testRepoLoc, targetFile.toString()), "XXX");
         StatusOutputParser statusParser = new StatusOutputParser();
         eh = new ExecHelper(statusParser, testRepoLoc);
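
Usage sketch of the commit API exercised by the tests above, assuming an existing repository at a hypothetical repoDir with a tracked file "file1", and reusing the FileContentSupplier helper this patch adds to TestCommit; it mirrors what testCommitIntoBranch does and is not a prescribed workflow:

    // assumption: repoDir is a File pointing at an existing Mercurial repository
    HgRepository hgRepo = new HgLookup().detect(repoDir);
    // commit on top of the current tip revision
    CommitFacility cf = new CommitFacility(hgRepo, hgRepo.getChangelog().getLastRevision());
    // FileContentSupplier is the package-private helper from TestCommit in this changeset
    TestCommit.FileContentSupplier content = new TestCommit.FileContentSupplier(new File(repoDir, "file1"));
    cf.add(hgRepo.getFileNode("file1"), content); // register changed content for the commit
    cf.branch("branch1");                         // optional: record the changeset on a named branch
    Nodeid cset = cf.commit("change file1");      // writes filelog, manifest and changelog entries
    content.done();                               // closes the channel, rethrows any read error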