changeset 588:41218d84842a

Update dirstate after commit
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Mon, 29 Apr 2013 16:37:57 +0200
parents a52f4cc56f9c
children c18095eedde0
files src/org/tmatesoft/hg/repo/CommitFacility.java test/org/tmatesoft/hg/test/TestCommit.java
diffstat 2 files changed, 68 insertions(+), 47 deletions(-)
--- a/src/org/tmatesoft/hg/repo/CommitFacility.java	Fri Apr 26 20:04:17 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/CommitFacility.java	Mon Apr 29 16:37:57 2013 +0200
@@ -29,12 +29,16 @@
 import java.util.TreeSet;
 
 import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.HgRepositoryLockException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.internal.ChangelogEntryBuilder;
+import org.tmatesoft.hg.internal.DirstateBuilder;
+import org.tmatesoft.hg.internal.DirstateReader;
 import org.tmatesoft.hg.internal.Experimental;
 import org.tmatesoft.hg.internal.FNCacheFile;
+import org.tmatesoft.hg.internal.Internals;
 import org.tmatesoft.hg.internal.ManifestEntryBuilder;
 import org.tmatesoft.hg.internal.ManifestRevision;
 import org.tmatesoft.hg.internal.RevlogStream;
@@ -97,7 +101,7 @@
 		user = userName;
 	}
 	
-	public Nodeid commit(String message) throws HgRepositoryLockException {
+	public Nodeid commit(String message) throws HgIOException, HgRepositoryLockException {
 		
 		final HgChangelog clog = repo.getChangelog();
 		final int clogRevisionIndex = clog.getRevisionCount();
@@ -136,6 +140,7 @@
 		//
 		// Register new/changed
 		ArrayList<Path> newlyAddedFiles = new ArrayList<Path>();
+		ArrayList<Path> touchInDirstate = new ArrayList<Path>();
 		for (Pair<HgDataFile, ByteDataSupplier> e : files.values()) {
 			HgDataFile df = e.first();
 			Pair<Integer, Integer> fp = fileParents.get(df.getPath());
@@ -165,6 +170,7 @@
 			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo.getSessionContext(), contentStream);
 			Nodeid fileRev = fileWriter.addRevision(bac.toArray(), clogRevisionIndex, fp.first(), fp.second());
 			newManifestRevision.put(df.getPath(), fileRev);
+			touchInDirstate.add(df.getPath());
 		}
 		//
 		// Manifest
@@ -196,6 +202,18 @@
 				repo.getSessionContext().getLog().dump(getClass(), Severity.Error, ex, "Failed to write fncache, error ignored");
 			}
 		}
+		// bring dirstate up to commit state
+		Internals implRepo = Internals.getInstance(repo);
+		final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo);
+		dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource()));
+		for (Path p : removals) {
+			dirstateBuilder.recordRemoved(p);
+		}
+		for (Path p : touchInDirstate) {
+			dirstateBuilder.recordUncertain(p);
+		}
+		dirstateBuilder.parents(changesetRev, Nodeid.NULL);
+		dirstateBuilder.serialize();
 		return changesetRev;
 	}
 /*
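
The dirstate refresh added in the hunk above is the core of this changeset: once the changelog entry is written, commit() now brings .hg/dirstate in line with the new revision instead of leaving the working copy looking modified. The sketch below restates that sequence with descriptive comments; it assumes the local variables of commit() visible above (repo, removals, touchInDirstate, changesetRev) and uses only the types introduced here (Internals, DirstateBuilder, DirstateReader).

	// Assumed context: end of CommitFacility.commit(), after the file, manifest
	// and changelog revisions have been written.
	Internals implRepo = Internals.getInstance(repo);
	final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo);
	// start from the current on-disk dirstate
	dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource()));
	// files deleted by this commit become 'removed' entries
	for (Path p : removals) {
		dirstateBuilder.recordRemoved(p);
	}
	// files whose content was just written are recorded as 'uncertain', so the
	// next status run re-checks them against the working directory rather than
	// trusting a stale size/timestamp
	for (Path p : touchInDirstate) {
		dirstateBuilder.recordUncertain(p);
	}
	// the freshly created changeset becomes the sole dirstate parent (not a merge)
	dirstateBuilder.parents(changesetRev, Nodeid.NULL);
	// write the updated .hg/dirstate
	dirstateBuilder.serialize();
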
--- a/test/org/tmatesoft/hg/test/TestCommit.java	Fri Apr 26 20:04:17 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestCommit.java	Mon Apr 29 16:37:57 2013 +0200
@@ -24,13 +24,13 @@
 import java.nio.ByteBuffer;
 import java.util.List;
 
+import org.junit.Rule;
 import org.junit.Test;
 import org.tmatesoft.hg.core.HgAddRemoveCommand;
 import org.tmatesoft.hg.core.HgCatCommand;
 import org.tmatesoft.hg.core.HgChangeset;
 import org.tmatesoft.hg.core.HgCommitCommand;
 import org.tmatesoft.hg.core.HgLogCommand;
-import org.tmatesoft.hg.core.HgRevertCommand;
 import org.tmatesoft.hg.core.HgStatus.Kind;
 import org.tmatesoft.hg.core.HgStatusCommand;
 import org.tmatesoft.hg.core.Nodeid;
@@ -52,6 +52,9 @@
  */
 public class TestCommit {
 
+	@Rule
+	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
+
 	@Test
 	public void testCommitToNonEmpty() throws Exception {
 		File repoLoc = RepoUtils.initEmptyTempRepo("test-commit2non-empty");
@@ -67,15 +70,15 @@
 		Nodeid secondRev = cf.commit("SECOND");
 		//
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).execute();
-		assertEquals(2, commits.size());
+		errorCollector.assertEquals(2, commits.size());
 		HgChangeset c1 = commits.get(0);
 		HgChangeset c2 = commits.get(1);
-		assertEquals("FIRST", c1.getComment());
-		assertEquals("SECOND", c2.getComment());
-		assertEquals(df.getPath(), c2.getAffectedFiles().get(0));
-		assertEquals(c1.getNodeid(), c2.getFirstParentRevision());
-		assertEquals(Nodeid.NULL, c2.getSecondParentRevision());
-		assertEquals(secondRev, c2.getNodeid());
+		errorCollector.assertEquals("FIRST", c1.getComment());
+		errorCollector.assertEquals("SECOND", c2.getComment());
+		errorCollector.assertEquals(df.getPath(), c2.getAffectedFiles().get(0));
+		errorCollector.assertEquals(c1.getNodeid(), c2.getFirstParentRevision());
+		errorCollector.assertEquals(Nodeid.NULL, c2.getSecondParentRevision());
+		errorCollector.assertEquals(secondRev, c2.getNodeid());
 	}
 	
 	@Test
@@ -94,16 +97,16 @@
 		String comment = "commit 1";
 		Nodeid c1Rev = cf.commit(comment);
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).execute();
-		assertEquals(1, commits.size());
+		errorCollector.assertEquals(1, commits.size());
 		HgChangeset c1 = commits.get(0);
-		assertEquals(1, c1.getAffectedFiles().size());
-		assertEquals(df.getPath(), c1.getAffectedFiles().get(0));
-		assertEquals(0, c1.getRevisionIndex());
-		assertEquals(Nodeid.NULL, c1.getFirstParentRevision());
-		assertEquals(Nodeid.NULL, c1.getSecondParentRevision());
-		assertEquals(HgRepository.DEFAULT_BRANCH_NAME, c1.getBranch());
-		assertEquals(comment, c1.getComment());
-		assertEquals(c1Rev, c1.getNodeid());
+		errorCollector.assertEquals(1, c1.getAffectedFiles().size());
+		errorCollector.assertEquals(df.getPath(), c1.getAffectedFiles().get(0));
+		errorCollector.assertEquals(0, c1.getRevisionIndex());
+		errorCollector.assertEquals(Nodeid.NULL, c1.getFirstParentRevision());
+		errorCollector.assertEquals(Nodeid.NULL, c1.getSecondParentRevision());
+		errorCollector.assertEquals(HgRepository.DEFAULT_BRANCH_NAME, c1.getBranch());
+		errorCollector.assertEquals(comment, c1.getComment());
+		errorCollector.assertEquals(c1Rev, c1.getNodeid());
 		ByteArrayChannel bac = new ByteArrayChannel();
 		new HgCatCommand(hgRepo).file(df.getPath()).execute(bac);
 		assertArrayEquals(initialContent, bac.toArray());
@@ -134,9 +137,9 @@
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).range(parentCsetRevIndex+1, TIP).execute();
 		assertEquals(1, commits.size());
 		HgChangeset c1 = commits.get(0);
-		assertEquals(c1.getNodeid(), commitRev1);
-		assertEquals("branch1", c1.getBranch());
-		assertEquals("FIRST", c1.getComment());
+		errorCollector.assertEquals(c1.getNodeid(), commitRev1);
+		errorCollector.assertEquals("branch1", c1.getBranch());
+		errorCollector.assertEquals("FIRST", c1.getComment());
 		//
 		assertHgVerifyOk(repoLoc);
 	}
@@ -164,10 +167,10 @@
 		hgRepo = new HgLookup().detect(repoLoc);
 		List<HgChangeset> commits = new HgLogCommand(hgRepo).changeset(commitRev).execute();
 		HgChangeset cmt = commits.get(0);
-		assertEquals(1, cmt.getAddedFiles().size());
-		assertEquals("xx", cmt.getAddedFiles().get(0).getPath().toString());
-		assertEquals(1, cmt.getRemovedFiles().size());
-		assertEquals("d", cmt.getRemovedFiles().get(0).toString());
+		errorCollector.assertEquals(1, cmt.getAddedFiles().size());
+		errorCollector.assertEquals("xx", cmt.getAddedFiles().get(0).getPath().toString());
+		errorCollector.assertEquals(1, cmt.getRemovedFiles().size());
+		errorCollector.assertEquals("d", cmt.getRemovedFiles().get(0).toString());
 		ByteArrayChannel sink = new ByteArrayChannel();
 		new HgCatCommand(hgRepo).file(Path.create("xx")).changeset(commitRev).execute(sink);
 		assertArrayEquals("xyz".getBytes(), sink.toArray());
@@ -214,15 +217,15 @@
 		HgChangeset c1 = commits.get(0);
 		HgChangeset c2 = commits.get(1);
 		HgChangeset c3 = commits.get(2);
-		assertEquals(c1.getNodeid(), commitRev1);
-		assertEquals(c2.getNodeid(), commitRev2);
-		assertEquals(c3.getNodeid(), commitRev3);
-		assertEquals("branch1", c1.getBranch());
-		assertEquals("branch2", c2.getBranch());
-		assertEquals(DEFAULT_BRANCH_NAME, c3.getBranch());
-		assertEquals("FIRST", c1.getComment());
-		assertEquals("SECOND", c2.getComment());
-		assertEquals("THIRD", c3.getComment());
+		errorCollector.assertEquals(c1.getNodeid(), commitRev1);
+		errorCollector.assertEquals(c2.getNodeid(), commitRev2);
+		errorCollector.assertEquals(c3.getNodeid(), commitRev3);
+		errorCollector.assertEquals("branch1", c1.getBranch());
+		errorCollector.assertEquals("branch2", c2.getBranch());
+		errorCollector.assertEquals(DEFAULT_BRANCH_NAME, c3.getBranch());
+		errorCollector.assertEquals("FIRST", c1.getComment());
+		errorCollector.assertEquals("SECOND", c2.getComment());
+		errorCollector.assertEquals("THIRD", c3.getComment());
 		assertHgVerifyOk(repoLoc);
 	}
 	
@@ -239,17 +242,17 @@
 		HgCommitCommand cmd = new HgCommitCommand(hgRepo);
 		assertFalse(cmd.isMergeCommit());
 		Outcome r = cmd.message("FIRST").execute();
-		assertTrue(r.isOk());
+		errorCollector.assertTrue(r.isOk());
 		Nodeid c1 = cmd.getCommittedRevision();
 		
+		// check that modified files are no longer reported as such
 		hgRepo = new HgLookup().detect(repoLoc);
-		//
-		new HgRevertCommand(hgRepo).file(dfB.getPath()).execute(); // FIXME Hack to emulate dirstate update
-		//
 		TestStatus.StatusCollector status = new TestStatus.StatusCollector();
-		new HgStatusCommand(hgRepo).defaults().execute(status);
-		assertTrue(status.getErrors().isEmpty());
-		assertTrue(status.get(Kind.Modified).isEmpty());
+		new HgStatusCommand(hgRepo).all().execute(status);
+		errorCollector.assertTrue(status.getErrors().isEmpty());
+		errorCollector.assertTrue(status.get(Kind.Modified).isEmpty());
+		errorCollector.assertEquals(1, status.get(dfB.getPath()).size());
+		errorCollector.assertTrue(status.get(dfB.getPath()).contains(Kind.Clean));
 		
 		HgDataFile dfD = hgRepo.getFileNode("d");
 		assertTrue("[sanity]", dfD.exists());
@@ -260,23 +263,23 @@
 		cmd = new HgCommitCommand(hgRepo);
 		assertFalse(cmd.isMergeCommit());
 		r = cmd.message("SECOND").execute();
-		assertTrue(r.isOk());
+		errorCollector.assertTrue(r.isOk());
 		Nodeid c2 = cmd.getCommittedRevision();
 		//
 		hgRepo = new HgLookup().detect(repoLoc);
 		int lastRev = hgRepo.getChangelog().getLastRevision();
 		List<HgChangeset> csets = new HgLogCommand(hgRepo).range(lastRev-1, lastRev).execute();
-		assertEquals(csets.get(0).getNodeid(), c1);
-		assertEquals(csets.get(1).getNodeid(), c2);
-		assertEquals(csets.get(0).getComment(), "FIRST");
-		assertEquals(csets.get(1).getComment(), "SECOND");
+		errorCollector.assertEquals(csets.get(0).getNodeid(), c1);
+		errorCollector.assertEquals(csets.get(1).getNodeid(), c2);
+		errorCollector.assertEquals(csets.get(0).getComment(), "FIRST");
+		errorCollector.assertEquals(csets.get(1).getComment(), "SECOND");
 		assertHgVerifyOk(repoLoc);
 	}
 	
 	private void assertHgVerifyOk(File repoLoc) throws InterruptedException, IOException {
 		ExecHelper verifyRun = new ExecHelper(new OutputParser.Stub(), repoLoc);
 		verifyRun.run("hg", "verify");
-		assertEquals("hg verify", 0, verifyRun.getExitValue());
+		errorCollector.assertEquals("hg verify", 0, verifyRun.getExitValue());
 	}
 
 	public static void main(String[] args) throws Exception {
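
The test-side change swaps plain JUnit assertions for an ErrorCollectorExt rule so that a single test reports every failed check instead of aborting at the first one; ErrorCollectorExt is this project's helper and, judging by its use here, layers assertEquals/assertTrue shortcuts on top of the standard collector. For reference, a minimal sketch of the same pattern with the stock JUnit 4 ErrorCollector rule (the class and test names below are illustrative only):

	import static org.hamcrest.CoreMatchers.equalTo;

	import org.junit.Rule;
	import org.junit.Test;
	import org.junit.rules.ErrorCollector;

	public class ErrorCollectorDemo {

		@Rule
		public ErrorCollector collector = new ErrorCollector();

		@Test
		public void collectsAllFailures() {
			// each failing checkThat() is recorded and the test keeps running;
			// all collected failures are reported together when the test ends
			collector.checkThat("changeset count", 2, equalTo(2));
			collector.checkThat("comment", "FIRST", equalTo("FIRST"));
		}
	}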