# HG changeset patch
# User Artem Tikhomirov
# Date 1377016894 -7200
# Node ID 4ffc17c0b5340d543f5e87296c808c0ee4457b5d
# Parent  42b88709e41d4a89550e844234efad8932c27b9d
Merge: tests for resolver and complex scenario. Enable commit for merged revisions. Reuse file revisions if nothing changed

diff -r 42b88709e41d -r 4ffc17c0b534 src/org/tmatesoft/hg/core/HgCommitCommand.java
--- a/src/org/tmatesoft/hg/core/HgCommitCommand.java	Fri Aug 16 19:22:59 2013 +0200
+++ b/src/org/tmatesoft/hg/core/HgCommitCommand.java	Tue Aug 20 18:41:34 2013 +0200
@@ -102,9 +102,6 @@
 		try {
 			int[] parentRevs = new int[2];
 			detectParentFromDirstate(parentRevs);
-			if (parentRevs[0] != NO_REVISION && parentRevs[1] != NO_REVISION) {
-				throw new HgBadArgumentException("Sorry, I'm not yet smart enough to perform merge commits", null);
-			}
 			HgWorkingCopyStatusCollector sc = new HgWorkingCopyStatusCollector(repo);
 			Record status = sc.status(HgRepository.WORKING_COPY);
 			if (status.getModified().size() == 0 && status.getAdded().size() == 0 && status.getRemoved().size() == 0) {
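
With the guard above removed, a working copy that has two dirstate parents goes through the regular commit path. A rough client-side sketch of the intended flow (illustration only, not code from this changeset; repoLocation and headToMerge are placeholders, imports are omitted, and the calls are the same ones the tests below exercise):

	// Sketch: merge another head into the working copy, then commit the result
	void commitMerge(File repoLocation, int headToMerge) throws Exception {
		HgRepository hgRepo = new HgLookup().detect(repoLocation);
		// conflicts are left unresolved by the default MediatorBase
		new HgMergeCommand(hgRepo).changeset(headToMerge).execute(new HgMergeCommand.MediatorBase());
		HgMergeState ms = hgRepo.getMergeState();
		ms.refresh();
		if (ms.getConflicts().isEmpty()) { // commit only a conflict-free merge
			Outcome o = new HgCommitCommand(hgRepo).message("merge of head " + headToMerge).execute();
			assert o.isOk() : o.getMessage();
		}
	}
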
Work in progress") - public abstract static class MediatorBase implements Mediator { + public static class MediatorBase implements Mediator { /** * Implementation keeps this revision */ @@ -329,6 +328,13 @@ public void fastForwardB(HgFileRevision base, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException { resolver.use(second); } + + /** + * Implementation marks file as unresolved + */ + public void resolve(HgFileRevision base, HgFileRevision first, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException { + resolver.unresolved(); + } } private static class ResolverImpl implements Resolver { diff -r 42b88709e41d -r 4ffc17c0b534 src/org/tmatesoft/hg/internal/CommitFacility.java --- a/src/org/tmatesoft/hg/internal/CommitFacility.java Fri Aug 16 19:22:59 2013 +0200 +++ b/src/org/tmatesoft/hg/internal/CommitFacility.java Tue Aug 20 18:41:34 2013 +0200 @@ -19,14 +19,13 @@ import static org.tmatesoft.hg.repo.HgRepository.DEFAULT_BRANCH_NAME; import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION; import static org.tmatesoft.hg.repo.HgRepositoryFiles.*; -import static org.tmatesoft.hg.repo.HgRepositoryFiles.Branch; -import static org.tmatesoft.hg.repo.HgRepositoryFiles.UndoBranch; import static org.tmatesoft.hg.util.LogFacility.Severity.Error; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; @@ -39,11 +38,14 @@ import org.tmatesoft.hg.core.HgIOException; import org.tmatesoft.hg.core.HgRepositoryLockException; import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.DataSerializer.ByteArraySerializer; import org.tmatesoft.hg.internal.DataSerializer.DataSource; import org.tmatesoft.hg.repo.HgChangelog; import org.tmatesoft.hg.repo.HgDataFile; import org.tmatesoft.hg.repo.HgPhase; import org.tmatesoft.hg.repo.HgRuntimeException; +import org.tmatesoft.hg.util.ByteChannel; +import org.tmatesoft.hg.util.CancelledException; import org.tmatesoft.hg.util.Pair; import org.tmatesoft.hg.util.Path; @@ -149,19 +151,42 @@ DataSource bds = e.second(); Pair fp = fileParents.get(df.getPath()); if (fp == null) { - // NEW FILE - fp = new Pair(NO_REVISION, NO_REVISION); + // NEW FILE, either just added or resurrected from p2 + Nodeid fileRevInP2; + if ((fileRevInP2 = c2Manifest.nodeid(df.getPath())) != null) { + fp = new Pair(df.getRevisionIndex(fileRevInP2), NO_REVISION); + } else { + // brand new + fp = new Pair(NO_REVISION, NO_REVISION); + } } - RevlogStream contentStream = repo.getImplAccess().getStream(df); + // TODO if fp.first() != NO_REVISION and fp.second() != NO_REVISION check if one + // revision is ancestor of another and use the latest as p1, then + Nodeid fileRev = null; final boolean isNewFile = !df.exists(); - RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo, contentStream, transaction); - Nodeid fileRev = fileWriter.addRevision(bds, clogRevisionIndex, fp.first(), fp.second()).second(); + if (fp.first() != NO_REVISION && fp.second() == NO_REVISION && !isNewFile) { + // compare file contents to see if anything has changed, and reuse old revision, if unchanged. 
diff -r 42b88709e41d -r 4ffc17c0b534 src/org/tmatesoft/hg/internal/CommitFacility.java
--- a/src/org/tmatesoft/hg/internal/CommitFacility.java	Fri Aug 16 19:22:59 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/CommitFacility.java	Tue Aug 20 18:41:34 2013 +0200
@@ -19,14 +19,13 @@
 import static org.tmatesoft.hg.repo.HgRepository.DEFAULT_BRANCH_NAME;
 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
 import static org.tmatesoft.hg.repo.HgRepositoryFiles.*;
-import static org.tmatesoft.hg.repo.HgRepositoryFiles.Branch;
-import static org.tmatesoft.hg.repo.HgRepositoryFiles.UndoBranch;
 import static org.tmatesoft.hg.util.LogFacility.Severity.Error;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -39,11 +38,14 @@
 import org.tmatesoft.hg.core.HgIOException;
 import org.tmatesoft.hg.core.HgRepositoryLockException;
 import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.DataSerializer.ByteArraySerializer;
 import org.tmatesoft.hg.internal.DataSerializer.DataSource;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgPhase;
 import org.tmatesoft.hg.repo.HgRuntimeException;
+import org.tmatesoft.hg.util.ByteChannel;
+import org.tmatesoft.hg.util.CancelledException;
 import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
 
@@ -149,19 +151,42 @@
 			DataSource bds = e.second();
 			Pair fp = fileParents.get(df.getPath());
 			if (fp == null) {
-				// NEW FILE
-				fp = new Pair(NO_REVISION, NO_REVISION);
+				// NEW FILE, either just added or resurrected from p2
+				Nodeid fileRevInP2;
+				if ((fileRevInP2 = c2Manifest.nodeid(df.getPath())) != null) {
+					fp = new Pair(df.getRevisionIndex(fileRevInP2), NO_REVISION);
+				} else {
+					// brand new
+					fp = new Pair(NO_REVISION, NO_REVISION);
+				}
 			}
-			RevlogStream contentStream = repo.getImplAccess().getStream(df);
+			// TODO if fp.first() != NO_REVISION and fp.second() != NO_REVISION check if one
+			// revision is ancestor of another and use the latest as p1, then
+			Nodeid fileRev = null;
 			final boolean isNewFile = !df.exists();
-			RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo, contentStream, transaction);
-			Nodeid fileRev = fileWriter.addRevision(bds, clogRevisionIndex, fp.first(), fp.second()).second();
+			if (fp.first() != NO_REVISION && fp.second() == NO_REVISION && !isNewFile) {
+				// compare file contents to see if anything has changed, and reuse old revision, if unchanged.
+				// XXX ineffective, need better access to revision content
+				ByteArraySerializer bas = new ByteArraySerializer();
+				bds.serialize(bas);
+				final byte[] newContent = bas.toByteArray();
+				// unless there's a way to reset DataSource, replace it with the content just read
+				bds = new DataSerializer.ByteArrayDataSource(newContent);
+				if (new ComparatorChannel(newContent).same(df, fp.first())) {
+					fileRev = df.getRevision(fp.first());
+				}
+			}
+			if (fileRev == null) {
+				RevlogStream contentStream = repo.getImplAccess().getStream(df);
+				RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo, contentStream, transaction);
+				fileRev = fileWriter.addRevision(bds, clogRevisionIndex, fp.first(), fp.second()).second();
+				if (isNewFile) {
+					// registerNew shall go after fileWriter.addRevision as it needs to know if data is inlined or not
+					fncache.registerNew(df.getPath(), contentStream);
+				}
+			}
 			newManifestRevision.put(df.getPath(), fileRev);
 			touchInDirstate.add(df.getPath());
-			if (isNewFile) {
-				// registerNew shall go after fileWriter.addRevision as it needs to know if data is inlined or not
-				fncache.registerNew(df.getPath(), contentStream);
-			}
 		}
 		// final EncodingHelper encHelper = repo.buildFileNameEncodingHelper();
 
@@ -251,6 +276,39 @@
 			new FileUtils(repo.getLog(), this).closeQuietly(w, lastMessage);
 		}
 	}
+
+	private static class ComparatorChannel implements ByteChannel {
+		private int index;
+		private final byte[] content;
+
+		public ComparatorChannel(byte[] contentToCompare) {
+			content = contentToCompare;
+		}
+
+		public int write(ByteBuffer buffer) throws IOException, CancelledException {
+			int consumed = 0;
+			while (buffer.hasRemaining()) {
+				byte b = buffer.get();
+				consumed++;
+				if (content[index++] != b) {
+					throw new CancelledException();
+				}
+			}
+			return consumed;
+		}
+
+		public boolean same(HgDataFile df, int fileRevIndex) {
+			index = 0;
+			try {
+				df.contentWithFilters(fileRevIndex, this);
+				return index == content.length;
+			} catch (CancelledException ex) {
+				// comparison failed, content differs, ok to go on
+			}
+			return false;
+		}
+	}
+
 /*
 	private Pair getManifestParents() {
 		return new Pair(extractManifestRevisionIndex(p1Commit), extractManifestRevisionIndex(p2Commit));
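
The net effect of the CommitFacility change: a file whose commit content is identical to its single parent revision gets no new revlog entry, and the parent's file revision is recorded in the new manifest instead. A sketch of how that is observable through the public API (illustration only; the changelog indexes are placeholders, and it assumes HgManifest.getFileRevision returns the file revision Nodeid, as the assertions in ComplexTest below rely on):

	// Sketch: a file untouched by the merge keeps the very same file revision it had in p1
	HgManifest mf = hgRepo.getManifest();
	Nodeid inFirstParent = mf.getFileRevision(firstParentClogIndex, Path.create("file1"));
	Nodeid inMergeCommit = mf.getFileRevision(mergeClogIndex, Path.create("file1"));
	// no fresh revlog entry was written for the unchanged file
	assert inFirstParent.equals(inMergeCommit);
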
diff -r 42b88709e41d -r 4ffc17c0b534 src/org/tmatesoft/hg/internal/RevlogStreamWriter.java
--- a/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Fri Aug 16 19:22:59 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevlogStreamWriter.java	Tue Aug 20 18:41:34 2013 +0200
@@ -144,15 +144,19 @@
 		populateLastEntryContent();
 		//
 		byte[] contentByteArray = toByteArray(content);
+		Nodeid p1Rev = revision(p1);
+		Nodeid p2Rev = revision(p2);
+		Nodeid newRev = Nodeid.fromBinary(dh.sha1(p1Rev, p2Rev, contentByteArray).asBinary(), 0);
+		if (newRev.equals(p1Rev)) { // shall never happen, same content but different parents give new SHA. Doesn't hurt to check, though
+			assert p2Rev.isNull();
+			return new Pair(p1, p1Rev);
+		}
+		//
 		Patch patch = GeneratePatchInspector.delta(lastFullContent.second(), contentByteArray);
 		int patchSerializedLength = patch.serializedLength();
-		
 		final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, contentByteArray.length);
 		DataSerializer.DataSource dataSource = writeComplete ? new ByteArrayDataSource(contentByteArray) : patch.new PatchDataSource();
 		//
-		Nodeid p1Rev = revision(p1);
-		Nodeid p2Rev = revision(p2);
-		Nodeid newRev = Nodeid.fromBinary(dh.sha1(p1Rev, p2Rev, contentByteArray).asBinary(), 0);
 		doAdd(newRev, p1, p2, linkRevision, writeComplete ? lastEntryIndex+1 : lastEntryBase, contentByteArray.length, dataSource);
 		lastFullContent = new Pair(lastEntryIndex, contentByteArray);
 		return new Pair(lastEntryIndex, lastEntryRevision);
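
The early check against p1Rev works because a revlog nodeid is a pure function of the two parent nodeids and the revision content: Mercurial feeds SHA-1 the parents lowest-first, then the text. A plain-JDK sketch of that derivation (illustration only, not hg4j's DigestHelper; needs Java 9+ for Arrays.compareUnsigned; each parent is a 20-byte nodeid, the null parent being 20 zero bytes):

	// Sketch with java.security only: how a revlog nodeid is derived from parents and content
	static byte[] nodeid(byte[] p1, byte[] p2, byte[] content) throws java.security.NoSuchAlgorithmException {
		java.security.MessageDigest sha1 = java.security.MessageDigest.getInstance("SHA-1");
		// parents are hashed lowest-first, so their order does not influence the result
		if (java.util.Arrays.compareUnsigned(p1, p2) <= 0) {
			sha1.update(p1);
			sha1.update(p2);
		} else {
			sha1.update(p2);
			sha1.update(p1);
		}
		sha1.update(content);
		return sha1.digest(); // 20 bytes; identical parents and content always give the identical nodeid
	}
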
diff -r 42b88709e41d -r 4ffc17c0b534 src/org/tmatesoft/hg/repo/HgMergeState.java
--- a/src/org/tmatesoft/hg/repo/HgMergeState.java	Fri Aug 16 19:22:59 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgMergeState.java	Tue Aug 20 18:41:34 2013 +0200
@@ -32,6 +32,7 @@
 import org.tmatesoft.hg.internal.LineReader;
 import org.tmatesoft.hg.internal.ManifestRevision;
 import org.tmatesoft.hg.internal.Pool;
+import org.tmatesoft.hg.util.LogFacility.Severity;
 import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.PathRewrite;
@@ -127,7 +128,12 @@
 		final int rp1 = hgRepo.getChangelog().getRevisionIndex(stateParent);
 		hgRepo.getManifest().walk(rp1, rp1, m1);
 		while (lines.hasNext()) {
+			s = lines.next();
 			String[] r = s.split("\\00");
+			if (r.length < 7) {
+				repo.getLog().dump(getClass(), Severity.Error, "Expect at least 7 zero-separated fields in the merge state file, not %d. Entry skipped", r.length);
+				continue;
+			}
 			Path p1fname = pathPool.path(r[3]);
 			Nodeid nidP1 = m1.nodeid(p1fname);
 			Nodeid nidCA = nodeidPool.unify(Nodeid.fromAscii(r[5]));
@@ -218,7 +224,7 @@
 
 	/**
 	 * List of conflicts as recorded in the merge state information file.
-	 * Note, this information is not valid unless {@link #isStale()} is true.
+	 * Note, this information is not valid when {@link #isStale()} is true.
 	 * 
 	 * @return non-null list with both resolved and unresolved conflicts.
 	 */
diff -r 42b88709e41d -r 4ffc17c0b534 test/org/tmatesoft/hg/test/ComplexTest.java
--- a/test/org/tmatesoft/hg/test/ComplexTest.java	Fri Aug 16 19:22:59 2013 +0200
+++ b/test/org/tmatesoft/hg/test/ComplexTest.java	Tue Aug 20 18:41:34 2013 +0200
@@ -26,9 +26,12 @@
 import org.tmatesoft.hg.core.HgCheckoutCommand;
 import org.tmatesoft.hg.core.HgCommitCommand;
 import org.tmatesoft.hg.core.HgInitCommand;
+import org.tmatesoft.hg.core.HgMergeCommand;
 import org.tmatesoft.hg.core.HgRevertCommand;
 import org.tmatesoft.hg.repo.HgManifest;
+import org.tmatesoft.hg.repo.HgMergeState;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.Outcome;
 import org.tmatesoft.hg.util.Path;
 /**
  *
@@ -97,4 +100,54 @@
 		final HgManifest mf = hgRepo.getManifest();
 		errorCollector.assertEquals(mf.getFileRevision(0, fa), mf.getFileRevision(3, fa)); // "A2" was reverted
 	}
+
+	@Test
+	public void testMergeAndCommit() throws Exception {
+		File repoLoc = RepoUtils.createEmptyDir("composite-scenario-2");
+		HgRepository hgRepo = new HgInitCommand().location(repoLoc).revlogV1().execute();
+		Path fa = Path.create("file1"), fb = Path.create("file2"), fc = Path.create("file3");
+		final File fileA = new File(repoLoc, fa.toString());
+		final File fileB = new File(repoLoc, fb.toString());
+		// rev0: +file1, +file2
+		RepoUtils.createFile(fileA, "first file");
+		RepoUtils.createFile(fileB, "second file");
+		new HgAddRemoveCommand(hgRepo).add(fa, fb).execute();
+		final HgCommitCommand commitCmd = new HgCommitCommand(hgRepo);
+		commitCmd.message("FIRST").execute();
+		// rev1: *file1, *file2
+		RepoUtils.modifyFileAppend(fileA, "A1");
+		RepoUtils.modifyFileAppend(fileB, "B1");
+		commitCmd.message("SECOND").execute();
+		// rev2: *file1, -file2
+		RepoUtils.modifyFileAppend(fileA, "A2");
+		fileB.delete();
+		new HgAddRemoveCommand(hgRepo).remove(fb).execute();
+		commitCmd.message("THIRD").execute();
+		// rev3: fork rev0, +file3, *file2
+		new HgCheckoutCommand(hgRepo).changeset(0).clean(true).execute();
+		final File fileC = new File(repoLoc, fc.toString());
+		RepoUtils.createFile(fileC, "third file");
+		RepoUtils.modifyFileAppend(fileB, "B2");
+		new HgAddRemoveCommand(hgRepo).add(fc).execute();
+		commitCmd.message("FOURTH").execute();
+		// rev4: *file3
+		RepoUtils.modifyFileAppend(fileC, "C1");
+		commitCmd.message("FIFTH").execute();
+		// rev5: merge rev2 with rev3
+		new HgCheckoutCommand(hgRepo).changeset(2).clean(true).execute();
+		new HgMergeCommand(hgRepo).changeset(3).execute(new HgMergeCommand.MediatorBase());
+		commitCmd.message("SIXTH: merge rev2 and rev3");
+		errorCollector.assertTrue(commitCmd.isMergeCommit());
+		HgMergeState ms = hgRepo.getMergeState();
+		ms.refresh();
+		errorCollector.assertTrue(ms.isMerging());
+		errorCollector.assertFalse(ms.isStale());
+		errorCollector.assertEquals(0, ms.getConflicts().size());
+		Outcome o = commitCmd.execute();
+		errorCollector.assertTrue(o.getMessage(), o.isOk());
+		ms.refresh();
+		errorCollector.assertFalse(ms.isMerging());
+		errorCollector.assertEquals(0, ms.getConflicts().size());
+		RepoUtils.assertHgVerifyOk(errorCollector, repoLoc);
+	}
 }
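
The reworked TestMerge below replaces the FIXME placeholders with real assertions: after a merge that hits a conflict it verifies the repository and inspects the recorded merge state. The inspection pattern it asserts looks roughly like this (sketch only; hgRepo again stands for a repository with an uncommitted, conflicting merge):

	// Sketch: enumerate unresolved files recorded in .hg/merge/state after a conflicting merge
	HgMergeState ms = hgRepo.getMergeState();
	ms.refresh();
	if (ms.isMerging() && !ms.isStale()) {
		for (HgMergeState.Entry e : ms.getConflicts()) {
			if (e.getState() == HgMergeState.Kind.Unresolved) {
				System.out.println("unresolved: " + e.getActualFile());
			}
		}
	}
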
diff -r 42b88709e41d -r 4ffc17c0b534 test/org/tmatesoft/hg/test/TestMerge.java
--- a/test/org/tmatesoft/hg/test/TestMerge.java	Fri Aug 16 19:22:59 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestMerge.java	Tue Aug 20 18:41:34 2013 +0200
@@ -16,8 +16,11 @@
  */
 package org.tmatesoft.hg.test;
 
+import static org.tmatesoft.hg.util.Path.create;
+
 import java.io.File;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 
@@ -27,8 +30,13 @@
 import org.tmatesoft.hg.core.HgFileRevision;
 import org.tmatesoft.hg.core.HgMergeCommand;
 import org.tmatesoft.hg.core.HgMergeCommand.Resolver;
+import org.tmatesoft.hg.core.HgStatus.Kind;
+import org.tmatesoft.hg.core.HgStatusCommand;
 import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgMergeState;
+import org.tmatesoft.hg.repo.HgMergeState.Entry;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.util.Path;
 
 /**
  *
@@ -78,16 +86,31 @@
 				errorCollector.fail("There's no conflict in changesets 1 and 2 merge");
 			}
 		});
-		// FIXME run hg status to see changes
+		RepoUtils.assertHgVerifyOk(errorCollector, repoLoc1);
+		TestStatus.StatusCollector status = new TestStatus.StatusCollector();
+		new HgStatusCommand(repo).all().execute(status);
+		final List clean = status.get(Kind.Clean);
+		final List modified = status.get(Kind.Modified);
+		Collections.sort(clean);
+		Collections.sort(modified);
+		errorCollector.assertEquals(new Path[] {create("file1"), create("file3"), create("file4")}, clean.toArray());
+		errorCollector.assertEquals(new Path[] {create("file2"), create("file5")}, modified.toArray());
 		repo = new HgLookup().detect(repoLoc2);
 		cmd = new HgMergeCommand(repo);
-		cmd.changeset(3).execute(new HgMergeCommand.MediatorBase() {
-			
-			public void resolve(HgFileRevision base, HgFileRevision first, HgFileRevision second, Resolver resolver) throws HgCallbackTargetException {
-				resolver.unresolved();
-			}
-		});
-		// FIXME run hg status and hg resolve to see changes
+		cmd.changeset(3).execute(new HgMergeCommand.MediatorBase());
+		RepoUtils.assertHgVerifyOk(errorCollector, repoLoc2);
+		new HgStatusCommand(repo).all().execute(status = new TestStatus.StatusCollector());
+		errorCollector.assertEquals(1, status.get(Kind.Modified).size());
+		errorCollector.assertEquals(create("file1"), status.get(Kind.Modified).get(0));
+		final HgMergeState ms = repo.getMergeState();
+		ms.refresh();
+		errorCollector.assertTrue(ms.isMerging());
+		errorCollector.assertFalse(ms.isStale());
+		errorCollector.assertFalse(ms.getStateParent().isNull());
+		errorCollector.assertEquals(1, ms.getConflicts().size());
+		final Entry entry = ms.getConflicts().get(0);
+		errorCollector.assertEquals(create("file1"), entry.getActualFile());
+		errorCollector.assertEquals(HgMergeState.Kind.Unresolved, entry.getState());
 	}
 
 	private static class MergeNotificationCollector implements HgMergeCommand.Mediator {