Mercurial > hg4j
changeset 651:6e98d34eaca8
Push: tests (push to empty, push changes, respect secret)
author   | Artem Tikhomirov <tikhomirov.artem@gmail.com>
date     | Mon, 01 Jul 2013 21:19:53 +0200
parents  | 3b275cc2d2aa
children | cd77bf51b562
files    | build.xml src/org/tmatesoft/hg/internal/BundleGenerator.java src/org/tmatesoft/hg/internal/RevisionSet.java src/org/tmatesoft/hg/repo/HgRemoteRepository.java test/org/tmatesoft/hg/test/TestPush.java test/org/tmatesoft/hg/test/TestRevisionSet.java
diffstat | 6 files changed, 287 insertions(+), 33 deletions(-)
--- a/build.xml	Fri Jun 28 19:27:26 2013 +0200
+++ b/build.xml	Mon Jul 01 21:19:53 2013 +0200
@@ -111,6 +111,7 @@
 			<test name="org.tmatesoft.hg.test.TestDiffHelper" />
 			<test name="org.tmatesoft.hg.test.TestRepositoryLock" />
 			<test name="org.tmatesoft.hg.test.TestRevisionSet" />
+			<test name="org.tmatesoft.hg.test.TestPush" />
 			<test name="org.tmatesoft.hg.test.ComplexTest" />
 		</junit>
 	</target>
--- a/src/org/tmatesoft/hg/internal/BundleGenerator.java	Fri Jun 28 19:27:26 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/BundleGenerator.java	Mon Jul 01 21:19:53 2013 +0200
@@ -68,7 +68,6 @@
 		}
 		clogRevsVector.sort(true);
 		final int[] clogRevs = clogRevsVector.toArray();
-		System.out.printf("Changelog: %s\n", Arrays.toString(clogRevs));
 		final IntMap<Nodeid> clogMap = new IntMap<Nodeid>(changesets.size());
 		final IntVector manifestRevs = new IntVector(changesets.size(), 0);
 		final List<HgDataFile> files = new ArrayList<HgDataFile>();
@@ -88,28 +87,8 @@
 			}
 		}, clogRevs);
 		manifestRevs.sort(true);
-		System.out.printf("Manifest: %s\n", Arrays.toString(manifestRevs.toArray(true)));
-		///////////////
-		for (HgDataFile df : sortedByName(files)) {
-			RevlogStream s = repo.getImplAccess().getStream(df);
-			final IntVector fileRevs = new IntVector();
-			s.iterate(0, TIP, false, new RevlogStream.Inspector() {
-
-				public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
-					if (Arrays.binarySearch(clogRevs, linkRevision) >= 0) {
-						fileRevs.add(revisionIndex);
-					}
-				}
-			});
-			fileRevs.sort(true);
-			System.out.printf("%s: %s\n", df.getPath(), Arrays.toString(fileRevs.toArray(true)));
-		}
-		if (Boolean.FALSE.booleanValue()) {
-			return null;
-		}
-		///////////////
 		//
-		final File bundleFile = File.createTempFile("hg4j-", "bundle");
+		final File bundleFile = File.createTempFile("hg4j-", ".bundle");
 		final FileOutputStream osBundle = new FileOutputStream(bundleFile);
 		final OutputStreamSerializer outRaw = new OutputStreamSerializer(osBundle);
 		outRaw.write("HG10UN".getBytes(), 0, 6);
@@ -187,7 +166,7 @@
 
 		public ChunkGenerator(DataSerializer dataSerializer, IntMap<Nodeid> clogNodeidMap) {
 			ds = dataSerializer;
-			parentMap = new IntMap<Nodeid>(clogNodeidMap.size());;
+			parentMap = new IntMap<Nodeid>(clogNodeidMap.size());
 			clogMap = clogNodeidMap;
 		}
 
@@ -203,9 +182,29 @@
 			revs2read[0] = startParent;
 			System.arraycopy(revisions, 0, revs2read, 1, revisions.length);
 		}
+		// FIXME this is a hack to fill parentsMap with
+		// parents of elements that we are not going to meet with regular
+		// iteration, e.g. changes from a different branch (with some older parent),
+		// scenario: two revisions added to two different branches
+		// revisions[10, 11], parents(10) == 9, parents(11) == 7
+		// revs2read == [9,10,11], and parentsMap lacks entry for parent rev7.
+		fillMissingParentsMap(s, revisions);
 		s.iterate(revs2read, true, this);
 	}
 
+	private void fillMissingParentsMap(RevlogStream s, int[] revisions) throws HgRuntimeException {
+		int[] p = new int[2];
+		for (int i = 1; i < revisions.length; i++) {
+			s.parents(revisions[i], p);
+			if (p[0] != NO_REVISION && Arrays.binarySearch(revisions, p[0]) < 0) {
+				parentMap.put(p[0], Nodeid.fromBinary(s.nodeid(p[0]), 0));
+			}
+			if (p[1] != NO_REVISION && Arrays.binarySearch(revisions, p[1]) < 0) {
+				parentMap.put(p[1], Nodeid.fromBinary(s.nodeid(p[1]), 0));
+			}
+		}
+	}
+
 	public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
 		try {
 			parentMap.put(revisionIndex, Nodeid.fromBinary(nodeid, 0));
--- a/src/org/tmatesoft/hg/internal/RevisionSet.java	Fri Jun 28 19:27:26 2013 +0200
+++ b/src/org/tmatesoft/hg/internal/RevisionSet.java	Mon Jul 01 21:19:53 2013 +0200
@@ -92,7 +92,8 @@
 	}
 
 	/**
-	 * Any ancestor of an element from the supplied children set found in this one.
+	 * Any ancestor of an element from the supplied child set found in this one.
+	 * Elements of the supplied child set are not part of return value.
 	 */
 	public RevisionSet ancestors(RevisionSet children, HgParentChildMap<HgChangelog> parentHelper) {
 		if (isEmpty()) {
@@ -190,6 +191,9 @@
 		return elements.isEmpty();
 	}
 
+	public int size() {
+		return elements.size();
+	}
 	public List<Nodeid> asList() {
 		return new ArrayList<Nodeid>(elements);
 	}
--- a/src/org/tmatesoft/hg/repo/HgRemoteRepository.java	Fri Jun 28 19:27:26 2013 +0200
+++ b/src/org/tmatesoft/hg/repo/HgRemoteRepository.java	Mon Jul 01 21:19:53 2013 +0200
@@ -802,7 +802,6 @@
 	 * @return list of draft roots on remote server
 	 */
 	public List<Nodeid> draftRoots() {
-		assert !pub;
 		return droots;
 	}
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/org/tmatesoft/hg/test/TestPush.java	Mon Jul 01 21:19:53 2013 +0200
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2013 TMate Software Ltd
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * For information on how to redistribute this software under
+ * the terms of a license other than GNU General Public License
+ * contact TMate Software at support@hg4j.com
+ */
+package org.tmatesoft.hg.test;
+
+import static org.junit.Assert.*;
+import static org.tmatesoft.hg.repo.HgRepository.TIP;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.tmatesoft.hg.core.HgCheckoutCommand;
+import org.tmatesoft.hg.core.HgCommitCommand;
+import org.tmatesoft.hg.core.HgOutgoingCommand;
+import org.tmatesoft.hg.core.HgPushCommand;
+import org.tmatesoft.hg.core.Nodeid;
+import org.tmatesoft.hg.internal.PhasesHelper;
+import org.tmatesoft.hg.internal.RevisionSet;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.repo.HgLookup;
+import org.tmatesoft.hg.repo.HgRemoteRepository;
+import org.tmatesoft.hg.repo.HgRepository;
+
+/**
+ * @author Artem Tikhomirov
+ * @author TMate Software Ltd.
+ */
+public class TestPush {
+
+	@Rule
+	public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
+
+	@Test
+	public void testPushToEmpty() throws Exception {
+		File srcRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-push2empty-src", false);
+		File dstRepoLoc = RepoUtils.initEmptyTempRepo("test-push2empty-dst");
+		HgServer server = new HgServer().start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			HgPushCommand cmd = new HgPushCommand(srcRepo);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			cmd.destination(dstRemote);
+			cmd.execute();
+			final HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			checkRepositoriesAreSame(srcRepo, dstRepo);
+			final List<Nodeid> outgoing = new HgOutgoingCommand(srcRepo).against(dstRemote).executeLite();
+			errorCollector.assertTrue(outgoing.toString(), outgoing.isEmpty());
+		} finally {
+			server.stop();
+		}
+	}
+
+	@Test
+	public void testPushChanges() throws Exception {
+		File srcRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-push-src", false);
+		File dstRepoLoc = RepoUtils.cloneRepoToTempLocation("test-annotate", "test-push-dst", false);
+		File f1 = new File(srcRepoLoc, "file1");
+		assertTrue("[sanity]", f1.canWrite());
+		HgServer server = new HgServer().start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			RepoUtils.modifyFileAppend(f1, "change1");
+			new HgCommitCommand(srcRepo).message("Commit 1").execute();
+			new HgCheckoutCommand(srcRepo).changeset(7).clean(true).execute();
+			assertEquals("[sanity]", "no-merge", srcRepo.getWorkingCopyBranchName());
+			RepoUtils.modifyFileAppend(f1, "change2");
+			new HgCommitCommand(srcRepo).message("Commit 2").execute();
+			//
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			checkRepositoriesAreSame(srcRepo, hgLookup.detect(dstRepoLoc));
+			final List<Nodeid> outgoing = new HgOutgoingCommand(srcRepo).against(dstRemote).executeLite();
+			errorCollector.assertTrue(outgoing.toString(), outgoing.isEmpty());
+		} finally {
+			server.stop();
+		}
+	}
+
+	@Test
+	public void testPushToNonPublishingServer() throws Exception {
+		Assert.fail();
+	}
+
+	@Test
+	public void testPushToPublishingServer() throws Exception {
+		Assert.fail();
+	}
+
+	@Test
+	public void testPushSecretChangesets() throws Exception {
+		// copy, not clone as latter updates phase information
+		File srcRepoLoc = RepoUtils.copyRepoToTempLocation("test-phases", "test-push-no-secret-src");
+		File dstRepoLoc = RepoUtils.initEmptyTempRepo("test-push-no-secret-dst");
+		HgServer server = new HgServer().start(dstRepoLoc);
+		try {
+			final HgLookup hgLookup = new HgLookup();
+			final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
+			final HgRemoteRepository dstRemote = hgLookup.detect(server.getURL());
+			PhasesHelper phaseHelper = new PhasesHelper(HgInternals.getImplementationRepo(srcRepo));
+			final RevisionSet allSecret = phaseHelper.allSecret();
+			assertFalse("[sanity]", allSecret.isEmpty());
+			new HgPushCommand(srcRepo).destination(dstRemote).execute();
+			HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
+			final HgChangelog srcClog = srcRepo.getChangelog();
+			final HgChangelog dstClog = dstRepo.getChangelog();
+			errorCollector.assertEquals(srcClog.getRevisionCount() - allSecret.size(), dstClog.getRevisionCount());
+			for (Nodeid n : allSecret) {
+				errorCollector.assertTrue(n.toString(), !dstClog.isKnown(n));
+			}
+		} finally {
+			server.stop();
+		}
+	}
+
+	@Test
+	public void testUpdateBookmarkOnPush() throws Exception {
+		Assert.fail();
+	}
+
+
+	private void checkRepositoriesAreSame(HgRepository srcRepo, HgRepository dstRepo) {
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevisionCount(), dstRepo.getChangelog().getRevisionCount());
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevision(0), dstRepo.getChangelog().getRevision(0));
+		errorCollector.assertEquals(srcRepo.getChangelog().getRevision(TIP), dstRepo.getChangelog().getRevision(TIP));
+	}
+
+	static class HgServer {
+		private Process serverProcess;
+
+		public HgServer start(File dir) throws IOException, InterruptedException {
+			if (serverProcess != null) {
+				stop();
+			}
+			List<String> cmdline = new ArrayList<String>();
+			cmdline.add("hg");
+			cmdline.add("--config");
+			cmdline.add("web.allow_push=*");
+			cmdline.add("--config");
+			cmdline.add("web.push_ssl=False");
+			cmdline.add("--config");
+			cmdline.add("server.validate=True");
+			cmdline.add("--config");
+			cmdline.add(String.format("web.port=%d", port()));
+			cmdline.add("serve");
+			serverProcess = new ProcessBuilder(cmdline).directory(dir).start();
+			Thread.sleep(500);
+			return this;
+		}
+
+		public URL getURL() throws MalformedURLException {
+			return new URL(String.format("http://localhost:%d/", port()));
+		}
+
+		public int port() {
+			return 9090;
+		}
+
+		public void stop() {
+			if (serverProcess == null) {
+				return;
+			}
+			// if Process#destroy() doesn't perform well with scripts and child processes
+			// may need to write server pid to a file and send a kill <pid> here
			serverProcess.destroy();
+			serverProcess = null;
+		}
+	}
+}
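Aside, not part of the changeset itself: the push workflow these new tests exercise reduces to a handful of hg4j calls. The sketch below is distilled from testPushChanges above; the local repository path and the server URL are placeholders, and the snippet assumes an `hg serve` instance is already running at that URL.

    // Distilled from TestPush#testPushChanges; path and URL are placeholder values.
    HgLookup hgLookup = new HgLookup();
    HgRepository srcRepo = hgLookup.detect(new File("/path/to/local/repo"));            // local source repository
    HgRemoteRepository dstRemote = hgLookup.detect(new URL("http://localhost:9090/"));  // remote served via `hg serve`
    new HgPushCommand(srcRepo).destination(dstRemote).execute();                        // push outgoing changesets
    // after a successful push, nothing should remain outgoing against the same remote
    List<Nodeid> outgoing = new HgOutgoingCommand(srcRepo).against(dstRemote).executeLite();
    assert outgoing.isEmpty();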
--- a/test/org/tmatesoft/hg/test/TestRevisionSet.java	Fri Jun 28 19:27:26 2013 +0200
+++ b/test/org/tmatesoft/hg/test/TestRevisionSet.java	Mon Jul 01 21:19:53 2013 +0200
@@ -22,6 +22,9 @@
 import org.junit.Test;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.RevisionSet;
+import org.tmatesoft.hg.repo.HgChangelog;
+import org.tmatesoft.hg.repo.HgParentChildMap;
+import org.tmatesoft.hg.repo.HgRepository;
 
 /**
  *
@@ -39,13 +42,13 @@
 		Nodeid n2 = Nodeid.fromAscii("3b7d51ed4c65082f9235e3459e282d7ff723aa97");
 		Nodeid n3 = Nodeid.fromAscii("14dac192aa262feb8ff6645a102648498483a188");
 		Nodeid n4 = Nodeid.fromAscii("1deea2f332183c947937f6df988c2c6417efc217");
-		RevisionSet a = f(n1, n2, n3);
-		RevisionSet b = f(n3, n4);
-		RevisionSet union_ab = f(n1, n2, n3, n4);
-		RevisionSet intersect_ab = f(n3);
-		RevisionSet subtract_ab = f(n1, n2);
-		RevisionSet subtract_ba = f(n4);
-		RevisionSet symDiff_ab = f(n1, n2, n4);
+		RevisionSet a = rs(n1, n2, n3);
+		RevisionSet b = rs(n3, n4);
+		RevisionSet union_ab = rs(n1, n2, n3, n4);
+		RevisionSet intersect_ab = rs(n3);
+		RevisionSet subtract_ab = rs(n1, n2);
+		RevisionSet subtract_ba = rs(n4);
+		RevisionSet symDiff_ab = rs(n1, n2, n4);
 
 		errorCollector.assertEquals(union_ab, a.union(b));
 		errorCollector.assertEquals(union_ab, b.union(a));
@@ -55,10 +58,67 @@
 		errorCollector.assertEquals(subtract_ba, b.subtract(a));
 		errorCollector.assertEquals(symDiff_ab, a.symmetricDifference(b));
 		errorCollector.assertEquals(symDiff_ab, b.symmetricDifference(a));
+		errorCollector.assertTrue(rs(n1, n2, n4).equals(rs(n4, n1, n2)));
+		errorCollector.assertTrue(rs().equals(rs()));
+		errorCollector.assertFalse(rs(n1).equals(rs(n2)));
+	}
+
+	@Test
+	public void testRootsAndHeads() throws Exception {
+		final HgRepository repo = Configuration.get().find("test-annotate");
+		Nodeid[] allRevs = allRevisions(repo);
+		HgParentChildMap<HgChangelog> parentHelper = new HgParentChildMap<HgChangelog>(repo.getChangelog());
+		parentHelper.init();
+		final RevisionSet complete = rs(allRevs);
+		// roots
+		errorCollector.assertEquals(rs(allRevs[0]), complete.roots(parentHelper));
+		RevisionSet fromR2 = complete.subtract(rs(allRevs[0], allRevs[1]));
+		RevisionSet fromR3 = complete.subtract(rs(allRevs[0], allRevs[1], allRevs[2]));
+		errorCollector.assertEquals(rs(allRevs[2], allRevs[3]), fromR2.roots(parentHelper));
+		errorCollector.assertEquals(rs(allRevs[3], allRevs[4], allRevs[5]), fromR3.roots(parentHelper));
+		// heads
+		errorCollector.assertEquals(rs(allRevs[9], allRevs[7]), complete.heads(parentHelper));
+		RevisionSet toR7 = complete.subtract(rs(allRevs[9], allRevs[8]));
+		errorCollector.assertEquals(rs(allRevs[7], allRevs[6], allRevs[4]), toR7.heads(parentHelper));
+		RevisionSet withoutNoMergeBranch = toR7.subtract(rs(allRevs[5], allRevs[7]));
+		errorCollector.assertEquals(rs(allRevs[6], allRevs[4]), withoutNoMergeBranch.heads(parentHelper));
+		errorCollector.assertEquals(complete.heads(parentHelper), complete.heads(parentHelper).heads(parentHelper));
+	}
+
+	@Test
+	public void testAncestorsAndChildren() throws Exception {
+		final HgRepository repo = Configuration.get().find("test-annotate");
+		Nodeid[] allRevs = allRevisions(repo);
+		HgParentChildMap<HgChangelog> parentHelper = new HgParentChildMap<HgChangelog>(repo.getChangelog());
+		parentHelper.init();
+		final RevisionSet complete = rs(allRevs);
+		// children
+		errorCollector.assertTrue(rs().children(parentHelper).isEmpty());
+		errorCollector.assertEquals(rs(allRevs[8], allRevs[9]), rs(allRevs[4]).children(parentHelper));
+		// default branch and no-merge branch both from r2
+		RevisionSet s1 = rs(allRevs[8], allRevs[9], allRevs[4], allRevs[5], allRevs[7]);
+		errorCollector.assertEquals(s1, rs(allRevs[2]).children(parentHelper));
+		// ancestors
+		RevisionSet fromR2 = complete.subtract(rs(allRevs[0], allRevs[1]));
+		// no-merge branch and r9 are not in ancestors of r8 (as well as r8 itself)
+		RevisionSet s3 = fromR2.subtract(rs(allRevs[9], allRevs[5], allRevs[7], allRevs[8]));
+		errorCollector.assertEquals(s3, fromR2.ancestors(rs(allRevs[8]), parentHelper));
+		// ancestors of no-merge branch
+		RevisionSet branchNoMerge = rs(allRevs[5], allRevs[7]);
+		errorCollector.assertEquals(rs(allRevs[0], allRevs[1], allRevs[2]), complete.ancestors(branchNoMerge, parentHelper));
+		errorCollector.assertEquals(rs(allRevs[2]), fromR2.ancestors(branchNoMerge, parentHelper));
+	}
+
+	private static Nodeid[] allRevisions(HgRepository repo) {
+		Nodeid[] allRevs = new Nodeid[repo.getChangelog().getRevisionCount()];
+		for (int i = 0; i < allRevs.length; i++) {
+			allRevs[i] = repo.getChangelog().getRevision(i);
+		}
+		return allRevs;
 	}
 
-	private static RevisionSet f(Nodeid... nodes) {
+	private static RevisionSet rs(Nodeid... nodes) {
 		return new RevisionSet(Arrays.asList(nodes));
 	}
 }