/*
 * Copyright (c) 2011-2013 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.core;

import static org.tmatesoft.hg.core.Nodeid.NULL;
import static org.tmatesoft.hg.internal.RequiresFile.*;
import static org.tmatesoft.hg.internal.RevlogStreamWriter.preferCompleteOverPatch;
import static org.tmatesoft.hg.internal.RevlogStreamWriter.preferCompressedOverComplete;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.TreeMap;

import org.tmatesoft.hg.internal.ByteArrayDataAccess;
import org.tmatesoft.hg.internal.DataAccess;
import org.tmatesoft.hg.internal.DataAccessProvider;
import org.tmatesoft.hg.internal.DataSerializer;
import org.tmatesoft.hg.internal.DigestHelper;
import org.tmatesoft.hg.internal.FNCacheFile;
import org.tmatesoft.hg.internal.Internals;
import org.tmatesoft.hg.internal.Lifecycle;
import org.tmatesoft.hg.internal.RepoInitializer;
import org.tmatesoft.hg.internal.RevlogCompressor;
import org.tmatesoft.hg.internal.RevlogStreamWriter;
import org.tmatesoft.hg.repo.HgBundle;
import org.tmatesoft.hg.repo.HgBundle.GroupElement;
import org.tmatesoft.hg.repo.HgInvalidControlFileException;
import org.tmatesoft.hg.repo.HgInvalidFileException;
import org.tmatesoft.hg.repo.HgInvalidStateException;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRemoteRepository;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.repo.HgRuntimeException;
import org.tmatesoft.hg.util.CancelSupport;
import org.tmatesoft.hg.util.CancelledException;
import org.tmatesoft.hg.util.Path;
import org.tmatesoft.hg.util.PathRewrite;
import org.tmatesoft.hg.util.ProgressSupport;

/**
 * WORK IN PROGRESS, DO NOT USE
 * 
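 * <p>Rough usage sketch (the command is work in progress, see the warning above; how the
 * {@link HgRemoteRepository} instance gets located is deliberately left out here and is only an
 * assumption about eventual use, not part of this class):
 * <pre>
 *   HgRemoteRepository hgRemote = ...; // e.g. located with HgLookup
 *   HgRepository clonedRepo = new HgCloneCommand()
 *       .source(hgRemote)
 *       .destination(new File("clone-dir"))
 *       .execute();
 * </pre>
 * 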
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class HgCloneCommand extends HgAbstractCommand<HgCloneCommand> {

	private File destination;
	private HgRemoteRepository srcRepo;

	public HgCloneCommand() {
	}
	
	/**
	 * @param folder location to become the root of the repository (i.e. where the .hg folder would reside);
	 * shall either not exist or be an empty directory
	 * @return <code>this</code> for convenience
	 */
	public HgCloneCommand destination(File folder) {
		destination = folder;
		return this;
	}

	public HgCloneCommand source(HgRemoteRepository hgRemote) {
		srcRepo = hgRemote;
		return this;
	}

	/**
	 * Performs the clone.
	 * 
	 * @return newly cloned repository
	 * @throws HgBadArgumentException if destination or source repository is not valid
	 * @throws HgRemoteConnectionException in case of failure to communicate with the source repository
	 * @throws HgRepositoryNotFoundException if the newly written repository can't be opened
	 * @throws HgException in case of other library-specific issues
	 * @throws CancelledException if execution of the command was cancelled
	 * @throws HgRuntimeException subclass thereof to indicate issues with the library. <em>Runtime exception</em>
	 */
	public HgRepository execute() throws HgException, CancelledException {
		if (destination == null) {
			throw new IllegalArgumentException("Destination not set", null);
		}
		if (srcRepo == null || srcRepo.isInvalid()) {
			throw new HgBadArgumentException("Bad source repository", null);
		}
		if (destination.exists()) {
			if (!destination.isDirectory()) {
				throw new HgBadArgumentException(String.format("%s is not a directory", destination), null);
			} else if (destination.list().length > 0) {
				throw new HgBadArgumentException(String.format("%s shall be empty", destination), null);
			}
		} else {
			destination.mkdirs();
		}
		ProgressSupport progress = getProgressSupport(null);
		CancelSupport cancel = getCancelSupport(null, true);
		cancel.checkCancelled();
		// if cloning a remote repo that can stream and no revision is specified,
		// can use 'stream_out' wireproto
		//
		// pull all changes from the very beginning
		// XXX consult getContext() if by any chance it has a bundle ready; if not, then read and register
		HgBundle completeChanges = srcRepo.getChanges(Collections.singletonList(NULL));
		cancel.checkCancelled();
		WriteDownMate mate = new WriteDownMate(srcRepo.getSessionContext(), destination, progress, cancel);
		try {
			// instantiate new repo in the destdir
			mate.initEmptyRepository();
			// pull changes
			completeChanges.inspectAll(mate);
			mate.checkFailure();
			mate.complete();
		} catch (IOException ex) {
			throw new HgInvalidFileException(getClass().getName(), ex);
		} finally {
			completeChanges.unlink();
			progress.done();
		}
		return new HgLookup().detect(destination);
	}

	// 1. process changelog, memorize nodeids to index
	// 2. process manifest, using the map from step 1, collect manifest nodeids
	// 3. process every file, using the map from step 1, and consult the set from step 2 to ensure repo is correct
	private static class WriteDownMate implements HgBundle.Inspector, Lifecycle {
		private final File hgDir;
		private final PathRewrite storagePathHelper;
		private final ProgressSupport progressSupport;
		private final CancelSupport cancelSupport;
		private final SessionContext ctx;
		private final Path.Source pathFactory;
		private FileOutputStream indexFile;
		private String filename; // human-readable name of the file being written, for log/exception purposes

		private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>();
		private boolean collectChangelogIndexes = false;

		private DataAccess prevRevContent;
		private final DigestHelper dh = new DigestHelper();
		// recently processed nodes last, so that index in the array may be used as a linkRevision or baseRevision
		private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>();

		private FNCacheFile fncacheFile;
		private RepoInitializer repoInit;
		private Lifecycle.Callback lifecycleCallback;
		private CancelledException cancelException;

		private RevlogStreamWriter.HeaderWriter revlogHeader = new RevlogStreamWriter.HeaderWriter(true);
		private RevlogCompressor revlogDataZip;

		public WriteDownMate(SessionContext sessionCtx, File destDir, ProgressSupport progress, CancelSupport cancel) {
			ctx = sessionCtx;
			hgDir = new File(destDir, ".hg");
			repoInit = new RepoInitializer();
			repoInit.setRequires(STORE | FNCACHE | DOTENCODE);
			storagePathHelper = repoInit.buildDataFilesHelper(sessionCtx);
			progressSupport = progress;
			cancelSupport = cancel;
			revlogDataZip = new RevlogCompressor(sessionCtx);
			pathFactory = ctx.getPathFactory();
		}

		public void initEmptyRepository() throws IOException {
			repoInit.initEmptyRepository(hgDir);
			try {
				assert (repoInit.getRequires() & FNCACHE) != 0;
				fncacheFile = new FNCacheFile(Internals.getInstance(new HgLookup(ctx).detect(hgDir)));
			} catch (HgRepositoryNotFoundException ex) {
				// SHALL NOT HAPPEN provided we initialized the empty repository successfully
				// TODO perhaps, with WriteDownMate moving to a more appropriate location,
				// we could instantiate HgRepository (or Internals) by other means, without the exception?
				throw new IOException("Can't access fncache for newly created repository", ex);
			}
		}

		public void complete() throws IOException {
			fncacheFile.write();
		}

		public void changelogStart() {
			try {
				revlogHeader.offset(0).baseRevision(-1);
				revisionSequence.clear();
				indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i"));
				collectChangelogIndexes = true;
			} catch (IOException ex) {
				throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(hgDir, filename));
			}
			stopIfCancelled();
		}

		public void changelogEnd() {
			try {
				clearPreviousContent();
				collectChangelogIndexes = false;
				closeIndexFile();
			} catch (IOException ex) {
				throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(hgDir, filename));
			}
			progressSupport.worked(1);
			stopIfCancelled();
		}

		public void manifestStart() {
			try {
				revlogHeader.offset(0).baseRevision(-1);
				revisionSequence.clear();
				indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i"));
			} catch (IOException ex) {
				throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(hgDir, filename));
			}
			stopIfCancelled();
		}

		public void manifestEnd() {
			try {
				clearPreviousContent();
				closeIndexFile();
			} catch (IOException ex) {
				throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(hgDir, filename));
			}
			progressSupport.worked(1);
			stopIfCancelled();
		}

		public void fileStart(String name) {
			try {
				revlogHeader.offset(0).baseRevision(-1);
				revisionSequence.clear();
				File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString());
				file.getParentFile().mkdirs();
				indexFile = new FileOutputStream(file);
			} catch (IOException ex) {
				String m = String.format("Failed to write file %s", filename);
				throw new HgInvalidControlFileException(m, ex, new File(filename));
			}
			stopIfCancelled();
		}

		public void fileEnd(String name) {
			try {
				fncacheFile.addIndex(pathFactory.path(name));
				clearPreviousContent();
				closeIndexFile();
			} catch (IOException ex) {
				String m = String.format("Failed to write file %s", filename);
				throw new HgInvalidControlFileException(m, ex, new File(filename));
			}
			progressSupport.worked(1);
			stopIfCancelled();
		}

		private void clearPreviousContent() {
			if (prevRevContent != null) {
				prevRevContent.done();
				prevRevContent = null;
			}
		}

		private void closeIndexFile() throws IOException {
			indexFile.close();
			indexFile = null;
			filename = null;
		}

		private int knownRevision(Nodeid p) {
			if (p.isNull()) {
				return -1;
			} else {
				for (int i = revisionSequence.size() - 1; i >= 0; i--) {
					if (revisionSequence.get(i).equals(p)) {
						return i;
					}
				}
			}
			String m = String.format("Can't find index of %s for file %s", p.shortNotation(), filename);
			throw new HgInvalidControlFileException(m, null, new File(hgDir, filename)).setRevision(p);
		}

		public boolean element(GroupElement ge) {
			try {
				assert indexFile != null;
				boolean writeComplete = false;
				Nodeid deltaBase = ge.patchBase();
				if (deltaBase.isNull()) {
					// NOTE, can't rely on both parents being null to reset prevRevContent,
					// see the build.gradle sample below for why.
					prevRevContent = new DataAccess(); // empty data
					writeComplete = true;
					// the if (writeComplete) check below sets baseRevision correctly
				} else {
					Nodeid prevRevision = revisionSequence.size() > 0 ? revisionSequence.get(revisionSequence.size() - 1) : Nodeid.NULL;
					if (!prevRevision.equals(deltaBase)) {
						// presently, bundle group elements always patch the previous one, see
						// (a) changegroup.py#builddeltaheader(): # do nothing with basenode, it is implicitly the previous one in HG10
						// (b) revlog.py#group(): prev, curr = revs[r], revs[r + 1]
						//                        for c in bundler.revchunk(self, curr, prev):
						// so there's no reason to have code here to extract contents of the deltaBase revision
						String m = String.format("Revision %s import failed: delta base %s is not the last node we've handled (and know content for) %s", ge.node(), deltaBase, prevRevision);
						throw new HgInvalidStateException(m);
					}
				}
				//
				byte[] content = ge.apply(prevRevContent.byteArray());
				Nodeid p1 = ge.firstParent();
				Nodeid p2 = ge.secondParent();
				byte[] calculated = dh.sha1(p1, p2, content).asBinary();
				final Nodeid node = ge.node();
				if (!node.equalsTo(calculated)) {
					String m = String.format("Checksum failed: expected %s, calculated %s. File %s", node, calculated, filename);
					throw new HgRevisionIntegrityException(m, null, new File(hgDir, filename));
				}
				revlogHeader.nodeid(node);
				//
				if (collectChangelogIndexes) {
					changelogIndexes.put(node, revisionSequence.size());
					revlogHeader.linkRevision(revisionSequence.size());
				} else {
					Integer csRev = changelogIndexes.get(ge.cset());
					if (csRev == null) {
						throw new HgInvalidStateException(String.format("Changelog doesn't contain revision %s of %s", ge.cset().shortNotation(), filename));
					}
					revlogHeader.linkRevision(csRev.intValue());
				}
				//
				revlogHeader.parents(knownRevision(p1), knownRevision(p2));
				//
				byte[] patchContent = ge.rawDataByteArray();
				// no reason to keep patch if it's close (here, >75%) in size to the complete contents,
				// save patching effort in this case
				writeComplete = writeComplete || preferCompleteOverPatch(patchContent.length, content.length);

				if (writeComplete) {
					revlogHeader.baseRevision(revisionSequence.size());
				}
				assert revlogHeader.baseRevision() >= 0;

				final byte[] sourceData = writeComplete ? content : patchContent;
				revlogDataZip.reset(new DataSerializer.ByteArrayDataSource(sourceData));
				final int compressedLen;
				final boolean useUncompressedData = preferCompressedOverComplete(revlogDataZip.getCompressedLength(), sourceData.length);
				if (useUncompressedData) {
					// compression wasn't too effective
					compressedLen = sourceData.length + 1 /*1 byte for 'u' - uncompressed prefix byte*/;
				} else {
					compressedLen = revlogDataZip.getCompressedLength();
				}

				revlogHeader.length(content.length, compressedLen);

				// XXX may be wise not to create DataSerializer for each revision, but for a file
				DataAccessProvider.StreamDataSerializer sds = new DataAccessProvider.StreamDataSerializer(ctx.getLog(), indexFile) {
					@Override
					public void done() {
						// override parent behavior not to close stream in use
					}
				};
				revlogHeader.serialize(sds);

				if (useUncompressedData) {
					indexFile.write((byte) 'u');
					indexFile.write(sourceData);
				} else {
					int actualCompressedLenWritten = revlogDataZip.writeCompressedData(sds);
					if (actualCompressedLenWritten != compressedLen) {
						throw new HgInvalidStateException(String.format("Expected %d bytes of compressed data, but actually wrote %d in %s", compressedLen, actualCompressedLenWritten, filename));
					}
				}
				sds.done();
				//
				revisionSequence.add(node);
				prevRevContent.done();
				prevRevContent = new ByteArrayDataAccess(content);
			} catch (IOException ex) {
				String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename);
				throw new HgInvalidControlFileException(m, ex, new File(hgDir, filename));
			}
			return cancelException == null;
		}
		/*
		 $ hg debugindex build.gradle
		    rev    offset  length   base linkrev nodeid       p1           p2
		      0         0     857      0     454 b2a1b20d1933 000000000000 000000000000
		      1       857     319      0     455 5324c8f2b550 b2a1b20d1933 000000000000
		      2      1176     533      0     460 4011d52141cd 5324c8f2b550 000000000000
		      3      1709      85      0     463 d0be58845306 4011d52141cd 000000000000
		      4      1794     105      0     464 3ddd456244a0 d0be58845306 000000000000
		      5      1899     160      0     466 a3f374fbf33a 3ddd456244a0 000000000000
		      6      2059     133      0     468 0227d28e0db6 a3f374fbf33a 000000000000

		 once we get a bundle for this repository and look into it for the same file:

		 $ hg debugbundle -a /tmp/hg-bundle-4418325145435980614.tmp
		 format: id, p1, p2, cset, delta base, len(delta)

		 build.gradle
		 62a101b7994c6c5b0423ba6c802f8c64d24ef784 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 6ec4af642ba8024edd636af15e672c97cc3294e4 0000000000000000000000000000000000000000 1368
		 b2a1b20d1933d0605aab6780ee52fe5ab3073832 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 7dcc920e2d57d5850ee9f08ac863251460565bd3 62a101b7994c6c5b0423ba6c802f8c64d24ef784 2373
		 5324c8f2b5503a4d1ead3ac40a9851c27572166b b2a1b20d1933d0605aab6780ee52fe5ab3073832 0000000000000000000000000000000000000000 7b883bf03b14ccea8ee74db0a34f9f66ca644a3c b2a1b20d1933d0605aab6780ee52fe5ab3073832 579
		 4011d52141cd717c92cbf350a93522d2f3ee415e 5324c8f2b5503a4d1ead3ac40a9851c27572166b 0000000000000000000000000000000000000000 55e9588b84b83aa96fe76a06ee8bf067c5d3c90e 5324c8f2b5503a4d1ead3ac40a9851c27572166b 1147
		 d0be588453068787dcb3ee05f8edfe47fdd5ae78 4011d52141cd717c92cbf350a93522d2f3ee415e 0000000000000000000000000000000000000000 ad0322a4af204547c400e1846b2b83d446ab8da5 4011d52141cd717c92cbf350a93522d2f3ee415e 85
		 3ddd456244a08f81779163d9faf922a6dcd9e53e d0be588453068787dcb3ee05f8edfe47fdd5ae78 0000000000000000000000000000000000000000 3ace1fc95d0a1a941b6427c60b6e624f96dd71ad d0be588453068787dcb3ee05f8edfe47fdd5ae78 151
		 a3f374fbf33aba1cc3b4f472db022b5185880f5d 3ddd456244a08f81779163d9faf922a6dcd9e53e 0000000000000000000000000000000000000000 3ca4ae7bdd3890b8ed89bfea1b42af593e04b373 3ddd456244a08f81779163d9faf922a6dcd9e53e 195
		 0227d28e0db69afebee34cd5a4151889fb6271da a3f374fbf33aba1cc3b4f472db022b5185880f5d 0000000000000000000000000000000000000000 31bd09da0dcfe48e1fc662143f91ff402238aa84 a3f374fbf33aba1cc3b4f472db022b5185880f5d 145

		 but there's no delta base information in the bundle file, it's merely a hard-coded convention

		 It's unclear where the first chunk (identified 62a101b7...) comes from (by the way, there's no such
		 changeset as 6ec4af... as specified in the chunk, while 7dcc920e.. IS changeset 454)

		 EXPLANATION:
		 if the cloned repository comes from the svnkit repo (the one with the gradle branch):
		 $ hg debugindex build.gradle
		    rev    offset  length   base linkrev nodeid       p1           p2
		      0         0     590      0     213 62a101b7994c 000000000000 000000000000
		      1       590     872      0     452 b2a1b20d1933 000000000000 000000000000
		      2      1462     319      0     453 5324c8f2b550 b2a1b20d1933 000000000000
		      3      1781     533      0     459 4011d52141cd 5324c8f2b550 000000000000
		      4      2314      85      0     462 d0be58845306 4011d52141cd 000000000000
		      5      2399     105      0     466 3ddd456244a0 d0be58845306 000000000000
		      6      2504     160      0     468 a3f374fbf33a 3ddd456244a0 000000000000
		      7      2664     133      0     470 0227d28e0db6 a3f374fbf33a 000000000000

		 and the aforementioned bundle was the result of hg incoming svnkit!!!
		*/

		public void start(int count, Callback callback, Object token) {
			progressSupport.start(count);
			lifecycleCallback = callback;
		}

		public void finish(Object token) {
			progressSupport.done();
			lifecycleCallback = null;
		}

		public void checkFailure() throws CancelledException {
			if (cancelException != null) {
				throw cancelException;
			}
		}

		private void stopIfCancelled() {
			try {
				cancelSupport.checkCancelled();
				return;
			} catch (CancelledException ex) {
				cancelException = ex;
				lifecycleCallback.stop();
			}
		}
	}
}