/*
 * Copyright (c) 2013 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.internal;

import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
import static org.tmatesoft.hg.repo.HgRepository.TIP;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.tmatesoft.hg.core.HgIOException;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.internal.DataSerializer.OutputStreamSerializer;
import org.tmatesoft.hg.internal.Patch.PatchDataSource;
import org.tmatesoft.hg.repo.HgBundle;
import org.tmatesoft.hg.repo.HgChangelog;
import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgInternals;
import org.tmatesoft.hg.repo.HgInvalidControlFileException;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgManifest;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.repo.HgRuntimeException;

/**
 * Writes an uncompressed ("HG10UN") Mercurial bundle file with a given set of changesets,
 * their manifest revisions and the revisions of every file they touch.
 *
 * @see http://mercurial.selenic.com/wiki/BundleFormat
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class BundleGenerator {

	private final Internals repo;

	public BundleGenerator(Internals hgRepo) {
		repo = hgRepo;
	}

	/**
	 * Collects changelog, manifest and file revisions for the given changesets and
	 * serializes them into a new bundle file.
	 *
	 * @param changesets changelog revisions to pack into the bundle
	 * @return temporary file with the bundle content; the caller is responsible for its removal
	 * @throws HgIOException propagated from the serializer
	 * @throws IOException if the temporary file can't be created or written
	 */
	public File create(List<Nodeid> changesets) throws HgIOException, IOException {
		final HgChangelog clog = repo.getRepo().getChangelog();
		final HgManifest manifest = repo.getRepo().getManifest();
		IntVector clogRevsVector = new IntVector(changesets.size(), 0);
		for (Nodeid n : changesets) {
			clogRevsVector.add(clog.getRevisionIndex(n));
		}
		// bundle groups are written in ascending revision order
		clogRevsVector.sort(true);
		final int[] clogRevs = clogRevsVector.toArray();
		final IntMap<Nodeid> clogMap = new IntMap<Nodeid>(changesets.size());
		final IntVector manifestRevs = new IntVector(changesets.size(), 0);
		final List<HgDataFile> files = new ArrayList<HgDataFile>();
		// single changelog pass: remember revision index->nodeid mapping (needed to emit
		// the "cset link" field of each chunk), the manifest revision of each changeset,
		// and every file affected by any of the changesets (each file listed once)
		clog.range(new HgChangelog.Inspector() {
			private Set<String> seenFiles = new HashSet<String>();

			public void next(int revisionIndex, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
				clogMap.put(revisionIndex, nodeid);
				manifestRevs.add(manifest.getRevisionIndex(cset.manifest()));
				for (String f : cset.files()) {
					if (seenFiles.contains(f)) {
						continue;
					}
					seenFiles.add(f);
					HgDataFile df = repo.getRepo().getFileNode(f);
					files.add(df);
				}
			}
		}, clogRevs);
		manifestRevs.sort(true);
		//
		final File bundleFile = File.createTempFile("hg4j-", ".bundle");
		final FileOutputStream osBundle = new FileOutputStream(bundleFile);
		// close the stream even if serialization fails, not to leak the descriptor
		// along with the temporary file
		try {
			final OutputStreamSerializer outRaw = new OutputStreamSerializer(osBundle);
			// bundle header: uncompressed bundle format marker (6 ASCII bytes)
			outRaw.write("HG10UN".getBytes(), 0, 6);
			//
			RevlogStream clogStream = repo.getImplAccess().getChangelogStream();
			new ChunkGenerator(outRaw, clogMap).iterate(clogStream, clogRevs);
			outRaw.writeInt(0); // null chunk for changelog group
			//
			RevlogStream manifestStream = repo.getImplAccess().getManifestStream();
			new ChunkGenerator(outRaw, clogMap).iterate(manifestStream, manifestRevs.toArray(true));
			outRaw.writeInt(0); // null chunk for manifest group
			//
			EncodingHelper fnEncoder = repo.buildFileNameEncodingHelper();
			for (HgDataFile df : sortedByName(files)) {
				RevlogStream s = repo.getImplAccess().getStream(df);
				final IntVector fileRevs = new IntVector();
				// pick only file revisions that belong to the bundled changesets
				s.iterate(0, TIP, false, new RevlogStream.Inspector() {

					public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
						if (Arrays.binarySearch(clogRevs, linkRevision) >= 0) {
							fileRevs.add(revisionIndex);
						}
					}
				});
				fileRevs.sort(true);
				if (!fileRevs.isEmpty()) {
					// although BundleFormat page says "filename length, filename" for a file,
					// in fact there's a sort of 'filename chunk', i.e. filename length field includes
					// not only length of filename, but also length of the field itself, i.e. filename.length+sizeof(int)
					byte[] fnameBytes = fnEncoder.toBundle(df.getPath());
					outRaw.writeInt(fnameBytes.length + 4);
					outRaw.writeByte(fnameBytes);
					new ChunkGenerator(outRaw, clogMap).iterate(s, fileRevs.toArray(true));
					outRaw.writeInt(0); // null chunk for file group
				}
			}
			outRaw.writeInt(0); // null chunk to indicate no more files (although BundleFormat page doesn't mention this)
			outRaw.done();
			osBundle.flush();
		} finally {
			osBundle.close();
		}
		//return new HgBundle(repo.getSessionContext(), repo.getDataAccess(), bundleFile);
		return bundleFile;
	}

	// files in a bundle come ordered by path; sorts the list in place and returns it
	private static Collection<HgDataFile> sortedByName(List<HgDataFile> files) {
		Collections.sort(files, new Comparator<HgDataFile>() {

			public int compare(HgDataFile o1, HgDataFile o2) {
				return o1.getPath().compareTo(o2.getPath());
			}
		});
		return files;
	}

	// debug/smoke entry point: bundles three hard-coded revisions of the current repository
	public static void main(String[] args) throws Exception {
		final HgLookup hgLookup = new HgLookup();
		HgRepository hgRepo = hgLookup.detectFromWorkingDir();
		BundleGenerator bg = new BundleGenerator(HgInternals.getImplementationRepo(hgRepo));
		ArrayList<Nodeid> l = new ArrayList<Nodeid>();
		l.add(Nodeid.fromAscii("9ef1fab9f5e3d51d70941121dc27410e28069c2d")); // 640
		l.add(Nodeid.fromAscii("2f33f102a8fa59274a27ebbe1c2903cecac6c5d5")); // 639
		l.add(Nodeid.fromAscii("d074971287478f69ab0a64176ce2284d8c1e91c3")); // 638
		File bundleFile = bg.create(l);
		HgBundle b = hgLookup.loadBundle(bundleFile);
		// Bundle.dump(b); // FIXME dependency from dependant code
	}

	/**
	 * Emits one revlog group of a bundle: for each requested revision, a chunk with
	 * the revision's nodeid, its parents' nodeids, the nodeid of the changeset it
	 * belongs to, and a delta against the previously emitted revision.
	 */
	private static class ChunkGenerator implements RevlogStream.Inspector {

		private final DataSerializer ds;
		// revision index -> nodeid of revisions in this revlog, to resolve parents
		private final IntMap<Nodeid> parentMap;
		// changelog revision index -> changeset nodeid, for the "cset link" chunk field
		private final IntMap<Nodeid> clogMap;
		private byte[] prevContent;
		private int startParent;

		public ChunkGenerator(DataSerializer dataSerializer, IntMap<Nodeid> clogNodeidMap) {
			ds = dataSerializer;
			parentMap = new IntMap<Nodeid>(clogNodeidMap.size());
			clogMap = clogNodeidMap;
		}

		public void iterate(RevlogStream s, int[] revisions) throws HgRuntimeException {
			int[] p = s.parents(revisions[0], new int[2]);
			startParent = p[0];
			int[] revs2read;
			if (startParent == NO_REVISION) {
				// first revision has no parent, first chunk is a delta against empty content
				revs2read = revisions;
				prevContent = new byte[0];
			} else {
				// read the parent, too, to delta the first requested revision against it
				revs2read = new int[revisions.length + 1];
				revs2read[0] = startParent;
				System.arraycopy(revisions, 0, revs2read, 1, revisions.length);
			}
			// FIXME this is a hack to fill parentsMap with
			// parents of elements that we are not going to meet with regular
			// iteration, e.g. changes from a different branch (with some older parent),
			// scenario: two revisions added to two different branches
			// revisions[10, 11], parents(10) == 9, parents(11) == 7
			// revs2read == [9,10,11], and parentsMap lacks entry for parent rev7.
			fillMissingParentsMap(s, revisions);
			s.iterate(revs2read, true, this);
		}

		// records nodeids of parents that are outside the (sorted) revisions array,
		// so that next() can always resolve parent indexes to nodeids
		private void fillMissingParentsMap(RevlogStream s, int[] revisions) throws HgRuntimeException {
			int[] p = new int[2];
			for (int i = 1; i < revisions.length; i++) {
				s.parents(revisions[i], p);
				if (p[0] != NO_REVISION && Arrays.binarySearch(revisions, p[0]) < 0) {
					parentMap.put(p[0], Nodeid.fromBinary(s.nodeid(p[0]), 0));
				}
				if (p[1] != NO_REVISION && Arrays.binarySearch(revisions, p[1]) < 0) {
					parentMap.put(p[1], Nodeid.fromBinary(s.nodeid(p[1]), 0));
				}
			}
		}

		public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
			try {
				parentMap.put(revisionIndex, Nodeid.fromBinary(nodeid, 0));
				byte[] nextContent = data.byteArray();
				data.done();
				if (revisionIndex == startParent) {
					// the extra revision read only to serve as a delta base; not part of the bundle
					prevContent = nextContent;
					return;
				}
				Patch p = GeneratePatchInspector.delta(prevContent, nextContent);
				prevContent = nextContent;
				nextContent = null;
				PatchDataSource pds = p.new PatchDataSource();
				// chunk header: 4 (length field itself) + 4*20 (node, p1, p2, cset link nodeids) = 84
				int len = pds.serializeLength() + 84;
				ds.writeInt(len);
				ds.write(nodeid, 0, Nodeid.SIZE);
				// TODO assert parents match those in previous group elements
				if (parent1Revision != NO_REVISION) {
					ds.writeByte(parentMap.get(parent1Revision).toByteArray());
				} else {
					ds.writeByte(Nodeid.NULL.toByteArray());
				}
				if (parent2Revision != NO_REVISION) {
					ds.writeByte(parentMap.get(parent2Revision).toByteArray());
				} else {
					ds.writeByte(Nodeid.NULL.toByteArray());
				}
				ds.writeByte(clogMap.get(linkRevision).toByteArray());
				pds.serialize(ds);
			} catch (IOException ex) {
				// XXX odd to have object with IOException to use where no checked exception is allowed
				throw new HgInvalidControlFileException(ex.getMessage(), ex, null);
			} catch (HgIOException ex) {
				throw new HgInvalidControlFileException(ex, true); // XXX any way to refactor ChunkGenerator not to get checked exception here?
			}
		}
	}
}