/*
 * Copyright (c) 2013 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.internal;

import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
import static org.tmatesoft.hg.repo.HgRepository.TIP;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.tmatesoft.hg.console.Bundle;
import org.tmatesoft.hg.core.HgIOException;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.internal.Patch.PatchDataSource;
import org.tmatesoft.hg.repo.HgBundle;
import org.tmatesoft.hg.repo.HgChangelog;
import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgInternals;
import org.tmatesoft.hg.repo.HgInvalidControlFileException;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgManifest;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.repo.HgRuntimeException;

/**
 * Writes a subset of a repository's changesets out as an uncompressed
 * Mercurial bundle file ({@code HG10UN}): a changelog group, a manifest
 * group and one group per affected file, each group terminated by a
 * zero-length ("null") chunk.
 *
 * @see http://mercurial.selenic.com/wiki/BundleFormat
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class BundleGenerator {

	private final Internals repo;

	public BundleGenerator(Internals hgRepo) {
		repo = hgRepo;
	}

	/**
	 * Builds a bundle file containing the given changesets.
	 *
	 * @param changesets changelog revisions to include; need not be sorted
	 * @return temporary file with the bundle content; caller owns (and should eventually delete) it
	 * @throws HgIOException propagated from bundle serialization
	 * @throws IOException if the temporary file can't be created
	 */
	public File create(List<Nodeid> changesets) throws HgIOException, IOException {
		final HgChangelog clog = repo.getRepo().getChangelog();
		final HgManifest manifest = repo.getRepo().getManifest();
		// map requested changesets to sorted changelog revision indexes;
		// sorted order is required for the binarySearch below and for group output
		IntVector clogRevsVector = new IntVector(changesets.size(), 0);
		for (Nodeid n : changesets) {
			clogRevsVector.add(clog.getRevisionIndex(n));
		}
		clogRevsVector.sort(true);
		final int[] clogRevs = clogRevsVector.toArray();
		final IntMap<Nodeid> clogMap = new IntMap<Nodeid>(changesets.size());
		final IntVector manifestRevs = new IntVector(changesets.size(), 0);
		final List<HgDataFile> files = new ArrayList<HgDataFile>();
		// single changelog pass collects, per selected changeset: its nodeid (for the
		// linkRevision field of every chunk), its manifest revision, and the touched files
		clog.range(new HgChangelog.Inspector() {
			public void next(int revisionIndex, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
				clogMap.put(revisionIndex, nodeid);
				manifestRevs.add(manifest.getRevisionIndex(cset.manifest()));
				for (String f : cset.files()) {
					HgDataFile df = repo.getRepo().getFileNode(f);
					if (!files.contains(df)) {
						files.add(df);
					}
				}
			}
		}, clogRevs);
		manifestRevs.sort(true);
		//
		final File bundleFile = File.createTempFile("hg4j-", "bundle");
		final OutputStreamSerializer outRaw = new OutputStreamSerializer(new FileOutputStream(bundleFile));
		// "HG10UN" magic: bundle format 1.0, uncompressed payload (ASCII-only, charset-safe)
		outRaw.write("HG10UN".getBytes(), 0, 6);
		//
		RevlogStream clogStream = repo.getImplAccess().getChangelogStream();
		new ChunkGenerator(outRaw, clogMap).iterate(clogStream, clogRevs);
		outRaw.writeInt(0); // null chunk for changelog group
		//
		RevlogStream manifestStream = repo.getImplAccess().getManifestStream();
		new ChunkGenerator(outRaw, clogMap).iterate(manifestStream, manifestRevs.toArray(true));
		outRaw.writeInt(0); // null chunk for manifest group
		//
		for (HgDataFile df : sortedByName(files)) {
			RevlogStream s = repo.getImplAccess().getStream(df);
			// select only those file revisions whose linkRevision points at one of
			// the requested changesets (clogRevs is sorted, hence binarySearch)
			final IntVector fileRevs = new IntVector();
			s.iterate(0, TIP, false, new RevlogStream.Inspector() {

				public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
					if (Arrays.binarySearch(clogRevs, linkRevision) >= 0) {
						fileRevs.add(revisionIndex);
					}
				}
			});
			fileRevs.sort(true);
			if (!fileRevs.isEmpty()) {
				// although BundleFormat page says "filename length, filename" for a file,
				// in fact there's a sort of 'filename chunk', i.e. filename length field includes
				// not only length of filename, but also length of the field itself, i.e. filename.length+sizeof(int)
				byte[] fnameBytes = df.getPath().toString().getBytes(); // FIXME check encoding in native hg (and fix accordingly in HgBundle)
				outRaw.writeInt(fnameBytes.length + 4);
				outRaw.writeByte(fnameBytes);
				new ChunkGenerator(outRaw, clogMap).iterate(s, fileRevs.toArray(true));
				outRaw.writeInt(0); // null chunk for file group
			}
		}
		outRaw.done();
		//return new HgBundle(repo.getSessionContext(), repo.getDataAccess(), bundleFile);
		return bundleFile;
	}

	/**
	 * Sorts the list in place by file path and returns it; bundle groups
	 * are expected to list files in a stable (name) order.
	 */
	private static Collection<HgDataFile> sortedByName(List<HgDataFile> files) {
		Collections.sort(files, new Comparator<HgDataFile>() {

			public int compare(HgDataFile o1, HgDataFile o2) {
				return o1.getPath().compareTo(o2.getPath());
			}
		});
		return files;
	}

	// ad-hoc smoke-test entry point; bundles three hardcoded revisions of the
	// repository found at the current working directory
	public static void main(String[] args) throws Exception {
		final HgLookup hgLookup = new HgLookup();
		HgRepository hgRepo = hgLookup.detectFromWorkingDir();
		BundleGenerator bg = new BundleGenerator(HgInternals.getImplementationRepo(hgRepo));
		ArrayList<Nodeid> l = new ArrayList<Nodeid>();
		l.add(Nodeid.fromAscii("9ef1fab9f5e3d51d70941121dc27410e28069c2d")); // 640
		l.add(Nodeid.fromAscii("2f33f102a8fa59274a27ebbe1c2903cecac6c5d5")); // 639
		l.add(Nodeid.fromAscii("d074971287478f69ab0a64176ce2284d8c1e91c3")); // 638
		File bundleFile = bg.create(l);
		HgBundle b = hgLookup.loadBundle(bundleFile);
		// Bundle.dump(b); // FIXME dependency from dependant code
	}

	/**
	 * Emits one changegroup chunk per revlog revision: chunk length, revision
	 * nodeid, both parent nodeids, linked changeset nodeid, then the delta
	 * against the previously emitted revision (or against the start parent /
	 * empty content for the first one).
	 */
	private static class ChunkGenerator implements RevlogStream.Inspector {

		private final DataSerializer ds;
		// revision index -> nodeid, for revisions seen so far in this group
		private final IntMap<Nodeid> parentMap;
		// changelog revision index -> changeset nodeid (the chunk's linkRevision field)
		private final IntMap<Nodeid> clogMap;
		private byte[] prevContent;
		private int startParent;

		public ChunkGenerator(DataSerializer dataSerializer, IntMap<Nodeid> clogNodeidMap) {
			ds = dataSerializer;
			parentMap = new IntMap<Nodeid>(clogNodeidMap.size());
			clogMap = clogNodeidMap;
		}

		public void iterate(RevlogStream s, int[] revisions) throws HgRuntimeException {
			int[] p = s.parents(revisions[0], new int[2]);
			startParent = p[0];
			int[] revs2read;
			if (startParent == NO_REVISION) {
				// first revision has no parent: first delta is against empty content
				revs2read = revisions;
				prevContent = new byte[0];
			} else {
				// read the parent revision, too, solely to obtain base content for
				// the first delta; next() skips emitting a chunk for it
				revs2read = new int[revisions.length + 1];
				revs2read[0] = startParent;
				System.arraycopy(revisions, 0, revs2read, 1, revisions.length);
			}
			s.iterate(revs2read, true, this);
		}

		public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
			try {
				parentMap.put(revisionIndex, Nodeid.fromBinary(nodeid, 0));
				byte[] nextContent = data.byteArray();
				data.done();
				if (revisionIndex == startParent) {
					// parent was read only to seed the delta base, not part of the group
					prevContent = nextContent;
					return;
				}
				Patch p = GeneratePatchInspector.delta(prevContent, nextContent);
				prevContent = nextContent;
				nextContent = null; // drop the extra reference before serialization
				PatchDataSource pds = p.new PatchDataSource();
				// chunk length = delta size + header: 4 (length field itself) + 4 nodeids * 20 bytes = 84
				int len = pds.serializeLength() + 84;
				ds.writeInt(len);
				ds.write(nodeid, 0, Nodeid.SIZE);
				// TODO assert parents match those in previous group elements
				if (parent1Revision != NO_REVISION) {
					ds.writeByte(parentMap.get(parent1Revision).toByteArray());
				} else {
					ds.writeByte(Nodeid.NULL.toByteArray());
				}
				if (parent2Revision != NO_REVISION) {
					ds.writeByte(parentMap.get(parent2Revision).toByteArray());
				} else {
					ds.writeByte(Nodeid.NULL.toByteArray());
				}
				ds.writeByte(clogMap.get(linkRevision).toByteArray());
				pds.serialize(ds);
			} catch (IOException ex) {
				// XXX odd to have object with IOException to use where no checked exception is allowed
				throw new HgInvalidControlFileException(ex.getMessage(), ex, null);
			} catch (HgIOException ex) {
				throw new HgInvalidControlFileException(ex, true); // XXX any way to refactor ChunkGenerator not to get checked exception here?
			}
		}
	}

	/**
	 * {@link DataSerializer} writing straight to an {@link OutputStream},
	 * translating {@link IOException} into {@link HgIOException}.
	 */
	private static class OutputStreamSerializer extends DataSerializer {
		private final OutputStream out;

		public OutputStreamSerializer(OutputStream outputStream) {
			out = outputStream;
		}

		@Override
		public void write(byte[] data, int offset, int length) throws HgIOException {
			try {
				out.write(data, offset, length);
			} catch (IOException ex) {
				throw new HgIOException(ex.getMessage(), ex, null);
			}
		}

		@Override
		public void done() throws HgIOException {
			try {
				out.close();
				super.done();
			} catch (IOException ex) {
				throw new HgIOException(ex.getMessage(), ex, null);
			}
		}
	}
}