/*
 * Copyright (c) 2011 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.console;

import static org.tmatesoft.hg.core.Nodeid.NULL;
import static org.tmatesoft.hg.internal.RequiresFile.*;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.TreeMap;
import java.util.zip.DeflaterOutputStream;

import org.tmatesoft.hg.core.HgBadStateException;
import org.tmatesoft.hg.core.HgRepoFacade;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.internal.ByteArrayDataAccess;
import org.tmatesoft.hg.internal.DataAccess;
import org.tmatesoft.hg.internal.DigestHelper;
import org.tmatesoft.hg.internal.Internals;
import org.tmatesoft.hg.repo.HgBundle;
import org.tmatesoft.hg.repo.HgBundle.GroupElement;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRemoteRepository;
import org.tmatesoft.hg.util.PathRewrite;

/**
 * WORK IN PROGRESS, DO NOT USE
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class Clone {
	/*
	 * Changegroup:
	 * http://mercurial.selenic.com/wiki/Merge
	 * http://mercurial.selenic.com/wiki/WireProtocol
	 *
	 * According to the latter, bundle format data is sent through zlib
	 * (there's no header like HG10?? with the server output, though,
	 * as one may expect according to http://mercurial.selenic.com/wiki/BundleFormat)
	 */
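	// A minimal sketch, an assumption for illustration rather than part of the original flow:
	// per the note above, the raw changegroup bytes arrive zlib-compressed and without an HG10??
	// bundle header, so a hypothetical helper that inflates the server stream into a temporary
	// file (which HgBundle could then read) might look roughly like this:
	private static File inflateChangegroup(java.io.InputStream rawServerStream) throws IOException {
		File tmp = File.createTempFile("hg4j-clone-", ".changegroup");
		java.util.zip.InflaterInputStream in = new java.util.zip.InflaterInputStream(rawServerStream);
		FileOutputStream out = new FileOutputStream(tmp);
		byte[] buf = new byte[8192];
		int len;
		// copy the inflated changegroup into the temp file
		while ((len = in.read(buf)) != -1) {
			out.write(buf, 0, len);
		}
		out.close();
		in.close();
		return tmp;
	}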
	public static void main(String[] args) throws Exception {
		Options cmdLineOpts = Options.parse(args);
		HgRepoFacade hgRepo = new HgRepoFacade();
		if (!hgRepo.init(cmdLineOpts.findRepository())) {
			System.err.printf("Can't find repository in: %s\n", hgRepo.getRepository().getLocation());
			return;
		}
		File destDir = new File("/temp/hg/clone-01/");
		if (destDir.exists()) {
			if (!destDir.isDirectory()) {
				throw new IllegalArgumentException();
			} else if (destDir.list().length > 0) {
				throw new IllegalArgumentException();
			}
		} else {
			destDir.mkdirs();
		}
		// if cloning remote repo, which can stream and no revision is specified -
		// can use 'stream_out' wireproto
		//
		// //////// 1. from Remote.java take code that asks changegroup from remote server and write it down to temp file
		// //////// 2. then, read the file with HgBundle
		// //////// 3. process changelog, memorize nodeids to index
		// //////// 4. process manifest, using map from step 3, collect manifest nodeids
		// //////// 5. process every file, using map from 3, and consult set from step 4 to ensure repo is correct
		// access source
		HgRemoteRepository remoteRepo = new HgRemoteRepository(); // new HgLookup().detect(new URL("https://asd/hg/"));
		// discover changes
		HgBundle completeChanges = remoteRepo.getChanges(Collections.singletonList(NULL));
		WriteDownMate mate = new WriteDownMate(destDir);
		// instantiate new repo in the destdir
		mate.initEmptyRepository();
		// pull changes
		completeChanges.inspectAll(mate);
		mate.complete();
		// completeChanges.unlink();
	}
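	/**
	 * Writes the incoming bundle down as a new local repository: the changelog, manifest and
	 * file groups are consumed in order and each is stored as an inline revlog (index with
	 * interleaved data) under .hg/store, while file names are collected for the fncache.
	 */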
	private static class WriteDownMate implements HgBundle.Inspector {
		private final File hgDir;
		private FileOutputStream indexFile;
		private final PathRewrite storagePathHelper;

		private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>();
		private boolean collectChangelogIndexes = false;

		private int base = -1;
		private long offset = 0;
		private DataAccess prevRevContent;
		private final DigestHelper dh = new DigestHelper();
		private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // last visited nodes first

		private final LinkedList<String> fncacheFiles = new LinkedList<String>();

		public WriteDownMate(File destDir) {
			hgDir = new File(destDir, ".hg");
			Internals i = new Internals();
			i.setStorageConfig(1, STORE | FNCACHE | DOTENCODE);
			storagePathHelper = i.buildDataFilesHelper();
		}

		public void initEmptyRepository() throws IOException {
			hgDir.mkdir();
			FileOutputStream requiresFile = new FileOutputStream(new File(hgDir, "requires"));
			requiresFile.write("revlogv1\nstore\nfncache\ndotencode\n".getBytes());
			requiresFile.close();
			new File(hgDir, "store").mkdir(); // with that, hg verify says ok.
		}

		public void complete() throws IOException {
			FileOutputStream fncacheFile = new FileOutputStream(new File(hgDir, "store/fncache"));
			for (String s : fncacheFiles) {
				fncacheFile.write(s.getBytes());
				fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
			}
			fncacheFile.close();
		}

		public void changelogStart() {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				indexFile = new FileOutputStream(new File(hgDir, "store/00changelog.i"));
				collectChangelogIndexes = true;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void changelogEnd() {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				collectChangelogIndexes = false;
				indexFile.close();
				indexFile = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void manifestStart() {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				indexFile = new FileOutputStream(new File(hgDir, "store/00manifest.i"));
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void manifestEnd() {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				indexFile.close();
				indexFile = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void fileStart(String name) {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				fncacheFiles.add("data/" + name + ".i"); // FIXME this is pure guess,
				// need to investigate more how filenames are kept in fncache
				File file = new File(hgDir, storagePathHelper.rewrite(name));
				file.getParentFile().mkdirs();
				indexFile = new FileOutputStream(file);
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void fileEnd(String name) {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				indexFile.close();
				indexFile = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}
		private int knownRevision(Nodeid p) {
			if (NULL.equals(p)) {
				return -1;
			} else {
				for (int i = revisionSequence.size() - 1; i >= 0; i--) {
					if (revisionSequence.get(i).equals(p)) {
						return i;
					}
				}
			}
			throw new HgBadStateException(String.format("Can't find index of %s", p.shortNotation()));
		}
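		// Each entry written by element() below is a 64-byte RevlogNG index record immediately
		// followed by the revision data (inline revlog): a 6-byte data offset plus 2-byte flags
		// (the very first record carries the version and INLINEDATA flag instead), then the
		// compressed and uncompressed lengths, base/link/p1/p2 revision indexes, and a 32-byte
		// node field of which only the first 20 bytes are used. This summary is inferred from
		// the code; the Mercurial revlog documentation is the authoritative reference.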
		public boolean element(GroupElement ge) {
			try {
				assert indexFile != null;
				boolean writeComplete = false;
				Nodeid p1 = ge.firstParent();
				Nodeid p2 = ge.secondParent();
				if (NULL.equals(p1) && NULL.equals(p2) /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) {
					prevRevContent = new ByteArrayDataAccess(new byte[0]);
					writeComplete = true;
				}
				byte[] content = ge.apply(prevRevContent);
				byte[] calculated = dh.sha1(p1, p2, content).asBinary();
				final Nodeid node = ge.node();
				if (!node.equalsTo(calculated)) {
					throw new HgBadStateException("Checksum failed");
				}
				final int link;
				if (collectChangelogIndexes) {
					changelogIndexes.put(node, revisionSequence.size());
					link = revisionSequence.size();
				} else {
					Integer csRev = changelogIndexes.get(ge.cset());
					if (csRev == null) {
						throw new HgBadStateException(String.format("Changelog doesn't contain revision %s", ge.cset().shortNotation()));
					}
					link = csRev.intValue();
				}
				final int p1Rev = knownRevision(p1), p2Rev = knownRevision(p2);
				DataAccess patchContent = ge.rawData();
				// store the full revision rather than a delta when the patch is nearly as big (>= 3/4) as the content
				writeComplete = writeComplete || patchContent.length() >= (/* 3/4 of actual */content.length - (content.length >>> 2));
				if (writeComplete) {
					base = revisionSequence.size();
				}
				final byte[] sourceData = writeComplete ? content : patchContent.byteArray();
				final byte[] data;
				ByteArrayOutputStream bos = new ByteArrayOutputStream(content.length);
				DeflaterOutputStream dos = new DeflaterOutputStream(bos);
				dos.write(sourceData);
				dos.close();
				final byte[] compressedData = bos.toByteArray();
				dos = null;
				bos = null;
				final Byte dataPrefix;
				if (compressedData.length >= (sourceData.length - (sourceData.length >>> 2))) {
					// compression wasn't too effective
					data = sourceData;
					dataPrefix = 'u'; // uncompressed revlog data is prefixed with 'u'
				} else {
					data = compressedData;
					dataPrefix = null;
				}

				ByteBuffer header = ByteBuffer.allocate(64 /* REVLOGV1_RECORD_SIZE */);
				if (offset == 0) {
					final int INLINEDATA = 1 << 16;
					header.putInt(1 /* RevlogNG */ | INLINEDATA);
					header.putInt(0);
				} else {
					header.putLong(offset << 16); // 6-byte offset in the high bits, 2-byte flags are zero
				}
				final int compressedLen = data.length + (dataPrefix == null ? 0 : 1);
				header.putInt(compressedLen);
				header.putInt(content.length);
				header.putInt(base);
				header.putInt(link);
				header.putInt(p1Rev);
				header.putInt(p2Rev);
				header.put(node.toByteArray());
				// assume 12 bytes left are zeros
				indexFile.write(header.array());
				if (dataPrefix != null) {
					indexFile.write(dataPrefix.byteValue());
				}
				indexFile.write(data);
				//
				offset += compressedLen;
				revisionSequence.add(node);
				prevRevContent.done();
				prevRevContent = new ByteArrayDataAccess(content);
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
			return true;
		}
	}
}