kitaev@213: /* kitaev@213: * Copyright (c) 2011 TMate Software Ltd kitaev@213: * kitaev@213: * This program is free software; you can redistribute it and/or modify kitaev@213: * it under the terms of the GNU General Public License as published by kitaev@213: * the Free Software Foundation; version 2 of the License. kitaev@213: * kitaev@213: * This program is distributed in the hope that it will be useful, kitaev@213: * but WITHOUT ANY WARRANTY; without even the implied warranty of kitaev@213: * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the kitaev@213: * GNU General Public License for more details. kitaev@213: * kitaev@213: * For information on how to redistribute this software under kitaev@213: * the terms of a license other than GNU General Public License kitaev@213: * contact TMate Software at support@hg4j.com kitaev@213: */ kitaev@213: package org.tmatesoft.hg.core; kitaev@213: kitaev@213: import static org.tmatesoft.hg.core.Nodeid.NULL; kitaev@213: import static org.tmatesoft.hg.internal.RequiresFile.*; kitaev@213: kitaev@213: import java.io.ByteArrayOutputStream; kitaev@213: import java.io.File; kitaev@213: import java.io.FileOutputStream; kitaev@213: import java.io.IOException; kitaev@213: import java.nio.ByteBuffer; kitaev@213: import java.util.ArrayList; kitaev@213: import java.util.Collections; kitaev@213: import java.util.LinkedList; kitaev@213: import java.util.TreeMap; kitaev@213: import java.util.zip.DeflaterOutputStream; kitaev@213: kitaev@213: import org.tmatesoft.hg.internal.ByteArrayDataAccess; kitaev@213: import org.tmatesoft.hg.internal.DataAccess; kitaev@213: import org.tmatesoft.hg.internal.DigestHelper; kitaev@213: import org.tmatesoft.hg.internal.Internals; kitaev@213: import org.tmatesoft.hg.repo.HgBundle; kitaev@213: import org.tmatesoft.hg.repo.HgBundle.GroupElement; kitaev@213: import org.tmatesoft.hg.repo.HgLookup; kitaev@213: import org.tmatesoft.hg.repo.HgRemoteRepository; kitaev@213: import 
org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.CancelledException;
import org.tmatesoft.hg.util.PathRewrite;

/**
 * Command to clone a remote repository into a local directory, pulling the complete
 * history as a bundle and writing revlogs (changelog, manifest, data files) locally.
 * <p>
 * WORK IN PROGRESS, DO NOT USE
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class HgCloneCommand {

	private File destination;
	private HgRemoteRepository srcRepo;

	public HgCloneCommand() {
	}

	/**
	 * @param folder location to become root of the repository (i.e. where .hg folder would reside). Either
	 * shall not exist or be empty otherwise.
	 * @return this for convenience
	 */
	public HgCloneCommand destination(File folder) {
		destination = folder;
		return this;
	}

	/**
	 * @param hgRemote remote repository to pull all changes from
	 * @return this for convenience
	 */
	public HgCloneCommand source(HgRemoteRepository hgRemote) {
		srcRepo = hgRemote;
		return this;
	}

	/**
	 * Performs the clone: validates arguments, initializes an empty repository at the
	 * destination, then pulls and writes down the complete change bundle.
	 *
	 * @return the freshly cloned repository, detected at the destination
	 * @throws HgException on I/O failure or bad arguments (destination/source unset or invalid)
	 * @throws CancelledException if the operation is cancelled
	 */
	public HgRepository execute() throws HgException, CancelledException {
		if (destination == null) {
			throw new HgBadArgumentException("Destination not set", null);
		}
		if (srcRepo == null || srcRepo.isInvalid()) {
			throw new HgBadArgumentException("Bad source repository", null);
		}
		if (destination.exists()) {
			if (!destination.isDirectory()) {
				throw new HgBadArgumentException(String.format("%s is not a directory", destination), null);
			} else if (destination.list().length > 0) {
				// was "% shall be empty" — bare '%' is an invalid conversion and would throw
				// UnknownFormatConversionException instead of reporting the real problem
				throw new HgBadArgumentException(String.format("%s shall be empty", destination), null);
			}
		} else {
			destination.mkdirs();
		}
		// if cloning remote repo, which can stream and no revision is specified -
		// can use 'stream_out' wireproto
		//
		// pull all changes from the very beginning
		// XXX consult getContext() if by any chance has a bundle ready, if not, then read and register
		HgBundle completeChanges = srcRepo.getChanges(Collections.singletonList(NULL));
		WriteDownMate mate = new WriteDownMate(destination);
		try {
			// instantiate new repo in the destdir
			mate.initEmptyRepository();
			// pull changes
			completeChanges.inspectAll(mate);
			mate.complete();
		} catch (IOException ex) {
			throw new HgException(ex);
		} finally {
			completeChanges.unlink();
		}
		return new HgLookup().detect(destination);
	}


	// 1. process changelog, memorize nodeids to index
	// 2. process manifest, using map from step 3, collect manifest nodeids
	// 3. process every file, using map from 3, and consult set from step 4 to ensure repo is correct
	/**
	 * Bundle inspector that writes each group (changelog, manifest, per-file) down as an
	 * inline revlog (.i file) in the new repository's store.
	 */
	private static class WriteDownMate implements HgBundle.Inspector {
		private final File hgDir;
		private final PathRewrite storagePathHelper;
		private FileOutputStream indexFile;
		private String filename; // human-readable name of the file being written, for log/exception purposes

		// changeset nodeid -> local changelog revision index; filled while the changelog
		// group is processed, consulted later to resolve linkrev for manifest/file revisions
		private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>();
		private boolean collectChangelogIndexes = false;

		private int base = -1; // revision index the current delta chain is based on
		private long offset = 0; // running byte offset of revision data within the inline revlog
		private DataAccess prevRevContent; // full content of the previously written revision (patch base)
		private final DigestHelper dh = new DigestHelper();
		private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // last visited nodes first

		// paths to record in store/fncache once all files are written
		private final LinkedList<String> fncacheFiles = new LinkedList<String>();
		private Internals implHelper;

		public WriteDownMate(File destDir) {
			hgDir = new File(destDir, ".hg");
			implHelper = new Internals();
			implHelper.setStorageConfig(1, STORE | FNCACHE | DOTENCODE);
			storagePathHelper = implHelper.buildDataFilesHelper();
		}

		public void initEmptyRepository() throws IOException {
			implHelper.initEmptyRepository(hgDir);
		}

		/**
		 * Finalizes the clone: writes the list of tracked store files to store/fncache.
		 * The stream is now closed even if a write fails (was leaked on exception).
		 */
		public void complete() throws IOException {
			FileOutputStream fncacheFile = new FileOutputStream(new File(hgDir, "store/fncache"));
			try {
				for (String s : fncacheFiles) {
					// NOTE(review): uses platform default charset; fncache is expected to hold
					// store-encoded (ASCII-safe) paths — confirm before relying on non-ASCII names
					fncacheFile.write(s.getBytes());
					fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
				}
			} finally {
				fncacheFile.close();
			}
		}

		public void changelogStart() {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i"));
				collectChangelogIndexes = true;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void changelogEnd() {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				collectChangelogIndexes = false;
				indexFile.close();
				indexFile = null;
				filename = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void manifestStart() {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i"));
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void manifestEnd() {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				indexFile.close();
				indexFile = null;
				filename = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void fileStart(String name) {
			try {
				base = -1;
				offset = 0;
				revisionSequence.clear();
				fncacheFiles.add("data/" + name + ".i"); // FIXME this is pure guess,
				// need to investigate more how filenames are kept in fncache
				File file = new File(hgDir, filename = storagePathHelper.rewrite(name));
				file.getParentFile().mkdirs();
				indexFile = new FileOutputStream(file);
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		public void fileEnd(String name) {
			try {
				if (prevRevContent != null) {
					prevRevContent.done();
					prevRevContent = null;
				}
				indexFile.close();
				indexFile = null;
				filename = null;
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
		}

		/**
		 * Maps a parent nodeid to its local revision index within the revlog being written.
		 *
		 * @param p parent nodeid; NULL maps to -1 (no parent)
		 * @return local revision index of {@code p}
		 * @throws HgBadStateException if the parent was not seen in this revlog yet
		 */
		private int knownRevision(Nodeid p) {
			if (NULL.equals(p)) {
				return -1;
			} else {
				// linear scan from the end: parents are most often recent revisions
				for (int i = revisionSequence.size() - 1; i >= 0; i--) {
					if (revisionSequence.get(i).equals(p)) {
						return i;
					}
				}
			}
			throw new HgBadStateException(String.format("Can't find index of %s for file %s", p.shortNotation(), filename));
		}

		/**
		 * Writes one changegroup element as the next revision of the current inline revlog:
		 * verifies its sha1 against the declared node, resolves linkrev and parent indexes,
		 * decides between full snapshot and delta, compresses, and emits the RevlogNG record.
		 */
		public boolean element(GroupElement ge) {
			try {
				assert indexFile != null;
				boolean writeComplete = false;
				Nodeid p1 = ge.firstParent();
				Nodeid p2 = ge.secondParent();
				if (NULL.equals(p1) && NULL.equals(p2) /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) {
					// root revision: patch is applied against empty content and stored in full
					prevRevContent = new ByteArrayDataAccess(new byte[0]);
					writeComplete = true;
				}
				byte[] content = ge.apply(prevRevContent);
				byte[] calculated = dh.sha1(p1, p2, content).asBinary();
				final Nodeid node = ge.node();
				if (!node.equalsTo(calculated)) {
					// NOTE(review): 'calculated' is a byte[], %s prints its array reference,
					// not the digest — format it as hex once DigestHelper's hex API is confirmed
					throw new HgBadStateException(String.format("Checksum failed: expected %s, calculated %s. File %s", node, calculated, filename));
				}
				final int link;
				if (collectChangelogIndexes) {
					// changelog revision links to itself; remember the mapping for later groups
					changelogIndexes.put(node, revisionSequence.size());
					link = revisionSequence.size();
				} else {
					Integer csRev = changelogIndexes.get(ge.cset());
					if (csRev == null) {
						throw new HgBadStateException(String.format("Changelog doesn't contain revision %s of %s", ge.cset().shortNotation(), filename));
					}
					link = csRev.intValue();
				}
				final int p1Rev = knownRevision(p1), p2Rev = knownRevision(p2);
				DataAccess patchContent = ge.rawData();
				// store a full snapshot when the patch isn't substantially smaller than the content
				writeComplete = writeComplete || patchContent.length() >= (/* 3/4 of actual */content.length - (content.length >>> 2));
				if (writeComplete) {
					base = revisionSequence.size();
				}
				final byte[] sourceData = writeComplete ? content : patchContent.byteArray();
				final byte[] data;
				ByteArrayOutputStream bos = new ByteArrayOutputStream(content.length);
				DeflaterOutputStream dos = new DeflaterOutputStream(bos);
				dos.write(sourceData);
				dos.close();
				final byte[] compressedData = bos.toByteArray();
				dos = null;
				bos = null;
				final Byte dataPrefix;
				if (compressedData.length >= (sourceData.length - (sourceData.length >>> 2))) {
					// compression wasn't too effective, store uncompressed with 'u' marker
					data = sourceData;
					dataPrefix = 'u';
				} else {
					data = compressedData;
					dataPrefix = null;
				}

				ByteBuffer header = ByteBuffer.allocate(64 /* REVLOGV1_RECORD_SIZE */);
				if (offset == 0) {
					// first record doubles as the revlog header: version + inline-data flag
					final int INLINEDATA = 1 << 16;
					header.putInt(1 /* RevlogNG */ | INLINEDATA);
					header.putInt(0);
				} else {
					// 6 bytes of offset + 2 bytes of flags (zero)
					header.putLong(offset << 16);
				}
				final int compressedLen = data.length + (dataPrefix == null ? 0 : 1);
				header.putInt(compressedLen);
				header.putInt(content.length);
				header.putInt(base);
				header.putInt(link);
				header.putInt(p1Rev);
				header.putInt(p2Rev);
				header.put(node.toByteArray());
				// assume 12 bytes left are zeros
				indexFile.write(header.array());
				if (dataPrefix != null) {
					indexFile.write(dataPrefix.byteValue());
				}
				indexFile.write(data);
				//
				offset += compressedLen;
				revisionSequence.add(node);
				prevRevContent.done();
				prevRevContent = new ByteArrayDataAccess(content);
			} catch (IOException ex) {
				throw new HgBadStateException(ex);
			}
			return true;
		}
	}

}