/*
 * Copyright (c) 2011 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.core;

import static org.tmatesoft.hg.core.Nodeid.NULL;
import static org.tmatesoft.hg.internal.RequiresFile.*;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.TreeMap;
import java.util.zip.DeflaterOutputStream;

import org.tmatesoft.hg.internal.ByteArrayDataAccess;
import org.tmatesoft.hg.internal.DataAccess;
import org.tmatesoft.hg.internal.DigestHelper;
import org.tmatesoft.hg.internal.Internals;
import org.tmatesoft.hg.repo.HgBundle;
import org.tmatesoft.hg.repo.HgBundle.GroupElement;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRemoteRepository;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.CancelledException;
import org.tmatesoft.hg.util.PathRewrite;

/**
 * WORK IN PROGRESS, DO NOT USE
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class HgCloneCommand {

    private File destination;
    private HgRemoteRepository srcRepo;

    public HgCloneCommand() {
    }

    /**
     * @param folder location to become the root of the repository (i.e. where the .hg folder would reside); shall
     * either not exist or be empty
     * @return this for convenience
     */
    public HgCloneCommand destination(File folder) {
        destination = folder;
        return this;
    }

    public HgCloneCommand source(HgRemoteRepository hgRemote) {
        srcRepo = hgRemote;
        return this;
    }
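
    // A minimal usage sketch (assumes an HgRemoteRepository instance named hgRemote has already been
    // obtained elsewhere; how to acquire it is outside the scope of this command):
    //
    //   HgRepository cloned = new HgCloneCommand()
    //           .source(hgRemote)
    //           .destination(new File("clone-dir"))
    //           .execute();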

    public HgRepository execute() throws HgBadArgumentException, HgRemoteConnectionException, HgInvalidFileException, CancelledException {
        if (destination == null) {
            throw new IllegalArgumentException("Destination not set");
        }
        if (srcRepo == null || srcRepo.isInvalid()) {
            throw new HgBadArgumentException("Bad source repository", null);
        }
        if (destination.exists()) {
            if (!destination.isDirectory()) {
                throw new HgBadArgumentException(String.format("%s is not a directory", destination), null);
            } else if (destination.list().length > 0) {
                throw new HgBadArgumentException(String.format("%s shall be empty", destination), null);
            }
        } else {
            destination.mkdirs();
        }
        // when cloning a remote repository that can stream and no revision is specified,
        // the 'stream_out' wire protocol could be used instead
        //
        // pull all changes from the very beginning
        // XXX consult getContext() to check whether a bundle happens to be ready already; if not, read and register it
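        // pulling changes against the null revision yields a bundle with the complete history of the source repository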
        HgBundle completeChanges = srcRepo.getChanges(Collections.singletonList(NULL));
        WriteDownMate mate = new WriteDownMate(destination);
        try {
            // instantiate new repo in the destdir
            mate.initEmptyRepository();
            // pull changes
            completeChanges.inspectAll(mate);
            mate.complete();
        } catch (IOException ex) {
            throw new HgInvalidFileException(getClass().getName(), ex);
        } finally {
            completeChanges.unlink();
        }
        return new HgLookup().detect(destination);
    }


    // 1. process changelog, memorize nodeids to index
    // 2. process manifest, using the map from step 1, collect manifest nodeids
    // 3. process every file, using the map from step 1, and consult the manifest nodeids from step 2 to ensure the repo is correct
    private static class WriteDownMate implements HgBundle.Inspector {
        private final File hgDir;
        private final PathRewrite storagePathHelper;
        private FileOutputStream indexFile;
        private String filename; // human-readable name of the file being written, for log/exception purposes

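        // changelog nodeid -> changelog revision index, filled while the changelog group is processed
        // and consulted later to compute the 'link' field of manifest and file revisions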
        private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>();
        private boolean collectChangelogIndexes = false;

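        // per-revlog write state: index of the current delta chain base, byte offset within the index
        // file being written, and the expanded content of the previously written revision (the delta base)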
        private int base = -1;
        private long offset = 0;
        private DataAccess prevRevContent;
        private final DigestHelper dh = new DigestHelper();
        private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // nodeids in write order; lookups scan from the end

        private final LinkedList<String> fncacheFiles = new LinkedList<String>();
        private Internals implHelper;

        public WriteDownMate(File destDir) {
            hgDir = new File(destDir, ".hg");
            implHelper = new Internals();
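            // storage configuration of the new repository: version 1 plus the 'store', 'fncache'
            // and 'dotencode' layout options (presumably matching what contemporary Mercurial creates by default)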
            implHelper.setStorageConfig(1, STORE | FNCACHE | DOTENCODE);
            storagePathHelper = implHelper.buildDataFilesHelper();
        }

        public void initEmptyRepository() throws IOException {
            implHelper.initEmptyRepository(hgDir);
        }

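        // writes .hg/store/fncache once all groups have been processed, listing the store files recorded in fileStart()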
        public void complete() throws IOException {
            FileOutputStream fncacheFile = new FileOutputStream(new File(hgDir, "store/fncache"));
            for (String s : fncacheFiles) {
                fncacheFile.write(s.getBytes());
                fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat
            }
            fncacheFile.close();
        }

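        // HgBundle.Inspector callbacks: the changelog, the manifest and every file each arrive as a separate group;
        // the *Start() methods reset the per-revlog state and open its index file, the *End() methods close it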
        public void changelogStart() {
            try {
                base = -1;
                offset = 0;
                revisionSequence.clear();
                indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i"));
                collectChangelogIndexes = true;
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void changelogEnd() {
            try {
                if (prevRevContent != null) {
                    prevRevContent.done();
                    prevRevContent = null;
                }
                collectChangelogIndexes = false;
                indexFile.close();
                indexFile = null;
                filename = null;
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void manifestStart() {
            try {
                base = -1;
                offset = 0;
                revisionSequence.clear();
                indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i"));
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void manifestEnd() {
            try {
                if (prevRevContent != null) {
                    prevRevContent.done();
                    prevRevContent = null;
                }
                indexFile.close();
                indexFile = null;
                filename = null;
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void fileStart(String name) {
            try {
                base = -1;
                offset = 0;
                revisionSequence.clear();
                fncacheFiles.add("data/" + name + ".i"); // FIXME this is pure guess,
                // need to investigate more how filenames are kept in fncache
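                // map the repository path to its location under .hg/store (the helper applies the store path encoding)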
                File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString());
                file.getParentFile().mkdirs();
                indexFile = new FileOutputStream(file);
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

        public void fileEnd(String name) {
            try {
                if (prevRevContent != null) {
                    prevRevContent.done();
                    prevRevContent = null;
                }
                indexFile.close();
                indexFile = null;
                filename = null;
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
        }

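        // answers the local revision index (within the revlog currently being written) of the given parent,
        // or -1 for the null revision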
        private int knownRevision(Nodeid p) {
            if (p.isNull()) {
                return -1;
            } else {
                for (int i = revisionSequence.size() - 1; i >= 0; i--) {
                    if (revisionSequence.get(i).equals(p)) {
                        return i;
                    }
                }
            }
            throw new HgBadStateException(String.format("Can't find index of %s for file %s", p.shortNotation(), filename));
        }

        public boolean element(GroupElement ge) {
            try {
                assert indexFile != null;
                boolean writeComplete = false;
                Nodeid p1 = ge.firstParent();
                Nodeid p2 = ge.secondParent();
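                // both parents are null: there is nothing to delta against, so the patch is applied to an empty
                // base and the result is written as a full revision, starting a new delta chain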
                if (p1.isNull() && p2.isNull() /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) {
                    prevRevContent = new ByteArrayDataAccess(new byte[0]);
                    writeComplete = true;
                }
                byte[] content = ge.apply(prevRevContent.byteArray());
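                // integrity check: a Mercurial nodeid is the SHA-1 digest over the parent nodeids and the revision content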
                byte[] calculated = dh.sha1(p1, p2, content).asBinary();
                final Nodeid node = ge.node();
                if (!node.equalsTo(calculated)) {
                    throw new HgBadStateException(String.format("Checksum failed: expected %s, calculated %s. File %s", node, calculated, filename));
                }
                final int link;
                if (collectChangelogIndexes) {
                    changelogIndexes.put(node, revisionSequence.size());
                    link = revisionSequence.size();
                } else {
                    Integer csRev = changelogIndexes.get(ge.cset());
                    if (csRev == null) {
                        throw new HgBadStateException(String.format("Changelog doesn't contain revision %s of %s", ge.cset().shortNotation(), filename));
                    }
                    link = csRev.intValue();
                }
                final int p1Rev = knownRevision(p1), p2Rev = knownRevision(p2);
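                // write a delta only while it stays noticeably smaller than the full text; once the patch reaches
                // roughly 3/4 of the full content, write the complete revision and start a new delta chain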
                byte[] patchContent = ge.rawDataByteArray();
                writeComplete = writeComplete || patchContent.length >= (/* 3/4 of actual */ content.length - (content.length >>> 2));
                if (writeComplete) {
                    base = revisionSequence.size();
                }
                final byte[] sourceData = writeComplete ? content : patchContent;
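                // deflate the chosen data; if compression saves less than about a quarter, the data is stored
                // uncompressed instead, marked with the revlog's 'u' prefix byte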
                final byte[] data;
                ByteArrayOutputStream bos = new ByteArrayOutputStream(content.length);
                DeflaterOutputStream dos = new DeflaterOutputStream(bos);
                dos.write(sourceData);
                dos.close();
                final byte[] compressedData = bos.toByteArray();
                dos = null;
                bos = null;
                final Byte dataPrefix;
                if (compressedData.length >= (sourceData.length - (sourceData.length >>> 2))) {
                    // compression wasn't too effective,
                    data = sourceData;
                    dataPrefix = 'u';
                } else {
                    data = compressedData;
                    dataPrefix = null;
                }

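                // 64-byte RevlogNG index record; the very first record also carries the format version and the
                // inline-data flag (revision data interleaved with the index in the same .i file)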
                ByteBuffer header = ByteBuffer.allocate(64 /* REVLOGV1_RECORD_SIZE */);
                if (offset == 0) {
                    final int INLINEDATA = 1 << 16;
                    header.putInt(1 /* RevlogNG */ | INLINEDATA);
                    header.putInt(0);
                } else {
                    header.putLong(offset << 16);
                }
                final int compressedLen = data.length + (dataPrefix == null ? 0 : 1);
                header.putInt(compressedLen);
                header.putInt(content.length);
                header.putInt(base);
                header.putInt(link);
                header.putInt(p1Rev);
                header.putInt(p2Rev);
                header.put(node.toByteArray());
                // assume 12 bytes left are zeros
                indexFile.write(header.array());
                if (dataPrefix != null) {
                    indexFile.write(dataPrefix.byteValue());
                }
                indexFile.write(data);
                //
                offset += compressedLen;
                revisionSequence.add(node);
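                // release the previous base and keep this revision's expanded content as the delta base for the next group element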
                prevRevContent.done();
                prevRevContent = new ByteArrayDataAccess(content);
            } catch (IOException ex) {
                throw new HgBadStateException(ex);
            }
            return true;
        }
    }

}