Mercurial > jhg
comparison src/org/tmatesoft/hg/core/HgCloneCommand.java @ 186:44a34baabea0
Clone refactored into a command. HgBundle needs means to control its lifecycle, to be deleted when no longer needed
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
|---|---|
| date | Thu, 14 Apr 2011 00:47:04 +0200 |
| parents | |
| children | 9b99d27aeddc |
comparison
equal
deleted
inserted
replaced
| 185:c6fa4dbfc458 | 186:44a34baabea0 |
|---|---|
| 1 /* | |
| 2 * Copyright (c) 2011 TMate Software Ltd | |
| 3 * | |
| 4 * This program is free software; you can redistribute it and/or modify | |
| 5 * it under the terms of the GNU General Public License as published by | |
| 6 * the Free Software Foundation; version 2 of the License. | |
| 7 * | |
| 8 * This program is distributed in the hope that it will be useful, | |
| 9 * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
| 10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
| 11 * GNU General Public License for more details. | |
| 12 * | |
| 13 * For information on how to redistribute this software under | |
| 14 * the terms of a license other than GNU General Public License | |
| 15 * contact TMate Software at support@hg4j.com | |
| 16 */ | |
| 17 package org.tmatesoft.hg.core; | |
| 18 | |
| 19 import static org.tmatesoft.hg.core.Nodeid.NULL; | |
| 20 import static org.tmatesoft.hg.internal.RequiresFile.*; | |
| 21 | |
| 22 import java.io.ByteArrayOutputStream; | |
| 23 import java.io.File; | |
| 24 import java.io.FileOutputStream; | |
| 25 import java.io.IOException; | |
| 26 import java.nio.ByteBuffer; | |
| 27 import java.util.ArrayList; | |
| 28 import java.util.Collections; | |
| 29 import java.util.LinkedList; | |
| 30 import java.util.TreeMap; | |
| 31 import java.util.zip.DeflaterOutputStream; | |
| 32 | |
| 33 import org.tmatesoft.hg.internal.ByteArrayDataAccess; | |
| 34 import org.tmatesoft.hg.internal.DataAccess; | |
| 35 import org.tmatesoft.hg.internal.DigestHelper; | |
| 36 import org.tmatesoft.hg.internal.Internals; | |
| 37 import org.tmatesoft.hg.repo.HgBundle; | |
| 38 import org.tmatesoft.hg.repo.HgBundle.GroupElement; | |
| 39 import org.tmatesoft.hg.repo.HgLookup; | |
| 40 import org.tmatesoft.hg.repo.HgRemoteRepository; | |
| 41 import org.tmatesoft.hg.repo.HgRepository; | |
| 42 import org.tmatesoft.hg.util.CancelledException; | |
| 43 import org.tmatesoft.hg.util.PathRewrite; | |
| 44 | |
| 45 /** | |
| 46 * WORK IN PROGRESS, DO NOT USE | |
| 47 * | |
| 48 * @author Artem Tikhomirov | |
| 49 * @author TMate Software Ltd. | |
| 50 */ | |
| 51 public class HgCloneCommand { | |
| 52 | |
| 53 private File destination; | |
| 54 private HgRemoteRepository srcRepo; | |
| 55 | |
| 56 public HgCloneCommand() { | |
| 57 } | |
| 58 | |
| 59 public HgCloneCommand destination(File folder) { | |
| 60 destination = folder; | |
| 61 return this; | |
| 62 } | |
| 63 | |
| 64 public HgCloneCommand source(HgRemoteRepository hgRemote) { | |
| 65 srcRepo = hgRemote; | |
| 66 return this; | |
| 67 } | |
| 68 | |
| 69 public HgRepository execute() throws HgException, CancelledException { | |
| 70 if (destination == null) { | |
| 71 throw new HgBadArgumentException("Destination not set", null); | |
| 72 } | |
| 73 if (srcRepo == null || srcRepo.isInvalid()) { | |
| 74 throw new HgBadArgumentException("Bad source repository", null); | |
| 75 } | |
| 76 if (destination.exists()) { | |
| 77 if (!destination.isDirectory()) { | |
| 78 throw new HgBadArgumentException(String.format("%s is not a directory", destination), null); | |
| 79 } else if (destination.list().length > 0) { | |
| 80 throw new HgBadArgumentException(String.format("%s shall be empty", destination), null); | |
| 81 } | |
| 82 } else { | |
| 83 destination.mkdirs(); | |
| 84 } | |
| 85 // if cloning remote repo, which can stream and no revision is specified - | |
| 86 // can use 'stream_out' wireproto | |
| 87 // | |
| 88 // pull all changes from the very beginning | |
| 89 // XXX consult getContext() if by any chance has a bundle ready, if not, then read and register | |
| 90 HgBundle completeChanges = srcRepo.getChanges(Collections.singletonList(NULL)); | |
| 91 WriteDownMate mate = new WriteDownMate(destination); | |
| 92 try { | |
| 93 // instantiate new repo in the destdir | |
| 94 mate.initEmptyRepository(); | |
| 95 // pull changes | |
| 96 completeChanges.inspectAll(mate); | |
| 97 mate.complete(); | |
| 98 } catch (IOException ex) { | |
| 99 throw new HgException(ex); | |
| 100 } finally { | |
| 101 completeChanges.unlink(); | |
| 102 } | |
| 103 return new HgLookup().detect(destination); | |
| 104 } | |
| 105 | |
| 106 | |
| 107 // 1. process changelog, memorize nodeids to index | |
| 108 // 2. process manifest, using map from step 1, collect manifest nodeids | |
| 109 // 3. process every file, using map from step 1, and consult set from step 2 to ensure repo is correct | |
| 110 private static class WriteDownMate implements HgBundle.Inspector { | |
| 111 private final File hgDir; | |
| 112 private FileOutputStream indexFile; | |
| 113 private final PathRewrite storagePathHelper; | |
| 114 | |
| 115 private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>(); | |
| 116 private boolean collectChangelogIndexes = false; | |
| 117 | |
| 118 private int base = -1; | |
| 119 private long offset = 0; | |
| 120 private DataAccess prevRevContent; | |
| 121 private final DigestHelper dh = new DigestHelper(); | |
| 122 private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // last visited nodes first | |
| 123 | |
| 124 private final LinkedList<String> fncacheFiles = new LinkedList<String>(); | |
| 125 | |
| 126 public WriteDownMate(File destDir) { | |
| 127 hgDir = new File(destDir, ".hg"); | |
| 128 Internals i = new Internals(); | |
| 129 i.setStorageConfig(1, STORE | FNCACHE | DOTENCODE); | |
| 130 storagePathHelper = i.buildDataFilesHelper(); | |
| 131 } | |
| 132 | |
| 133 public void initEmptyRepository() throws IOException { | |
| 134 hgDir.mkdir(); | |
| 135 FileOutputStream requiresFile = new FileOutputStream(new File(hgDir, "requires")); | |
| 136 requiresFile.write("revlogv1\nstore\nfncache\ndotencode\n".getBytes()); | |
| 137 requiresFile.close(); | |
| 138 new File(hgDir, "store").mkdir(); // with that, hg verify says ok. | |
| 139 } | |
| 140 | |
| 141 public void complete() throws IOException { | |
| 142 FileOutputStream fncacheFile = new FileOutputStream(new File(hgDir, "store/fncache")); | |
| 143 for (String s : fncacheFiles) { | |
| 144 fncacheFile.write(s.getBytes()); | |
| 145 fncacheFile.write(0x0A); // http://mercurial.selenic.com/wiki/fncacheRepoFormat | |
| 146 } | |
| 147 fncacheFile.close(); | |
| 148 } | |
| 149 | |
| 150 public void changelogStart() { | |
| 151 try { | |
| 152 base = -1; | |
| 153 offset = 0; | |
| 154 revisionSequence.clear(); | |
| 155 indexFile = new FileOutputStream(new File(hgDir, "store/00changelog.i")); | |
| 156 collectChangelogIndexes = true; | |
| 157 } catch (IOException ex) { | |
| 158 throw new HgBadStateException(ex); | |
| 159 } | |
| 160 } | |
| 161 | |
| 162 public void changelogEnd() { | |
| 163 try { | |
| 164 if (prevRevContent != null) { | |
| 165 prevRevContent.done(); | |
| 166 prevRevContent = null; | |
| 167 } | |
| 168 collectChangelogIndexes = false; | |
| 169 indexFile.close(); | |
| 170 indexFile = null; | |
| 171 } catch (IOException ex) { | |
| 172 throw new HgBadStateException(ex); | |
| 173 } | |
| 174 } | |
| 175 | |
| 176 public void manifestStart() { | |
| 177 try { | |
| 178 base = -1; | |
| 179 offset = 0; | |
| 180 revisionSequence.clear(); | |
| 181 indexFile = new FileOutputStream(new File(hgDir, "store/00manifest.i")); | |
| 182 } catch (IOException ex) { | |
| 183 throw new HgBadStateException(ex); | |
| 184 } | |
| 185 } | |
| 186 | |
| 187 public void manifestEnd() { | |
| 188 try { | |
| 189 if (prevRevContent != null) { | |
| 190 prevRevContent.done(); | |
| 191 prevRevContent = null; | |
| 192 } | |
| 193 indexFile.close(); | |
| 194 indexFile = null; | |
| 195 } catch (IOException ex) { | |
| 196 throw new HgBadStateException(ex); | |
| 197 } | |
| 198 } | |
| 199 | |
| 200 public void fileStart(String name) { | |
| 201 try { | |
| 202 base = -1; | |
| 203 offset = 0; | |
| 204 revisionSequence.clear(); | |
| 205 fncacheFiles.add("data/" + name + ".i"); // FIXME this is pure guess, | |
| 206 // need to investigate more how filenames are kept in fncache | |
| 207 File file = new File(hgDir, storagePathHelper.rewrite(name)); | |
| 208 file.getParentFile().mkdirs(); | |
| 209 indexFile = new FileOutputStream(file); | |
| 210 } catch (IOException ex) { | |
| 211 throw new HgBadStateException(ex); | |
| 212 } | |
| 213 } | |
| 214 | |
| 215 public void fileEnd(String name) { | |
| 216 try { | |
| 217 if (prevRevContent != null) { | |
| 218 prevRevContent.done(); | |
| 219 prevRevContent = null; | |
| 220 } | |
| 221 indexFile.close(); | |
| 222 indexFile = null; | |
| 223 } catch (IOException ex) { | |
| 224 throw new HgBadStateException(ex); | |
| 225 } | |
| 226 } | |
| 227 | |
| 228 private int knownRevision(Nodeid p) { | |
| 229 if (NULL.equals(p)) { | |
| 230 return -1; | |
| 231 } else { | |
| 232 for (int i = revisionSequence.size() - 1; i >= 0; i--) { | |
| 233 if (revisionSequence.get(i).equals(p)) { | |
| 234 return i; | |
| 235 } | |
| 236 } | |
| 237 } | |
| 238 throw new HgBadStateException(String.format("Can't find index of %s", p.shortNotation())); | |
| 239 } | |
| 240 | |
| 241 public boolean element(GroupElement ge) { | |
| 242 try { | |
| 243 assert indexFile != null; | |
| 244 boolean writeComplete = false; | |
| 245 Nodeid p1 = ge.firstParent(); | |
| 246 Nodeid p2 = ge.secondParent(); | |
| 247 if (NULL.equals(p1) && NULL.equals(p2) /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) { | |
| 248 prevRevContent = new ByteArrayDataAccess(new byte[0]); | |
| 249 writeComplete = true; | |
| 250 } | |
| 251 byte[] content = ge.apply(prevRevContent); | |
| 252 byte[] calculated = dh.sha1(p1, p2, content).asBinary(); | |
| 253 final Nodeid node = ge.node(); | |
| 254 if (!node.equalsTo(calculated)) { | |
| 255 throw new HgBadStateException("Checksum failed"); | |
| 256 } | |
| 257 final int link; | |
| 258 if (collectChangelogIndexes) { | |
| 259 changelogIndexes.put(node, revisionSequence.size()); | |
| 260 link = revisionSequence.size(); | |
| 261 } else { | |
| 262 Integer csRev = changelogIndexes.get(ge.cset()); | |
| 263 if (csRev == null) { | |
| 264 throw new HgBadStateException(String.format("Changelog doesn't contain revision %s", ge.cset().shortNotation())); | |
| 265 } | |
| 266 link = csRev.intValue(); | |
| 267 } | |
| 268 final int p1Rev = knownRevision(p1), p2Rev = knownRevision(p2); | |
| 269 DataAccess patchContent = ge.rawData(); | |
| 270 writeComplete = writeComplete || patchContent.length() >= (/* 3/4 of actual */content.length - (content.length >>> 2)); | |
| 271 if (writeComplete) { | |
| 272 base = revisionSequence.size(); | |
| 273 } | |
| 274 final byte[] sourceData = writeComplete ? content : patchContent.byteArray(); | |
| 275 final byte[] data; | |
| 276 ByteArrayOutputStream bos = new ByteArrayOutputStream(content.length); | |
| 277 DeflaterOutputStream dos = new DeflaterOutputStream(bos); | |
| 278 dos.write(sourceData); | |
| 279 dos.close(); | |
| 280 final byte[] compressedData = bos.toByteArray(); | |
| 281 dos = null; | |
| 282 bos = null; | |
| 283 final Byte dataPrefix; | |
| 284 if (compressedData.length >= (sourceData.length - (sourceData.length >>> 2))) { | |
| 285 // compression wasn't too effective, | |
| 286 data = sourceData; | |
| 287 dataPrefix = 'u'; | |
| 288 } else { | |
| 289 data = compressedData; | |
| 290 dataPrefix = null; | |
| 291 } | |
| 292 | |
| 293 ByteBuffer header = ByteBuffer.allocate(64 /* REVLOGV1_RECORD_SIZE */); | |
| 294 if (offset == 0) { | |
| 295 final int INLINEDATA = 1 << 16; | |
| 296 header.putInt(1 /* RevlogNG */ | INLINEDATA); | |
| 297 header.putInt(0); | |
| 298 } else { | |
| 299 header.putLong(offset << 16); | |
| 300 } | |
| 301 final int compressedLen = data.length + (dataPrefix == null ? 0 : 1); | |
| 302 header.putInt(compressedLen); | |
| 303 header.putInt(content.length); | |
| 304 header.putInt(base); | |
| 305 header.putInt(link); | |
| 306 header.putInt(p1Rev); | |
| 307 header.putInt(p2Rev); | |
| 308 header.put(node.toByteArray()); | |
| 309 // assume 12 bytes left are zeros | |
| 310 indexFile.write(header.array()); | |
| 311 if (dataPrefix != null) { | |
| 312 indexFile.write(dataPrefix.byteValue()); | |
| 313 } | |
| 314 indexFile.write(data); | |
| 315 // | |
| 316 offset += compressedLen; | |
| 317 revisionSequence.add(node); | |
| 318 prevRevContent.done(); | |
| 319 prevRevContent = new ByteArrayDataAccess(content); | |
| 320 } catch (IOException ex) { | |
| 321 throw new HgBadStateException(ex); | |
| 322 } | |
| 323 return true; | |
| 324 } | |
| 325 } | |
| 326 | |
| 327 } |
