comparison src/org/tmatesoft/hg/core/HgCloneCommand.java @ 530:0f6fa88e2162
Towards commit command: refactor clone, extract pieces to reuse. Describe a defect discovered when a bundle has a few patches with 0,0 parents
author    Artem Tikhomirov <tikhomirov.artem@gmail.com>
date      Wed, 23 Jan 2013 17:46:12 +0100
parents   a41d955dc360
children  95c2f43008bd
529:95bdcf75e71e (before) | 530:0f6fa88e2162 (after)
1 /* | 1 /* |
2 * Copyright (c) 2011-2012 TMate Software Ltd | 2 * Copyright (c) 2011-2013 TMate Software Ltd |
3 * | 3 * |
4 * This program is free software; you can redistribute it and/or modify | 4 * This program is free software; you can redistribute it and/or modify |
5 * it under the terms of the GNU General Public License as published by | 5 * it under the terms of the GNU General Public License as published by |
6 * the Free Software Foundation; version 2 of the License. | 6 * the Free Software Foundation; version 2 of the License. |
7 * | 7 * |
17 package org.tmatesoft.hg.core; | 17 package org.tmatesoft.hg.core; |
18 | 18 |
19 import static org.tmatesoft.hg.core.Nodeid.NULL; | 19 import static org.tmatesoft.hg.core.Nodeid.NULL; |
20 import static org.tmatesoft.hg.internal.RequiresFile.*; | 20 import static org.tmatesoft.hg.internal.RequiresFile.*; |
21 | 21 |
22 import java.io.ByteArrayOutputStream; | |
23 import java.io.File; | 22 import java.io.File; |
24 import java.io.FileOutputStream; | 23 import java.io.FileOutputStream; |
25 import java.io.IOException; | 24 import java.io.IOException; |
26 import java.nio.ByteBuffer; | |
27 import java.util.ArrayList; | 25 import java.util.ArrayList; |
28 import java.util.Collections; | 26 import java.util.Collections; |
29 import java.util.LinkedList; | 27 import java.util.LinkedList; |
30 import java.util.TreeMap; | 28 import java.util.TreeMap; |
31 import java.util.zip.DeflaterOutputStream; | |
32 | 29 |
33 import org.tmatesoft.hg.internal.ByteArrayDataAccess; | 30 import org.tmatesoft.hg.internal.ByteArrayDataAccess; |
34 import org.tmatesoft.hg.internal.DataAccess; | 31 import org.tmatesoft.hg.internal.DataAccess; |
35 import org.tmatesoft.hg.internal.DigestHelper; | 32 import org.tmatesoft.hg.internal.DigestHelper; |
36 import org.tmatesoft.hg.internal.Lifecycle; | 33 import org.tmatesoft.hg.internal.Lifecycle; |
37 import org.tmatesoft.hg.internal.RepoInitializer; | 34 import org.tmatesoft.hg.internal.RepoInitializer; |
35 import org.tmatesoft.hg.internal.RevlogCompressor; | |
36 import org.tmatesoft.hg.internal.RevlogStreamWriter; | |
38 import org.tmatesoft.hg.repo.HgBundle; | 37 import org.tmatesoft.hg.repo.HgBundle; |
39 import org.tmatesoft.hg.repo.HgBundle.GroupElement; | 38 import org.tmatesoft.hg.repo.HgBundle.GroupElement; |
40 import org.tmatesoft.hg.repo.HgInvalidControlFileException; | 39 import org.tmatesoft.hg.repo.HgInvalidControlFileException; |
41 import org.tmatesoft.hg.repo.HgInvalidFileException; | 40 import org.tmatesoft.hg.repo.HgInvalidFileException; |
42 import org.tmatesoft.hg.repo.HgInvalidStateException; | 41 import org.tmatesoft.hg.repo.HgInvalidStateException; |
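The import hunk summarizes the refactoring: the ByteArrayOutputStream, ByteBuffer and DeflaterOutputStream plumbing leaves this class, and two helpers extracted for reuse arrive in its place. Below is a minimal sketch of the RevlogStreamWriter.HeaderWriter surface the new code relies on, inferred purely from the call sites in this diff and from the ByteBuffer code it replaces; the real class in org.tmatesoft.hg.internal may differ in names and detail.

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.ByteBuffer;

    // Sketch only: fluent builder for one REVLOGV1 (RevlogNG) index entry.
    static class HeaderWriter {
        private final boolean inlineData; // revlog data interleaved into the .i file
        private long offset;              // running offset of this entry's data
        private int baseRevision, linkRevision, p1, p2;
        private int contentLength, compressedLength;
        private Nodeid node;

        HeaderWriter(boolean inline)         { inlineData = inline; }
        HeaderWriter offset(long o)          { offset = o; return this; }
        HeaderWriter baseRevision(int base)  { baseRevision = base; return this; }
        HeaderWriter linkRevision(int link)  { linkRevision = link; return this; }
        HeaderWriter parents(int r1, int r2) { p1 = r1; p2 = r2; return this; }
        HeaderWriter nodeid(Nodeid n)        { node = n; return this; }

        HeaderWriter length(int content, int compressed) {
            contentLength = content;
            compressedLength = compressed;
            return this;
        }

        void write(OutputStream indexFile) throws IOException {
            ByteBuffer header = ByteBuffer.allocate(64 /* REVLOGV1_RECORD_SIZE */);
            if (offset == 0) {
                // the first entry carries version and flags instead of an offset
                header.putInt(1 /* RevlogNG */ | (inlineData ? 1 << 16 /* INLINEDATA */ : 0));
                header.putInt(0);
            } else {
                header.putLong(offset << 16);
            }
            header.putInt(compressedLength);
            header.putInt(contentLength);
            header.putInt(baseRevision);
            header.putInt(linkRevision);
            header.putInt(p1);
            header.putInt(p2);
            header.put(node.toByteArray()); // 20 bytes; the remaining 12 stay zero
            indexFile.write(header.array());
            offset += compressedLength;     // keep the offset ready for the next entry
        }
    }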
97 } | 96 } |
98 if (destination.exists()) { | 97 if (destination.exists()) { |
99 if (!destination.isDirectory()) { | 98 if (!destination.isDirectory()) { |
100 throw new HgBadArgumentException(String.format("%s is not a directory", destination), null); | 99 throw new HgBadArgumentException(String.format("%s is not a directory", destination), null); |
101 } else if (destination.list().length > 0) { | 100 } else if (destination.list().length > 0) { |
102 throw new HgBadArgumentException(String.format("% shall be empty", destination), null); | 101 throw new HgBadArgumentException(String.format("%s shall be empty", destination), null); |
103 } | 102 } |
104 } else { | 103 } else { |
105 destination.mkdirs(); | 104 destination.mkdirs(); |
106 } | 105 } |
107 ProgressSupport progress = getProgressSupport(null); | 106 ProgressSupport progress = getProgressSupport(null); |
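For context around these destination checks: the command refuses a non-directory, refuses a non-empty directory, and creates a missing one. A hypothetical usage sketch follows; the fluent source()/destination()/execute() names and the HgLookup call follow hg4j's command style but are assumptions, not a confirmed API.

    // Hypothetical driver code; method names are assumed, not verified.
    File destination = new File("/tmp/clone-target"); // empty dir, or absent (gets created)
    HgRemoteRepository hgRemote = new HgLookup().detectRemote("http://hg.example.com/repo", null);
    HgCloneCommand cmd = new HgCloneCommand();
    cmd.source(hgRemote);
    cmd.destination(destination);
    cmd.execute(); // throws HgBadArgumentException when destination is non-empty or not a dir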
144 private String filename; // human-readable name of the file being written, for log/exception purposes | 143 private String filename; // human-readable name of the file being written, for log/exception purposes |
145 | 144 |
146 private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>(); | 145 private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>(); |
147 private boolean collectChangelogIndexes = false; | 146 private boolean collectChangelogIndexes = false; |
148 | 147 |
149 private int base = -1; | |
150 private long offset = 0; | |
151 private DataAccess prevRevContent; | 148 private DataAccess prevRevContent; |
152 private final DigestHelper dh = new DigestHelper(); | 149 private final DigestHelper dh = new DigestHelper(); |
153 private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // last visited nodes first | 150 private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // last visited nodes first |
154 | 151 |
155 private final LinkedList<String> fncacheFiles = new LinkedList<String>(); | 152 private final LinkedList<String> fncacheFiles = new LinkedList<String>(); |
179 fncacheFile.close(); | 176 fncacheFile.close(); |
180 } | 177 } |
181 | 178 |
182 public void changelogStart() { | 179 public void changelogStart() { |
183 try { | 180 try { |
184 base = -1; | 181 revlogHeader.offset(0).baseRevision(-1); |
185 offset = 0; | |
186 revisionSequence.clear(); | 182 revisionSequence.clear(); |
187 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i")); | 183 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i")); |
188 collectChangelogIndexes = true; | 184 collectChangelogIndexes = true; |
189 } catch (IOException ex) { | 185 } catch (IOException ex) { |
190 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(filename)); | 186 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(filename)); |
209 stopIfCancelled(); | 205 stopIfCancelled(); |
210 } | 206 } |
211 | 207 |
212 public void manifestStart() { | 208 public void manifestStart() { |
213 try { | 209 try { |
214 base = -1; | 210 revlogHeader.offset(0).baseRevision(-1); |
215 offset = 0; | |
216 revisionSequence.clear(); | 211 revisionSequence.clear(); |
217 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i")); | 212 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i")); |
218 } catch (IOException ex) { | 213 } catch (IOException ex) { |
219 throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(filename)); | 214 throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(filename)); |
220 } | 215 } |
237 stopIfCancelled(); | 232 stopIfCancelled(); |
238 } | 233 } |
239 | 234 |
240 public void fileStart(String name) { | 235 public void fileStart(String name) { |
241 try { | 236 try { |
242 base = -1; | 237 revlogHeader.offset(0).baseRevision(-1); |
243 offset = 0; | |
244 revisionSequence.clear(); | 238 revisionSequence.clear(); |
245 fncacheFiles.add("data/" + name + ".i"); // TODO post-1.0 this is pure guess, | 239 fncacheFiles.add("data/" + name + ".i"); // TODO post-1.0 this is pure guess, |
246 // need to investigate more how filenames are kept in fncache | 240 // need to investigate more how filenames are kept in fncache |
247 File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString()); | 241 File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString()); |
248 file.getParentFile().mkdirs(); | 242 file.getParentFile().mkdirs(); |
282 } | 276 } |
283 } | 277 } |
284 String m = String.format("Can't find index of %s for file %s", p.shortNotation(), filename); | 278 String m = String.format("Can't find index of %s for file %s", p.shortNotation(), filename); |
285 throw new HgInvalidControlFileException(m, null, null).setRevision(p); | 279 throw new HgInvalidControlFileException(m, null, null).setRevision(p); |
286 } | 280 } |
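Only the failure tail of knownRevision() is visible in this hunk. One plausible reconstruction of the whole method, stated as an assumption, resolves a parent Nodeid to its local revision index by scanning the revisions written so far:

    // Assumed reconstruction; only the String.format/throw tail appears in the diff.
    private int knownRevision(Nodeid p) {
        if (p.isNull()) {
            return -1; // a missing parent is revision index -1 by revlog convention
        }
        // parents usually reference recent revisions, so scan backwards
        for (int i = revisionSequence.size() - 1; i >= 0; i--) {
            if (revisionSequence.get(i).equals(p)) {
                return i;
            }
        }
        String m = String.format("Can't find index of %s for file %s", p.shortNotation(), filename);
        throw new HgInvalidControlFileException(m, null, null).setRevision(p);
    }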
281 | |
282 private RevlogStreamWriter.HeaderWriter revlogHeader = new RevlogStreamWriter.HeaderWriter(true); | |
283 private RevlogCompressor revlogDataZip = new RevlogCompressor(); | |
287 | 284 |
288 public boolean element(GroupElement ge) { | 285 public boolean element(GroupElement ge) { |
289 try { | 286 try { |
290 assert indexFile != null; | 287 assert indexFile != null; |
291 boolean writeComplete = false; | 288 boolean writeComplete = false; |
292 Nodeid p1 = ge.firstParent(); | 289 Nodeid p1 = ge.firstParent(); |
293 Nodeid p2 = ge.secondParent(); | 290 Nodeid p2 = ge.secondParent(); |
294 if (p1.isNull() && p2.isNull() /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) { | 291 if (p1.isNull() && p2.isNull() /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) { |
292 // FIXME NOTE, both parents isNull == true doesn't necessarily mean | |
293 // empty prevContent, see build.gradle sample below | |
295 prevRevContent = new ByteArrayDataAccess(new byte[0]); | 294 prevRevContent = new ByteArrayDataAccess(new byte[0]); |
296 writeComplete = true; | 295 writeComplete = true; |
297 } | 296 } |
298 byte[] content = ge.apply(prevRevContent.byteArray()); | 297 byte[] content = ge.apply(prevRevContent.byteArray()); |
299 byte[] calculated = dh.sha1(p1, p2, content).asBinary(); | 298 byte[] calculated = dh.sha1(p1, p2, content).asBinary(); |
300 final Nodeid node = ge.node(); | 299 final Nodeid node = ge.node(); |
301 if (!node.equalsTo(calculated)) { | 300 if (!node.equalsTo(calculated)) { |
302 // TODO post-1.0 custom exception ChecksumCalculationFailed? | 301 // TODO post-1.0 custom exception ChecksumCalculationFailed? |
303 throw new HgInvalidStateException(String.format("Checksum failed: expected %s, calculated %s. File %s", node, calculated, filename)); | 302 throw new HgInvalidStateException(String.format("Checksum failed: expected %s, calculated %s. File %s", node, calculated, filename)); |
304 } | 303 } |
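The check above enforces Mercurial's node identity rule: a node id is the SHA-1 of the two parent ids, lexicographically smaller parent first, followed by the full revision text, which is presumably what DigestHelper.sha1(p1, p2, content) computes. A self-contained sketch using only JDK classes; hgNodeId and compareBytes are illustrative names, not part of this codebase:

    import java.security.MessageDigest;

    static byte[] hgNodeId(byte[] parent1, byte[] parent2, byte[] content) throws Exception {
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        // Mercurial hashes the lexicographically smaller parent first
        if (compareBytes(parent1, parent2) <= 0) {
            sha1.update(parent1);
            sha1.update(parent2);
        } else {
            sha1.update(parent2);
            sha1.update(parent1);
        }
        sha1.update(content);
        return sha1.digest(); // 20 bytes, the value ge.node() must equal
    }

    static int compareBytes(byte[] a, byte[] b) {
        int n = Math.min(a.length, b.length);
        for (int i = 0; i < n; i++) {
            int d = (a[i] & 0xff) - (b[i] & 0xff);
            if (d != 0) {
                return d;
            }
        }
        return a.length - b.length;
    }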
305 final int link; | 304 revlogHeader.nodeid(node); |
306 if (collectChangelogIndexes) { | 305 if (collectChangelogIndexes) { |
307 changelogIndexes.put(node, revisionSequence.size()); | 306 changelogIndexes.put(node, revisionSequence.size()); |
308 link = revisionSequence.size(); | 307 revlogHeader.linkRevision(revisionSequence.size()); |
309 } else { | 308 } else { |
310 Integer csRev = changelogIndexes.get(ge.cset()); | 309 Integer csRev = changelogIndexes.get(ge.cset()); |
311 if (csRev == null) { | 310 if (csRev == null) { |
312 throw new HgInvalidStateException(String.format("Changelog doesn't contain revision %s of %s", ge.cset().shortNotation(), filename)); | 311 throw new HgInvalidStateException(String.format("Changelog doesn't contain revision %s of %s", ge.cset().shortNotation(), filename)); |
313 } | 312 } |
314 link = csRev.intValue(); | 313 revlogHeader.linkRevision(csRev.intValue()); |
315 } | 314 } |
316 final int p1Rev = knownRevision(p1), p2Rev = knownRevision(p2); | 315 revlogHeader.parents(knownRevision(p1), knownRevision(p2)); |
317 byte[] patchContent = ge.rawDataByteArray(); | 316 byte[] patchContent = ge.rawDataByteArray(); |
318 writeComplete = writeComplete || patchContent.length >= (/* 3/4 of actual */content.length - (content.length >>> 2)); | 317 writeComplete = writeComplete || patchContent.length >= (/* 3/4 of actual */content.length - (content.length >>> 2)); |
319 if (writeComplete) { | 318 if (writeComplete) { |
320 base = revisionSequence.size(); | 319 revlogHeader.baseRevision(revisionSequence.size()); |
321 } | 320 } |
322 final byte[] sourceData = writeComplete ? content : patchContent; | 321 final byte[] sourceData = writeComplete ? content : patchContent; |
323 final byte[] data; | 322 revlogDataZip.reset(sourceData); |
324 ByteArrayOutputStream bos = new ByteArrayOutputStream(content.length); | 323 final int compressedLen; |
325 DeflaterOutputStream dos = new DeflaterOutputStream(bos); | 324 final boolean useUncompressedData = revlogDataZip.getCompressedLengthEstimate() >= (sourceData.length - (sourceData.length >>> 2)); |
326 dos.write(sourceData); | 325 if (useUncompressedData) { |
327 dos.close(); | |
328 final byte[] compressedData = bos.toByteArray(); | |
329 dos = null; | |
330 bos = null; | |
331 final Byte dataPrefix; | |
332 if (compressedData.length >= (sourceData.length - (sourceData.length >>> 2))) { | |
333 // compression wasn't too effective, | 326 // compression wasn't too effective, |
334 data = sourceData; | 327 compressedLen = sourceData.length + 1 /*1 byte for 'u' - uncompressed prefix byte*/; |
335 dataPrefix = 'u'; | |
336 } else { | 328 } else { |
337 data = compressedData; | 329 compressedLen = revlogDataZip.getCompressedLengthEstimate(); |
338 dataPrefix = null; | 330 } |
339 } | 331 |
340 | 332 revlogHeader.length(content.length, compressedLen); |
341 ByteBuffer header = ByteBuffer.allocate(64 /* REVLOGV1_RECORD_SIZE */); | 333 |
342 if (offset == 0) { | 334 revlogHeader.write(indexFile); |
343 final int INLINEDATA = 1 << 16; | 335 |
344 header.putInt(1 /* RevlogNG */ | INLINEDATA); | 336 if (useUncompressedData) { |
345 header.putInt(0); | 337 indexFile.write((byte) 'u'); |
338 indexFile.write(sourceData); | |
346 } else { | 339 } else { |
347 header.putLong(offset << 16); | 340 int actualCompressedLenWritten = revlogDataZip.writeCompressedData(indexFile); |
348 } | 341 if (actualCompressedLenWritten != compressedLen) { |
349 final int compressedLen = data.length + (dataPrefix == null ? 0 : 1); | 342 throw new HgInvalidStateException(String.format("Expected %d bytes of compressed data, but actually wrote %d in %s", compressedLen, actualCompressedLenWritten, filename)); |
350 header.putInt(compressedLen); | 343 } |
351 header.putInt(content.length); | 344 } |
352 header.putInt(base); | |
353 header.putInt(link); | |
354 header.putInt(p1Rev); | |
355 header.putInt(p2Rev); | |
356 header.put(node.toByteArray()); | |
357 // assume 12 bytes left are zeros | |
358 indexFile.write(header.array()); | |
359 if (dataPrefix != null) { | |
360 indexFile.write(dataPrefix.byteValue()); | |
361 } | |
362 indexFile.write(data); | |
363 // | 345 // |
364 offset += compressedLen; | |
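Both sides of this hunk use the same cutoff, len - (len >>> 2), which equals ceil(3*len/4): a delta is kept only while it is smaller than three quarters of the full text, and deflated output is kept only while it is smaller than three quarters of its input. For content.length == 1000 the threshold is 750 bytes; note that uncompressed data costs one extra byte for the 'u' marker, which is why the right side adds 1 to compressedLen. The right side delegates deflation to the extracted RevlogCompressor; here is a minimal sketch of such a helper, assuming it simply wraps the DeflaterOutputStream logic removed from the left side. The real class likely computes its length estimate without buffering the entire result, hence "estimate" in the method name.

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.util.zip.DeflaterOutputStream;

    // Sketch only: buffers the deflated bytes eagerly, so the 'estimate' is exact.
    class RevlogCompressorSketch {
        private byte[] compressed;

        void reset(byte[] source) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream(source.length);
            DeflaterOutputStream dos = new DeflaterOutputStream(bos);
            dos.write(source);
            dos.close(); // finishes the deflater and flushes all pending bytes
            compressed = bos.toByteArray();
        }

        int getCompressedLengthEstimate() {
            return compressed.length;
        }

        int writeCompressedData(OutputStream out) throws IOException {
            out.write(compressed);
            return compressed.length; // callers compare this against the estimate
        }
    }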
365 revisionSequence.add(node); | 346 revisionSequence.add(node); |
366 prevRevContent.done(); | 347 prevRevContent.done(); |
367 prevRevContent = new ByteArrayDataAccess(content); | 348 prevRevContent = new ByteArrayDataAccess(content); |
368 } catch (IOException ex) { | 349 } catch (IOException ex) { |
369 String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename); | 350 String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename); |
370 throw new HgInvalidControlFileException(m, ex, new File(filename)); | 351 throw new HgInvalidControlFileException(m, ex, new File(filename)); |
371 } | 352 } |
372 return cancelException == null; | 353 return cancelException == null; |
373 } | 354 } |
355 /* | |
356 $ hg debugindex build.gradle | |
357 rev offset length base linkrev nodeid p1 p2 | |
358 0 0 857 0 454 b2a1b20d1933 000000000000 000000000000 | |
359 1 857 319 0 455 5324c8f2b550 b2a1b20d1933 000000000000 | |
360 2 1176 533 0 460 4011d52141cd 5324c8f2b550 000000000000 | |
361 3 1709 85 0 463 d0be58845306 4011d52141cd 000000000000 | |
362 4 1794 105 0 464 3ddd456244a0 d0be58845306 000000000000 | |
363 5 1899 160 0 466 a3f374fbf33a 3ddd456244a0 000000000000 | |
364 6 2059 133 0 468 0227d28e0db6 a3f374fbf33a 000000000000 | |
365 | |
366 once we get a bundle for this repository and look into it for the same file: | |
367 | |
368 $hg debugbundle -a /tmp/hg-bundle-4418325145435980614.tmp | |
369 format: id, p1, p2, cset, delta base, len(delta) | |
370 | |
371 build.gradle | |
372 62a101b7994c6c5b0423ba6c802f8c64d24ef784 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 6ec4af642ba8024edd636af15e672c97cc3294e4 0000000000000000000000000000000000000000 1368 | |
373 b2a1b20d1933d0605aab6780ee52fe5ab3073832 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 7dcc920e2d57d5850ee9f08ac863251460565bd3 62a101b7994c6c5b0423ba6c802f8c64d24ef784 2373 | |
374 5324c8f2b5503a4d1ead3ac40a9851c27572166b b2a1b20d1933d0605aab6780ee52fe5ab3073832 0000000000000000000000000000000000000000 7b883bf03b14ccea8ee74db0a34f9f66ca644a3c b2a1b20d1933d0605aab6780ee52fe5ab3073832 579 | |
375 4011d52141cd717c92cbf350a93522d2f3ee415e 5324c8f2b5503a4d1ead3ac40a9851c27572166b 0000000000000000000000000000000000000000 55e9588b84b83aa96fe76a06ee8bf067c5d3c90e 5324c8f2b5503a4d1ead3ac40a9851c27572166b 1147 | |
376 d0be588453068787dcb3ee05f8edfe47fdd5ae78 4011d52141cd717c92cbf350a93522d2f3ee415e 0000000000000000000000000000000000000000 ad0322a4af204547c400e1846b2b83d446ab8da5 4011d52141cd717c92cbf350a93522d2f3ee415e 85 | |
377 3ddd456244a08f81779163d9faf922a6dcd9e53e d0be588453068787dcb3ee05f8edfe47fdd5ae78 0000000000000000000000000000000000000000 3ace1fc95d0a1a941b6427c60b6e624f96dd71ad d0be588453068787dcb3ee05f8edfe47fdd5ae78 151 | |
378 a3f374fbf33aba1cc3b4f472db022b5185880f5d 3ddd456244a08f81779163d9faf922a6dcd9e53e 0000000000000000000000000000000000000000 3ca4ae7bdd3890b8ed89bfea1b42af593e04b373 3ddd456244a08f81779163d9faf922a6dcd9e53e 195 | |
379 0227d28e0db69afebee34cd5a4151889fb6271da a3f374fbf33aba1cc3b4f472db022b5185880f5d 0000000000000000000000000000000000000000 31bd09da0dcfe48e1fc662143f91ff402238aa84 a3f374fbf33aba1cc3b4f472db022b5185880f5d 145 | |
380 | |
381 but there's no delta base information in the bundle file; it's merely a hard-coded convention (a chunk always patches the previous revision, see |
382 (a) changegroup.py#builddeltaheader(): # do nothing with basenode, it is implicitly the previous one in HG10 | |
383 (b) revlog.py#group(): prev, curr = revs[r], revs[r + 1] | |
384 for c in bundler.revchunk(self, curr, prev): | |
385 ) | |
386 | |
387 | |
388 It's unclear where the first chunk (identified as 62a101b7...) comes from (by the way, there's no such changeset as 6ec4af... as specified in the chunk, while 7dcc920e.. IS changeset 454) |
389 | |
390 EXPLANATION: | |
391 if the cloned repository comes from the svnkit repo (which has the gradle branch): |
392 $hg debugindex build.gradle | |
393 rev offset length base linkrev nodeid p1 p2 | |
394 0 0 590 0 213 62a101b7994c 000000000000 000000000000 | |
395 1 590 872 0 452 b2a1b20d1933 000000000000 000000000000 | |
396 2 1462 319 0 453 5324c8f2b550 b2a1b20d1933 000000000000 | |
397 3 1781 533 0 459 4011d52141cd 5324c8f2b550 000000000000 | |
398 4 2314 85 0 462 d0be58845306 4011d52141cd 000000000000 | |
399 5 2399 105 0 466 3ddd456244a0 d0be58845306 000000000000 | |
400 6 2504 160 0 468 a3f374fbf33a 3ddd456244a0 000000000000 | |
401 7 2664 133 0 470 0227d28e0db6 a3f374fbf33a 000000000000 | |
402 | |
403 and the aforementioned bundle was the result of hg incoming svnkit!!! |
404 */ | |
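The sample above pins the defect down: an HG10 changegroup carries no delta base, every chunk implicitly patches the previous chunk of its group, so 0,0 parents say nothing about the base being empty (b2a1b20d1933 has two null parents yet its 2373-byte delta applies against 62a101b7994c). A hedged sketch of the corrected guard for element() follows; the actual fix is not part of this changeset (see the FIXME above), and this condition is only one plausible reading:

    // Sketch of the corrected rule, not the shipped fix.
    boolean writeComplete = false;
    if (revisionSequence.isEmpty() && p1.isNull() && p2.isNull()) {
        // only the first chunk of a group may start from an empty base,
        // and only when it has no first parent to delta against
        prevRevContent = new ByteArrayDataAccess(new byte[0]);
        writeComplete = true;
    }
    // any later chunk, even one with 0,0 parents like b2a1b20d1933,
    // implicitly patches the previous chunk's reconstructed content
    byte[] content = ge.apply(prevRevContent.byteArray());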
374 | 405 |
375 public void start(int count, Callback callback, Object token) { | 406 public void start(int count, Callback callback, Object token) { |
376 progressSupport.start(count); | 407 progressSupport.start(count); |
377 lifecycleCallback = callback; | 408 lifecycleCallback = callback; |
378 } | 409 } |