comparison src/org/tmatesoft/hg/core/HgCloneCommand.java @ 532:688c1ab113bb
Introduce explicit reference to base patch in bundle's group element, use it when cloning to fix defect when few revisions list null,null parents
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
|---|---|
| date | Wed, 23 Jan 2013 19:14:15 +0100 |
| parents | 95c2f43008bd |
| children | 243202f1bda5 |
| 531:95c2f43008bd | 532:688c1ab113bb |
|---|---|
| 145 private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>(); | 145 private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>(); |
| 146 private boolean collectChangelogIndexes = false; | 146 private boolean collectChangelogIndexes = false; |
| 147 | 147 |
| 148 private DataAccess prevRevContent; | 148 private DataAccess prevRevContent; |
| 149 private final DigestHelper dh = new DigestHelper(); | 149 private final DigestHelper dh = new DigestHelper(); |
| 150 private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); // last visited nodes first | 150 // recently processed nodes last, so that index in the array may be used as a linkRevision or baseRevision |
| | 151 private final ArrayList<Nodeid> revisionSequence = new ArrayList<Nodeid>(); |
| 151 | 152 |
| 152 private final LinkedList<String> fncacheFiles = new LinkedList<String>(); | 153 private final LinkedList<String> fncacheFiles = new LinkedList<String>(); |
| 153 private RepoInitializer repoInit; | 154 private RepoInitializer repoInit; |
| 154 private Lifecycle.Callback lifecycleCallback; | 155 private Lifecycle.Callback lifecycleCallback; |
| 155 private CancelledException cancelException; | 156 private CancelledException cancelException; |
| 181 revlogHeader.offset(0).baseRevision(-1); | 182 revlogHeader.offset(0).baseRevision(-1); |
| 182 revisionSequence.clear(); | 183 revisionSequence.clear(); |
| 183 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i")); | 184 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i")); |
| 184 collectChangelogIndexes = true; | 185 collectChangelogIndexes = true; |
| 185 } catch (IOException ex) { | 186 } catch (IOException ex) { |
| 186 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(filename)); | 187 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(hgDir, filename)); |
| 187 } | 188 } |
| 188 stopIfCancelled(); | 189 stopIfCancelled(); |
| 189 } | 190 } |
| 190 | 191 |
| 191 public void changelogEnd() { | 192 public void changelogEnd() { |
| 192 try { | 193 try { |
| 193 if (prevRevContent != null) { | 194 clearPreviousContent(); |
| 194 prevRevContent.done(); | |
| 195 prevRevContent = null; | |
| 196 } | |
| 197 collectChangelogIndexes = false; | 195 collectChangelogIndexes = false; |
| 198 indexFile.close(); | 196 closeIndexFile(); |
| 199 indexFile = null; | 197 } catch (IOException ex) { |
| 200 filename = null; | 198 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(hgDir, filename)); |
| 201 } catch (IOException ex) { | |
| 202 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(filename)); | |
| 203 } | 199 } |
| 204 progressSupport.worked(1); | 200 progressSupport.worked(1); |
| 205 stopIfCancelled(); | 201 stopIfCancelled(); |
| 206 } | 202 } |
| 207 | 203 |
| 209 try { | 205 try { |
| 210 revlogHeader.offset(0).baseRevision(-1); | 206 revlogHeader.offset(0).baseRevision(-1); |
| 211 revisionSequence.clear(); | 207 revisionSequence.clear(); |
| 212 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i")); | 208 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i")); |
| 213 } catch (IOException ex) { | 209 } catch (IOException ex) { |
| 214 throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(filename)); | 210 throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(hgDir, filename)); |
| 215 } | 211 } |
| 216 stopIfCancelled(); | 212 stopIfCancelled(); |
| 217 } | 213 } |
| 218 | 214 |
| 219 public void manifestEnd() { | 215 public void manifestEnd() { |
| 220 try { | 216 try { |
| 221 if (prevRevContent != null) { | 217 clearPreviousContent(); |
| 222 prevRevContent.done(); | 218 closeIndexFile(); |
| 223 prevRevContent = null; | 219 } catch (IOException ex) { |
| 224 } | 220 throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(hgDir, filename)); |
| 225 indexFile.close(); | |
| 226 indexFile = null; | |
| 227 filename = null; | |
| 228 } catch (IOException ex) { | |
| 229 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(filename)); | |
| 230 } | 221 } |
| 231 progressSupport.worked(1); | 222 progressSupport.worked(1); |
| 232 stopIfCancelled(); | 223 stopIfCancelled(); |
| 233 } | 224 } |
| 234 | 225 |
| 248 stopIfCancelled(); | 239 stopIfCancelled(); |
| 249 } | 240 } |
| 250 | 241 |
| 251 public void fileEnd(String name) { | 242 public void fileEnd(String name) { |
| 252 try { | 243 try { |
| 253 if (prevRevContent != null) { | 244 clearPreviousContent(); |
| 254 prevRevContent.done(); | 245 closeIndexFile(); |
| 255 prevRevContent = null; | |
| 256 } | |
| 257 indexFile.close(); | |
| 258 indexFile = null; | |
| 259 filename = null; | |
| 260 } catch (IOException ex) { | 246 } catch (IOException ex) { |
| 261 String m = String.format("Failed to write file %s", filename); | 247 String m = String.format("Failed to write file %s", filename); |
| 262 throw new HgInvalidControlFileException(m, ex, new File(filename)); | 248 throw new HgInvalidControlFileException(m, ex, new File(filename)); |
| 263 } | 249 } |
| 264 progressSupport.worked(1); | 250 progressSupport.worked(1); |
| 265 stopIfCancelled(); | 251 stopIfCancelled(); |
| | 252 } |
| | 253 |
| | 254 private void clearPreviousContent() { |
| | 255 if (prevRevContent != null) { |
| | 256 prevRevContent.done(); |
| | 257 prevRevContent = null; |
| | 258 } |
| | 259 } |
| | 260 |
| | 261 private void closeIndexFile() throws IOException { |
| | 262 indexFile.close(); |
| | 263 indexFile = null; |
| | 264 filename = null; |
| 266 } | 265 } |
| 267 | 266 |
| 268 private int knownRevision(Nodeid p) { | 267 private int knownRevision(Nodeid p) { |
| 269 if (p.isNull()) { | 268 if (p.isNull()) { |
| 270 return -1; | 269 return -1; |
| 274 return i; | 273 return i; |
| 275 } | 274 } |
| 276 } | 275 } |
| 277 } | 276 } |
| 278 String m = String.format("Can't find index of %s for file %s", p.shortNotation(), filename); | 277 String m = String.format("Can't find index of %s for file %s", p.shortNotation(), filename); |
| 279 throw new HgInvalidControlFileException(m, null, null).setRevision(p); | 278 throw new HgInvalidControlFileException(m, null, new File(hgDir, filename)).setRevision(p); |
| 280 } | 279 } |
| 281 | 280 |
| 282 private RevlogStreamWriter.HeaderWriter revlogHeader = new RevlogStreamWriter.HeaderWriter(true); | 281 private RevlogStreamWriter.HeaderWriter revlogHeader = new RevlogStreamWriter.HeaderWriter(true); |
| 283 private RevlogCompressor revlogDataZip = new RevlogCompressor(); | 282 private RevlogCompressor revlogDataZip = new RevlogCompressor(); |
| 284 | 283 |
| 285 public boolean element(GroupElement ge) { | 284 public boolean element(GroupElement ge) { |
| 286 try { | 285 try { |
| 287 assert indexFile != null; | 286 assert indexFile != null; |
| 288 boolean writeComplete = false; | 287 boolean writeComplete = false; |
| | 288 Nodeid deltaBase = ge.patchBase(); |
| | 289 if (deltaBase.isNull()) { |
| | 290 // NOTE, can't use both parents isNull == true to empty prevRevContent |
| | 291 // see build.gradle sample below why. |
| | 292 prevRevContent = new DataAccess(); // empty data |
| | 293 writeComplete = true; |
| | 294 // if (writeComplete) would set baseRevision correctly, |
| | 295 } else { |
| | 296 Nodeid prevRevision = revisionSequence.size() > 0 ? revisionSequence.get(revisionSequence.size()-1) : Nodeid.NULL; |
| | 297 if (!prevRevision.equals(deltaBase)) { |
| | 298 // presently, bundle group elements always patch previous, see |
| | 299 // (a) changegroup.py#builddeltaheader(): # do nothing with basenode, it is implicitly the previous one in HG10 |
| | 300 // (b) revlog.py#group(): prev, curr = revs[r], revs[r + 1] |
| | 301 // for c in bundler.revchunk(self, curr, prev): |
| | 302 // so there's no reason to have code here to extract contents of deltaBase revision |
| | 303 String m = String.format("Revision %s import failed: delta base %s is not the last node we've handled (and know content for) %s", ge.node(), deltaBase, prevRevision); |
| | 304 throw new HgInvalidStateException(m); |
| | 305 } |
| | 306 } |
| | 307 // |
| | 308 byte[] content = ge.apply(prevRevContent.byteArray()); |
| 289 Nodeid p1 = ge.firstParent(); | 309 Nodeid p1 = ge.firstParent(); |
| 290 Nodeid p2 = ge.secondParent(); | 310 Nodeid p2 = ge.secondParent(); |
| 291 if (p1.isNull() && p2.isNull() /* or forced flag, does REVIDX_PUNCHED_FLAG indicate that? */) { | |
| 292 // FIXME NOTE, both parents isNull == true doesn't necessarily mean | |
| 293 // empty prevContent, see build.gradle sample below | |
| 294 prevRevContent = new ByteArrayDataAccess(new byte[0]); | |
| 295 writeComplete = true; | |
| 296 } | |
| 297 byte[] content = ge.apply(prevRevContent.byteArray()); | |
| 298 byte[] calculated = dh.sha1(p1, p2, content).asBinary(); | 311 byte[] calculated = dh.sha1(p1, p2, content).asBinary(); |
| 299 final Nodeid node = ge.node(); | 312 final Nodeid node = ge.node(); |
| 300 if (!node.equalsTo(calculated)) { | 313 if (!node.equalsTo(calculated)) { |
| 301 String m = String.format("Checksum failed: expected %s, calculated %s. File %s", node, calculated, filename); | 314 String m = String.format("Checksum failed: expected %s, calculated %s. File %s", node, calculated, filename); |
| 302 throw new HgRevisionIntegrityException(m, null, new File(hgDir, filename)); | 315 throw new HgRevisionIntegrityException(m, null, new File(hgDir, filename)); |
| 303 } | 316 } |
| 304 revlogHeader.nodeid(node); | 317 revlogHeader.nodeid(node); |
| | 318 // |
| 305 if (collectChangelogIndexes) { | 319 if (collectChangelogIndexes) { |
| 306 changelogIndexes.put(node, revisionSequence.size()); | 320 changelogIndexes.put(node, revisionSequence.size()); |
| 307 revlogHeader.linkRevision(revisionSequence.size()); | 321 revlogHeader.linkRevision(revisionSequence.size()); |
| 308 } else { | 322 } else { |
| 309 Integer csRev = changelogIndexes.get(ge.cset()); | 323 Integer csRev = changelogIndexes.get(ge.cset()); |
| 310 if (csRev == null) { | 324 if (csRev == null) { |
| 311 throw new HgInvalidStateException(String.format("Changelog doesn't contain revision %s of %s", ge.cset().shortNotation(), filename)); | 325 throw new HgInvalidStateException(String.format("Changelog doesn't contain revision %s of %s", ge.cset().shortNotation(), filename)); |
| 312 } | 326 } |
| 313 revlogHeader.linkRevision(csRev.intValue()); | 327 revlogHeader.linkRevision(csRev.intValue()); |
| 314 } | 328 } |
| | 329 // |
| 315 revlogHeader.parents(knownRevision(p1), knownRevision(p2)); | 330 revlogHeader.parents(knownRevision(p1), knownRevision(p2)); |
| | 331 // |
| 316 byte[] patchContent = ge.rawDataByteArray(); | 332 byte[] patchContent = ge.rawDataByteArray(); |
| | 333 // no reason to keep patch if it's close (here, >75%) in size to the complete contents, |
| | 334 // save patching effort in this case |
| 317 writeComplete = writeComplete || patchContent.length >= (/* 3/4 of actual */content.length - (content.length >>> 2)); | 335 writeComplete = writeComplete || patchContent.length >= (/* 3/4 of actual */content.length - (content.length >>> 2)); |
| | 336 |
| 318 if (writeComplete) { | 337 if (writeComplete) { |
| 319 revlogHeader.baseRevision(revisionSequence.size()); | 338 revlogHeader.baseRevision(revisionSequence.size()); |
| 320 } | 339 } |
| | 340 assert revlogHeader.baseRevision() >= 0; |
| | 341 |
| 321 final byte[] sourceData = writeComplete ? content : patchContent; | 342 final byte[] sourceData = writeComplete ? content : patchContent; |
| 322 revlogDataZip.reset(sourceData); | 343 revlogDataZip.reset(sourceData); |
| 323 final int compressedLen; | 344 final int compressedLen; |
| 324 final boolean useUncompressedData = revlogDataZip.getCompressedLengthEstimate() >= (sourceData.length - (sourceData.length >>> 2)); | 345 final boolean useUncompressedData = revlogDataZip.getCompressedLengthEstimate() >= (sourceData.length - (sourceData.length >>> 2)); |
| 325 if (useUncompressedData) { | 346 if (useUncompressedData) { |
| 376 d0be588453068787dcb3ee05f8edfe47fdd5ae78 4011d52141cd717c92cbf350a93522d2f3ee415e 0000000000000000000000000000000000000000 ad0322a4af204547c400e1846b2b83d446ab8da5 4011d52141cd717c92cbf350a93522d2f3ee415e 85 | 397 d0be588453068787dcb3ee05f8edfe47fdd5ae78 4011d52141cd717c92cbf350a93522d2f3ee415e 0000000000000000000000000000000000000000 ad0322a4af204547c400e1846b2b83d446ab8da5 4011d52141cd717c92cbf350a93522d2f3ee415e 85 |
| 377 3ddd456244a08f81779163d9faf922a6dcd9e53e d0be588453068787dcb3ee05f8edfe47fdd5ae78 0000000000000000000000000000000000000000 3ace1fc95d0a1a941b6427c60b6e624f96dd71ad d0be588453068787dcb3ee05f8edfe47fdd5ae78 151 | 398 3ddd456244a08f81779163d9faf922a6dcd9e53e d0be588453068787dcb3ee05f8edfe47fdd5ae78 0000000000000000000000000000000000000000 3ace1fc95d0a1a941b6427c60b6e624f96dd71ad d0be588453068787dcb3ee05f8edfe47fdd5ae78 151 |
| 378 a3f374fbf33aba1cc3b4f472db022b5185880f5d 3ddd456244a08f81779163d9faf922a6dcd9e53e 0000000000000000000000000000000000000000 3ca4ae7bdd3890b8ed89bfea1b42af593e04b373 3ddd456244a08f81779163d9faf922a6dcd9e53e 195 | 399 a3f374fbf33aba1cc3b4f472db022b5185880f5d 3ddd456244a08f81779163d9faf922a6dcd9e53e 0000000000000000000000000000000000000000 3ca4ae7bdd3890b8ed89bfea1b42af593e04b373 3ddd456244a08f81779163d9faf922a6dcd9e53e 195 |
| 379 0227d28e0db69afebee34cd5a4151889fb6271da a3f374fbf33aba1cc3b4f472db022b5185880f5d 0000000000000000000000000000000000000000 31bd09da0dcfe48e1fc662143f91ff402238aa84 a3f374fbf33aba1cc3b4f472db022b5185880f5d 145 | 400 0227d28e0db69afebee34cd5a4151889fb6271da a3f374fbf33aba1cc3b4f472db022b5185880f5d 0000000000000000000000000000000000000000 31bd09da0dcfe48e1fc662143f91ff402238aa84 a3f374fbf33aba1cc3b4f472db022b5185880f5d 145 |
| 380 | 401 |
| 381 but there's no delta base information in the bundle file, it's merely a hard-coded convention (always patches previous version, see | 402 but there's no delta base information in the bundle file, it's merely a hard-coded convention |
| 382 (a) changegroup.py#builddeltaheader(): # do nothing with basenode, it is implicitly the previous one in HG10 | |
| 383 (b) revlog.py#group(): prev, curr = revs[r], revs[r + 1] | |
| 384 for c in bundler.revchunk(self, curr, prev): | |
| 385 ) | |
| 386 | |
| 387 | 403 |
| 388 It's unclear where the first chunk (identified 62a101b7...) comes from (by the way, there's no such changeset as 6ec4af... as specified in the chunk, while 7dcc920e.. IS changeset 454) | 404 It's unclear where the first chunk (identified 62a101b7...) comes from (by the way, there's no such changeset as 6ec4af... as specified in the chunk, while 7dcc920e.. IS changeset 454) |
| 389 | 405 |
| 390 EXPLANATION: | 406 EXPLANATION: |
| 391 if cloned repository comes from svnkit repo (where's the gradle branch): | 407 if cloned repository comes from svnkit repo (where's the gradle branch): |
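
The element(GroupElement) change above (new lines 288-308) boils down to one rule: the group element's explicit delta base is either the null nodeid, in which case the patch applies to empty content and the revision is written in full, or it must match the last revision already processed, because HG10 bundles always patch the previous entry of the group. Below is a minimal stand-alone sketch of that rule; the names (DeltaBaseCheck, checkBase, recordWritten) are illustrative stand-ins, not the library's API, and String replaces Nodeid.

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of the delta-base rule from element(GroupElement); hypothetical names, String in place of Nodeid.
public class DeltaBaseCheck {

    private final List<String> revisionSequence = new ArrayList<String>(); // processed nodes, most recent last

    // Returns true when the revision has to be written in full (no usable base),
    // throws when the base is neither null nor the last revision we hold content for.
    public boolean checkBase(String deltaBase) {
        if (deltaBase == null) {
            return true; // patch against empty content, store the complete revision
        }
        String lastWritten = revisionSequence.isEmpty() ? null : revisionSequence.get(revisionSequence.size() - 1);
        if (!deltaBase.equals(lastWritten)) {
            throw new IllegalStateException(String.format(
                    "delta base %s is not the last node we've handled (%s)", deltaBase, lastWritten));
        }
        return false; // patch against the previous revision's content
    }

    public void recordWritten(String node) {
        revisionSequence.add(node);
    }

    public static void main(String[] args) {
        DeltaBaseCheck check = new DeltaBaseCheck();
        System.out.println(check.checkBase(null));   // true: first revision, written in full
        check.recordWritten("rev0");
        System.out.println(check.checkBase("rev0")); // false: patch applies on top of rev0
    }
}
```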
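The same 3/4 threshold appears twice in the rewritten element(): the full content is stored instead of the patch once the patch reaches about 75% of the content size (new lines 333-335), and the data is left uncompressed once the compressed length estimate reaches about 75% of the source (new line 345). A minimal sketch of that arithmetic, assuming nothing beyond the expression used in the diff: len - (len >>> 2) is an integer approximation of 3*len/4.

```java
// Sketch of the 3/4-size heuristic used for both the patch-vs-full and compress-vs-raw decisions.
public class ThreeQuarterThreshold {

    // true when the candidate (patch or compressed form) saves too little
    // relative to the full form, i.e. it is at least ~75% of the full size
    static boolean notWorthIt(int candidateLen, int fullLen) {
        return candidateLen >= fullLen - (fullLen >>> 2); // fullLen - fullLen/4 == 3/4 of fullLen
    }

    public static void main(String[] args) {
        System.out.println(notWorthIt(80, 100)); // true: 80 >= 75, keep the full/uncompressed form
        System.out.println(notWorthIt(40, 100)); // false: the smaller form is worth keeping
    }
}
```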
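The comment moved to new line 150 ("recently processed nodes last, so that index in the array may be used as a linkRevision or baseRevision") and knownRevision() rely on revisionSequence doubling as a nodeid-to-revision map: nodes are appended in write order, so a node's position in the list is its local revision index, and the null parent maps to -1. A simplified sketch under those assumptions (RevisionIndex and its methods are hypothetical names, String stands in for Nodeid, and the changelog-index shortcut is omitted):

```java
import java.util.ArrayList;
import java.util.List;

// Sketch of how the write order of revisions yields their revision indexes.
public class RevisionIndex {

    private final List<String> revisionSequence = new ArrayList<String>();

    public void written(String node) {
        revisionSequence.add(node); // position of this entry == revision index just written
    }

    public int knownRevision(String node) {
        if (node == null) {
            return -1; // convention for the null parent
        }
        for (int i = revisionSequence.size() - 1; i >= 0; i--) { // recent nodes are the likely parents
            if (revisionSequence.get(i).equals(node)) {
                return i;
            }
        }
        throw new IllegalStateException("Can't find index of " + node);
    }

    public static void main(String[] args) {
        RevisionIndex idx = new RevisionIndex();
        idx.written("a");
        idx.written("b");
        System.out.println(idx.knownRevision("b"));  // 1
        System.out.println(idx.knownRevision(null)); // -1
    }
}
```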
