comparison src/org/tmatesoft/hg/core/HgCloneCommand.java @ 618:7c0d2ce340b8
Refactor the approach to how content finds its way down to a commit revision
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
|---|---|
| date | Thu, 16 May 2013 19:46:13 +0200 |
| parents | 5e0313485eef |
| children | 4e6179bde4fc |
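For orientation, the hunks below make two related changes: the writer now keeps a `currentFile` field next to the `indexFile` stream so that failures can report the exact file being written, and the `DataAccessProvider.StreamDataSerializer` previously used for the index is replaced with an anonymous `DataSerializer` that writes straight to the already-open stream and wraps `IOException` into a file-aware `HgIOException`. Below is a minimal, self-contained sketch of that pattern; the `Sketch*` types are simplified stand-ins (assumed shapes, not the hg4j library's real API), and `RevlogIndexWriterSketch` is a hypothetical container, not the actual `HgCloneCommand` writer.

```java
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

// Stand-ins for org.tmatesoft.hg.internal.DataSerializer and
// org.tmatesoft.hg.core.HgIOException, reduced to what the diff exercises.
abstract class SketchDataSerializer {
    public abstract void write(byte[] data, int offset, int length) throws SketchHgIOException;
    public void done() throws SketchHgIOException { /* nothing to flush in this sketch */ }
}

class SketchHgIOException extends Exception {
    private final File file;
    SketchHgIOException(String message, Throwable cause, File file) {
        super(message, cause);
        this.file = file;
    }
    public File getFile() { return file; }
}

public class RevlogIndexWriterSketch {
    private FileOutputStream indexFile;
    private File currentFile; // kept alongside the stream so failures can name the exact file

    public void openIndex(File hgDir, String storePath) throws IOException {
        currentFile = new File(hgDir, storePath);
        indexFile = new FileOutputStream(currentFile);
    }

    // Mirrors the anonymous serializer in the changeset: write directly to the
    // open index stream, translate IOException into the checked, file-aware
    // exception, and do not close a stream this serializer does not own.
    public SketchDataSerializer serializer() {
        return new SketchDataSerializer() {
            @Override
            public void write(byte[] data, int offset, int length) throws SketchHgIOException {
                try {
                    indexFile.write(data, offset, length);
                } catch (IOException ex) {
                    throw new SketchHgIOException("Write failure", ex, currentFile);
                }
            }
        };
    }

    public void closeIndex() throws IOException {
        indexFile.close();
        indexFile = null;
        currentFile = null;
    }
}
```

Keeping the `File` reference next to the stream is what lets both `HgIOException` and `HgInvalidControlFileException` in the revised code point at `currentFile` instead of reconstructing a path from `filename`.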
| 617:65c01508f002 | 618:7c0d2ce340b8 |
|---|---|
| 28 import java.util.Collections; | 28 import java.util.Collections; |
| 29 import java.util.TreeMap; | 29 import java.util.TreeMap; |
| 30 | 30 |
| 31 import org.tmatesoft.hg.internal.ByteArrayDataAccess; | 31 import org.tmatesoft.hg.internal.ByteArrayDataAccess; |
| 32 import org.tmatesoft.hg.internal.DataAccess; | 32 import org.tmatesoft.hg.internal.DataAccess; |
| 33 import org.tmatesoft.hg.internal.DataAccessProvider; | |
| 34 import org.tmatesoft.hg.internal.DataSerializer; | 33 import org.tmatesoft.hg.internal.DataSerializer; |
| 35 import org.tmatesoft.hg.internal.DigestHelper; | 34 import org.tmatesoft.hg.internal.DigestHelper; |
| 36 import org.tmatesoft.hg.internal.FNCacheFile; | 35 import org.tmatesoft.hg.internal.FNCacheFile; |
| 37 import org.tmatesoft.hg.internal.Internals; | 36 import org.tmatesoft.hg.internal.Internals; |
| 38 import org.tmatesoft.hg.internal.Lifecycle; | 37 import org.tmatesoft.hg.internal.Lifecycle; |
| 146 private final ProgressSupport progressSupport; | 145 private final ProgressSupport progressSupport; |
| 147 private final CancelSupport cancelSupport; | 146 private final CancelSupport cancelSupport; |
| 148 private final SessionContext ctx; | 147 private final SessionContext ctx; |
| 149 private final Path.Source pathFactory; | 148 private final Path.Source pathFactory; |
| 150 private FileOutputStream indexFile; | 149 private FileOutputStream indexFile; |
| | 150 private File currentFile; |
| 151 private String filename; // human-readable name of the file being written, for log/exception purposes | 151 private String filename; // human-readable name of the file being written, for log/exception purposes |
| 152 | 152 |
| 153 private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>(); | 153 private final TreeMap<Nodeid, Integer> changelogIndexes = new TreeMap<Nodeid, Integer>(); |
| 154 private boolean collectChangelogIndexes = false; | 154 private boolean collectChangelogIndexes = false; |
| 155 | 155 |
| 197 | 197 |
| 198 public void changelogStart() { | 198 public void changelogStart() { |
| 199 try { | 199 try { |
| 200 revlogHeader.offset(0).baseRevision(-1); | 200 revlogHeader.offset(0).baseRevision(-1); |
| 201 revisionSequence.clear(); | 201 revisionSequence.clear(); |
| 202 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00changelog.i")); | 202 indexFile = new FileOutputStream(currentFile = new File(hgDir, filename = "store/00changelog.i")); |
| 203 collectChangelogIndexes = true; | 203 collectChangelogIndexes = true; |
| 204 } catch (IOException ex) { | 204 } catch (IOException ex) { |
| 205 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(hgDir, filename)); | 205 throw new HgInvalidControlFileException("Failed to write changelog", ex, new File(hgDir, filename)); |
| 206 } | 206 } |
| 207 stopIfCancelled(); | 207 stopIfCancelled(); |
| 221 | 221 |
| 222 public void manifestStart() { | 222 public void manifestStart() { |
| 223 try { | 223 try { |
| 224 revlogHeader.offset(0).baseRevision(-1); | 224 revlogHeader.offset(0).baseRevision(-1); |
| 225 revisionSequence.clear(); | 225 revisionSequence.clear(); |
| 226 indexFile = new FileOutputStream(new File(hgDir, filename = "store/00manifest.i")); | 226 indexFile = new FileOutputStream(currentFile = new File(hgDir, filename = "store/00manifest.i")); |
| 227 } catch (IOException ex) { | 227 } catch (IOException ex) { |
| 228 throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(hgDir, filename)); | 228 throw new HgInvalidControlFileException("Failed to write manifest", ex, new File(hgDir, filename)); |
| 229 } | 229 } |
| 230 stopIfCancelled(); | 230 stopIfCancelled(); |
| 231 } | 231 } |
| 245 try { | 245 try { |
| 246 revlogHeader.offset(0).baseRevision(-1); | 246 revlogHeader.offset(0).baseRevision(-1); |
| 247 revisionSequence.clear(); | 247 revisionSequence.clear(); |
| 248 File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString()); | 248 File file = new File(hgDir, filename = storagePathHelper.rewrite(name).toString()); |
| 249 file.getParentFile().mkdirs(); | 249 file.getParentFile().mkdirs(); |
| 250 indexFile = new FileOutputStream(file); | 250 indexFile = new FileOutputStream(currentFile = file); |
| 251 } catch (IOException ex) { | 251 } catch (IOException ex) { |
| 252 String m = String.format("Failed to write file %s", filename); | 252 String m = String.format("Failed to write file %s", filename); |
| 253 throw new HgInvalidControlFileException(m, ex, new File(filename)); | 253 throw new HgInvalidControlFileException(m, ex, new File(filename)); |
| 254 } | 254 } |
| 255 stopIfCancelled(); | 255 stopIfCancelled(); |
| 277 | 277 |
| 278 private void closeIndexFile() throws IOException { | 278 private void closeIndexFile() throws IOException { |
| 279 indexFile.close(); | 279 indexFile.close(); |
| 280 indexFile = null; | 280 indexFile = null; |
| 281 filename = null; | 281 filename = null; |
| | 282 currentFile = null; |
| 282 } | 283 } |
| 283 | 284 |
| 284 private int knownRevision(Nodeid p) { | 285 private int knownRevision(Nodeid p) { |
| 285 if (p.isNull()) { | 286 if (p.isNull()) { |
| 286 return -1; | 287 return -1; |
| 365 } | 366 } |
| 366 | 367 |
| 367 revlogHeader.length(content.length, compressedLen); | 368 revlogHeader.length(content.length, compressedLen); |
| 368 | 369 |
| 369 // XXX may be wise not to create DataSerializer for each revision, but for a file | 370 // XXX may be wise not to create DataSerializer for each revision, but for a file |
| 370 DataAccessProvider.StreamDataSerializer sds = new DataAccessProvider.StreamDataSerializer(ctx.getLog(), indexFile) { | 371 DataSerializer sds = new DataSerializer() { |
| 371 @Override | 372 @Override |
| 372 public void done() { | 373 public void write(byte[] data, int offset, int length) throws HgIOException { |
| 373 // override parent behavior not to close stream in use | 374 try { |
| 374 } | 375 indexFile.write(data, offset, length); |
| | 376 } catch (IOException ex) { |
| | 377 throw new HgIOException("Write failure", ex, currentFile); |
| | 378 } |
| | 379 } |
| 375 }; | 380 }; |
| 376 revlogHeader.serialize(sds); | 381 revlogHeader.serialize(sds); |
| 377 | 382 |
| 378 if (useUncompressedData) { | 383 if (useUncompressedData) { |
| 379 indexFile.write((byte) 'u'); | 384 indexFile.write((byte) 'u'); |
| 387 sds.done(); | 392 sds.done(); |
| 388 // | 393 // |
| 389 revisionSequence.add(node); | 394 revisionSequence.add(node); |
| 390 prevRevContent.done(); | 395 prevRevContent.done(); |
| 391 prevRevContent = new ByteArrayDataAccess(content); | 396 prevRevContent = new ByteArrayDataAccess(content); |
| 392 } catch (IOException ex) { | 397 } catch (HgIOException ex) { |
| 393 String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename); | 398 String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename); |
| 394 throw new HgInvalidControlFileException(m, ex, new File(hgDir, filename)); | 399 throw new HgInvalidControlFileException(m, ex, currentFile); |
| | 400 } catch (IOException ex) { |
| | 401 String m = String.format("Failed to write revision %s of file %s", ge.node().shortNotation(), filename); |
| | 402 throw new HgInvalidControlFileException(m, ex, currentFile); |
| 395 } | 403 } |
| 396 return cancelException == null; | 404 return cancelException == null; |
| 397 } | 405 } |
| 398 /* | 406 /* |
| 399 $ hg debugindex build.gradle | 407 $ hg debugindex build.gradle |
