Mercurial > jhg
comparison src/org/tmatesoft/hg/internal/RevlogStreamWriter.java @ 618:7c0d2ce340b8
Refactor approach how content finds its way down to a commit revision
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
|---|---|
| date | Thu, 16 May 2013 19:46:13 +0200 |
| parents | 65c01508f002 |
| children | 6526d8adbc0f |
comparison
equal
deleted
inserted
replaced
| 617:65c01508f002 | 618:7c0d2ce340b8 |
|---|---|
| 17 package org.tmatesoft.hg.internal; | 17 package org.tmatesoft.hg.internal; |
| 18 | 18 |
| 19 import static org.tmatesoft.hg.internal.Internals.REVLOGV1_RECORD_SIZE; | 19 import static org.tmatesoft.hg.internal.Internals.REVLOGV1_RECORD_SIZE; |
| 20 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION; | 20 import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION; |
| 21 | 21 |
| 22 import java.io.File; | |
| 23 import java.io.IOException; | 22 import java.io.IOException; |
| 24 import java.nio.ByteBuffer; | 23 import java.nio.ByteBuffer; |
| 25 | 24 |
| 26 import org.tmatesoft.hg.core.HgIOException; | 25 import org.tmatesoft.hg.core.HgIOException; |
| 27 import org.tmatesoft.hg.core.Nodeid; | 26 import org.tmatesoft.hg.core.Nodeid; |
| 28 import org.tmatesoft.hg.core.SessionContext; | 27 import org.tmatesoft.hg.core.SessionContext; |
| 28 import org.tmatesoft.hg.internal.DataSerializer.ByteArrayDataSerializer; | |
| 29 import org.tmatesoft.hg.internal.DataSerializer.ByteArrayDataSource; | |
| 30 import org.tmatesoft.hg.internal.DataSerializer.DataSource; | |
| 29 import org.tmatesoft.hg.repo.HgInvalidControlFileException; | 31 import org.tmatesoft.hg.repo.HgInvalidControlFileException; |
| 30 import org.tmatesoft.hg.repo.HgInvalidStateException; | 32 import org.tmatesoft.hg.repo.HgInvalidStateException; |
| 31 | 33 |
| 32 /** | 34 /** |
| 33 * | 35 * |
| 58 } | 60 } |
| 59 | 61 |
| 60 /** | 62 /** |
| 61 * @return nodeid of added revision | 63 * @return nodeid of added revision |
| 62 */ | 64 */ |
| 63 public Nodeid addRevision(byte[] content, int linkRevision, int p1, int p2) throws HgIOException { | 65 public Nodeid addRevision(DataSource content, int linkRevision, int p1, int p2) throws HgIOException { |
| 64 lastEntryRevision = Nodeid.NULL; | 66 lastEntryRevision = Nodeid.NULL; |
| 65 int revCount = revlogStream.revisionCount(); | 67 int revCount = revlogStream.revisionCount(); |
| 66 lastEntryIndex = revCount == 0 ? NO_REVISION : revCount - 1; | 68 lastEntryIndex = revCount == 0 ? NO_REVISION : revCount - 1; |
| 67 populateLastEntry(); | 69 populateLastEntry(); |
| 68 // | 70 // |
| 69 Patch patch = GeneratePatchInspector.delta(lastEntryContent, content); | 71 byte[] contentByteArray = toByteArray(content); |
| 72 Patch patch = GeneratePatchInspector.delta(lastEntryContent, contentByteArray); | |
| 70 int patchSerializedLength = patch.serializedLength(); | 73 int patchSerializedLength = patch.serializedLength(); |
| 71 | 74 |
| 72 final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, content.length); | 75 final boolean writeComplete = preferCompleteOverPatch(patchSerializedLength, contentByteArray.length); |
| 73 DataSerializer.DataSource dataSource = writeComplete ? new DataSerializer.ByteArrayDataSource(content) : patch.new PatchDataSource(); | 76 DataSerializer.DataSource dataSource = writeComplete ? new ByteArrayDataSource(contentByteArray) : patch.new PatchDataSource(); |
| 74 revlogDataZip.reset(dataSource); | 77 revlogDataZip.reset(dataSource); |
| 75 final int compressedLen; | 78 final int compressedLen; |
| 76 final boolean useCompressedData = preferCompressedOverComplete(revlogDataZip.getCompressedLength(), dataSource.serializeLength()); | 79 final boolean useCompressedData = preferCompressedOverComplete(revlogDataZip.getCompressedLength(), dataSource.serializeLength()); |
| 77 if (useCompressedData) { | 80 if (useCompressedData) { |
| 78 compressedLen= revlogDataZip.getCompressedLength(); | 81 compressedLen= revlogDataZip.getCompressedLength(); |
| 81 compressedLen = dataSource.serializeLength() + 1 /*1 byte for 'u' - uncompressed prefix byte*/; | 84 compressedLen = dataSource.serializeLength() + 1 /*1 byte for 'u' - uncompressed prefix byte*/; |
| 82 } | 85 } |
| 83 // | 86 // |
| 84 Nodeid p1Rev = revision(p1); | 87 Nodeid p1Rev = revision(p1); |
| 85 Nodeid p2Rev = revision(p2); | 88 Nodeid p2Rev = revision(p2); |
| 86 byte[] revisionNodeidBytes = dh.sha1(p1Rev, p2Rev, content).asBinary(); | 89 byte[] revisionNodeidBytes = dh.sha1(p1Rev, p2Rev, contentByteArray).asBinary(); |
| 87 // | 90 // |
| 88 | 91 |
| 89 DataSerializer indexFile, dataFile, activeFile; | 92 DataSerializer indexFile, dataFile; |
| 90 indexFile = dataFile = activeFile = null; | 93 indexFile = dataFile = null; |
| 91 try { | 94 try { |
| 92 // | 95 // |
| 93 activeFile = indexFile = revlogStream.getIndexStreamWriter(transaction); | 96 indexFile = revlogStream.getIndexStreamWriter(transaction); |
| 94 final boolean isInlineData = revlogStream.isInlineData(); | 97 final boolean isInlineData = revlogStream.isInlineData(); |
| 95 HeaderWriter revlogHeader = new HeaderWriter(isInlineData); | 98 HeaderWriter revlogHeader = new HeaderWriter(isInlineData); |
| 96 revlogHeader.length(content.length, compressedLen); | 99 revlogHeader.length(contentByteArray.length, compressedLen); |
| 97 revlogHeader.nodeid(revisionNodeidBytes); | 100 revlogHeader.nodeid(revisionNodeidBytes); |
| 98 revlogHeader.linkRevision(linkRevision); | 101 revlogHeader.linkRevision(linkRevision); |
| 99 revlogHeader.parents(p1, p2); | 102 revlogHeader.parents(p1, p2); |
| 100 revlogHeader.baseRevision(writeComplete ? lastEntryIndex+1 : lastEntryBase); | 103 revlogHeader.baseRevision(writeComplete ? lastEntryIndex+1 : lastEntryBase); |
| 101 long lastEntryOffset = revlogStream.newEntryOffset(); | 104 long lastEntryOffset = revlogStream.newEntryOffset(); |
| 106 if (isInlineData) { | 109 if (isInlineData) { |
| 107 dataFile = indexFile; | 110 dataFile = indexFile; |
| 108 } else { | 111 } else { |
| 109 dataFile = revlogStream.getDataStreamWriter(transaction); | 112 dataFile = revlogStream.getDataStreamWriter(transaction); |
| 110 } | 113 } |
| 111 activeFile = dataFile; | |
| 112 if (useCompressedData) { | 114 if (useCompressedData) { |
| 113 int actualCompressedLenWritten = revlogDataZip.writeCompressedData(dataFile); | 115 int actualCompressedLenWritten = revlogDataZip.writeCompressedData(dataFile); |
| 114 if (actualCompressedLenWritten != compressedLen) { | 116 if (actualCompressedLenWritten != compressedLen) { |
| 115 throw new HgInvalidStateException(String.format("Expected %d bytes of compressed data, but actually wrote %d in %s", compressedLen, actualCompressedLenWritten, revlogStream.getDataFileName())); | 117 throw new HgInvalidStateException(String.format("Expected %d bytes of compressed data, but actually wrote %d in %s", compressedLen, actualCompressedLenWritten, revlogStream.getDataFileName())); |
| 116 } | 118 } |
| 118 dataFile.writeByte((byte) 'u'); | 120 dataFile.writeByte((byte) 'u'); |
| 119 dataSource.serialize(dataFile); | 121 dataSource.serialize(dataFile); |
| 120 } | 122 } |
| 121 | 123 |
| 122 | 124 |
| 123 lastEntryContent = content; | 125 lastEntryContent = contentByteArray; |
| 124 lastEntryBase = revlogHeader.baseRevision(); | 126 lastEntryBase = revlogHeader.baseRevision(); |
| 125 lastEntryIndex++; | 127 lastEntryIndex++; |
| 126 lastEntryRevision = Nodeid.fromBinary(revisionNodeidBytes, 0); | 128 lastEntryRevision = Nodeid.fromBinary(revisionNodeidBytes, 0); |
| 127 revisionCache.put(lastEntryIndex, lastEntryRevision); | 129 revisionCache.put(lastEntryIndex, lastEntryRevision); |
| 128 | 130 |
| 129 revlogStream.revisionAdded(lastEntryIndex, lastEntryRevision, lastEntryBase, lastEntryOffset); | 131 revlogStream.revisionAdded(lastEntryIndex, lastEntryRevision, lastEntryBase, lastEntryOffset); |
| 130 } catch (IOException ex) { | |
| 131 String m = String.format("Failed to write revision %d", lastEntryIndex+1, null); | |
| 132 // FIXME proper file in the exception based on activeFile == dataFile || indexFile | |
| 133 throw new HgIOException(m, ex, new File(revlogStream.getDataFileName())); | |
| 134 } finally { | 132 } finally { |
| 135 indexFile.done(); | 133 indexFile.done(); |
| 136 if (dataFile != null && dataFile != indexFile) { | 134 if (dataFile != null && dataFile != indexFile) { |
| 137 dataFile.done(); | 135 dataFile.done(); |
| 138 } | 136 } |
| 139 } | 137 } |
| 140 return lastEntryRevision; | 138 return lastEntryRevision; |
| 141 } | 139 } |
| 142 | 140 |
| 141 private byte[] toByteArray(DataSource content) throws HgIOException { | |
| 142 ByteArrayDataSerializer ba = new ByteArrayDataSerializer(); | |
| 143 content.serialize(ba); | |
| 144 return ba.toByteArray(); | |
| 145 } | |
| 146 | |
| 143 private Nodeid revision(int revisionIndex) { | 147 private Nodeid revision(int revisionIndex) { |
| 144 if (revisionIndex == NO_REVISION) { | 148 if (revisionIndex == NO_REVISION) { |
| 145 return Nodeid.NULL; | 149 return Nodeid.NULL; |
| 146 } | 150 } |
| 147 Nodeid n = revisionCache.get(revisionIndex); | 151 Nodeid n = revisionCache.get(revisionIndex); |
| 249 public HeaderWriter nodeid(byte[] nodeidBytes) { | 253 public HeaderWriter nodeid(byte[] nodeidBytes) { |
| 250 nodeid = nodeidBytes; | 254 nodeid = nodeidBytes; |
| 251 return this; | 255 return this; |
| 252 } | 256 } |
| 253 | 257 |
| 254 public void serialize(DataSerializer out) throws IOException { | 258 public void serialize(DataSerializer out) throws HgIOException { |
| 255 header.clear(); | 259 header.clear(); |
| 256 if (offset == 0) { | 260 if (offset == 0) { |
| 257 int version = 1 /* RevlogNG */; | 261 int version = 1 /* RevlogNG */; |
| 258 if (isInline) { | 262 if (isInline) { |
| 259 version |= RevlogStream.INLINEDATA; | 263 version |= RevlogStream.INLINEDATA; |
