src/org/tmatesoft/hg/repo/HgBlameFacility.java @ 569:c4fd1037bc6f

Support for copy/rename follow/no-follow for annotate
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Wed, 10 Apr 2013 20:04:54 +0200
parents 8ed4f4f4f0a6
children 36853bb80a35
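
The change teaches annotate to follow a file across copy/rename and adds a [start..end] changelog-range flavour of the call. Below is a minimal sketch of how a client might drive the updated API; it is not part of the changeset, and it assumes an HgBlameFacility(HgDataFile) constructor, HgLookup.detect(String) and HgRepository.getFileNode(String) lookups, and the Inspector/block callback shape visible further down in this diff. Verify these signatures against the hg4j version at hand.

	import org.tmatesoft.hg.core.HgIterateDirection;
	import org.tmatesoft.hg.repo.HgBlameFacility;
	import org.tmatesoft.hg.repo.HgDataFile;
	import org.tmatesoft.hg.repo.HgLookup;
	import org.tmatesoft.hg.repo.HgRepository;

	public class AnnotateWithRenames {
		public static void main(String[] args) throws Exception {
			// repository location and file path are placeholders; HgLookup's package/signature may differ between versions
			HgRepository repo = new HgLookup().detect(args[0]);
			HgDataFile file = repo.getFileNode(args[1]);
			HgBlameFacility blame = new HgBlameFacility(file); // constructor shape assumed from the df field used in this revision
			HgBlameFacility.Inspector insp = new HgBlameFacility.Inspector() {
				public void same(HgBlameFacility.EqualBlock block) {
					// unchanged lines are of no interest here
				}
				public void added(HgBlameFacility.AddBlock block) {
					System.out.printf("cset %d: +%d line(s) at %d%n",
							block.targetChangesetIndex(), block.totalAddedLines(), block.firstAddedLine());
				}
				public void changed(HgBlameFacility.ChangeBlock block) {
					System.out.printf("cset %d: %d line(s) replaced with %d%n",
							block.targetChangesetIndex(), block.totalRemovedLines(), block.totalAddedLines());
				}
				public void deleted(HgBlameFacility.DeleteBlock block) {
					System.out.printf("cset %d: -%d line(s)%n",
							block.targetChangesetIndex(), block.totalRemovedLines());
				}
			};
			// whole history up to tip, oldest changes first; with this changeset the walk continues
			// into the copy/rename source file(s) instead of stopping at the rename point
			blame.annotate(HgRepository.TIP, insp, HgIterateDirection.OldToNew);
			// the new range flavour: only changesets in [start..end] are reported
			blame.annotate(0, HgRepository.TIP, insp, HgIterateDirection.NewToOld);
		}
	}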
comparing 568:8ed4f4f4f0a6 (parent) with 569:c4fd1037bc6f
@@ -14 +14 @@
  * the terms of a license other than GNU General Public License
  * contact TMate Software at support@hg4j.com
  */
 package org.tmatesoft.hg.repo;

-import static org.tmatesoft.hg.repo.HgRepository.NO_REVISION;
-import static org.tmatesoft.hg.repo.HgRepository.TIP;
-
+import static org.tmatesoft.hg.core.HgIterateDirection.NewToOld;
+import static org.tmatesoft.hg.core.HgIterateDirection.OldToNew;
+import static org.tmatesoft.hg.repo.HgInternals.wrongRevisionIndex;
+import static org.tmatesoft.hg.repo.HgRepository.*;
+
+import java.util.Arrays;
 import java.util.BitSet;
+import java.util.Collections;
 import java.util.LinkedList;
-import java.util.ListIterator;

 import org.tmatesoft.hg.core.HgCallbackTargetException;
 import org.tmatesoft.hg.core.HgIterateDirection;
 import org.tmatesoft.hg.core.Nodeid;
-import org.tmatesoft.hg.internal.ByteArrayChannel;
+import org.tmatesoft.hg.internal.BlameHelper;
 import org.tmatesoft.hg.internal.Callback;
-import org.tmatesoft.hg.internal.DiffHelper;
 import org.tmatesoft.hg.internal.Experimental;
-import org.tmatesoft.hg.internal.IntMap;
 import org.tmatesoft.hg.internal.IntVector;
-import org.tmatesoft.hg.internal.Internals;
-import org.tmatesoft.hg.internal.DiffHelper.LineSequence;
-import org.tmatesoft.hg.internal.DiffHelper.LineSequence.ByteChain;
-import org.tmatesoft.hg.internal.RangeSeq;
-import org.tmatesoft.hg.repo.HgBlameFacility.RevisionDescriptor.Recipient;
 import org.tmatesoft.hg.util.Adaptable;
-import org.tmatesoft.hg.util.CancelledException;
-import org.tmatesoft.hg.util.Pair;

 /**
  * Facility with diff/annotate functionality.
  *
  * @author Artem Tikhomirov
@@ -60 +54 @@

 	/**
 	 * mimic 'hg diff -r clogRevIndex1 -r clogRevIndex2'
 	 */
 	public void diff(int clogRevIndex1, int clogRevIndex2, Inspector insp) throws HgCallbackTargetException {
+		// FIXME clogRevIndex1 and clogRevIndex2 may point to different files, need to decide whether to throw an exception
+		// or to attempt to look up correct file node (tricky)
 		int fileRevIndex1 = fileRevIndex(df, clogRevIndex1);
 		int fileRevIndex2 = fileRevIndex(df, clogRevIndex2);
-		FileLinesCache fileInfoCache = new FileLinesCache(df, 5);
-		LineSequence c1 = fileInfoCache.lines(fileRevIndex1);
-		LineSequence c2 = fileInfoCache.lines(fileRevIndex2);
-		DiffHelper<LineSequence> pg = new DiffHelper<LineSequence>();
-		pg.init(c1, c2);
-		BlameBlockInspector bbi = new BlameBlockInspector(fileRevIndex2, insp, clogRevIndex1, clogRevIndex2);
-		pg.findMatchingBlocks(bbi);
-		bbi.checkErrors();
+		BlameHelper bh = new BlameHelper(insp, 5);
+		bh.useFileUpTo(df, clogRevIndex2);
+		bh.diff(fileRevIndex1, clogRevIndex1, fileRevIndex2, clogRevIndex2);
 	}

 	/**
 	 * Walk file history up/down to revision at given changeset and report changes for each revision
 	 */
 	public void annotate(int changelogRevisionIndex, Inspector insp, HgIterateDirection iterateOrder) throws HgCallbackTargetException {
+		annotate(0, changelogRevisionIndex, insp, iterateOrder);
+	}
+
+	/**
+	 * Walk file history range and report changes for each revision
+	 */
+	public void annotate(int changelogRevIndexStart, int changelogRevIndexEnd, Inspector insp, HgIterateDirection iterateOrder) throws HgCallbackTargetException {
+		if (wrongRevisionIndex(changelogRevIndexStart) || wrongRevisionIndex(changelogRevIndexEnd)) {
+			throw new IllegalArgumentException();
+		}
+		// Note, changelogRevisionIndex may be TIP, while the code below doesn't tolerate constants
+		//
+		int lastRevision = df.getRepo().getChangelog().getLastRevision();
+		if (changelogRevIndexEnd == TIP) {
+			changelogRevIndexEnd = lastRevision;
+		}
+		HgInternals.checkRevlogRange(changelogRevIndexStart, changelogRevIndexEnd, lastRevision);
 		if (!df.exists()) {
 			return;
 		}
-		// Note, changelogRevisionIndex may be TIP, while #implAnnotateChange doesn't tolerate constants
-		//
-		FileRevisionHistoryChunk fileHistory = new FileRevisionHistoryChunk(df);
-		fileHistory.init(changelogRevisionIndex);
-		// fileHistory.linkTo(null); FIXME
-
-		int[] fileRevParents = new int[2];
-		FileLinesCache fileInfoCache = new FileLinesCache(df, 10);
-		for (int fri : fileHistory.fileRevisions(iterateOrder)) {
-			int clogRevIndex = df.getChangesetRevisionIndex(fri);
-			fileHistory.getParents(fri, fileRevParents);
-			implAnnotateChange(fileInfoCache, clogRevIndex, fri, fileRevParents, insp);
+		BlameHelper bh = new BlameHelper(insp, 10);
+		HgDataFile currentFile = df;
+		int fileLastClogRevIndex = changelogRevIndexEnd;
+		FileRevisionHistoryChunk nextChunk = null;
+		LinkedList<FileRevisionHistoryChunk> fileCompleteHistory = new LinkedList<FileRevisionHistoryChunk>();
+		do {
+			FileRevisionHistoryChunk fileHistory = new FileRevisionHistoryChunk(currentFile);
+			fileHistory.init(fileLastClogRevIndex);
+			fileHistory.linkTo(nextChunk);
+			fileCompleteHistory.addFirst(fileHistory); // to get the list in old-to-new order
+			nextChunk = fileHistory;
+			bh.useFileUpTo(currentFile, fileLastClogRevIndex);
+			if (currentFile.isCopy()) {
+				// TODO SessionContext.getPathFactory() and replace all Path.create
+				HgRepository repo = currentFile.getRepo();
+				currentFile = repo.getFileNode(currentFile.getCopySourceName());
+				fileLastClogRevIndex = repo.getChangelog().getRevisionIndex(currentFile.getCopySourceRevision());
+				// XXX perhaps, shall fail with meaningful exception if new file doesn't exist (.i/.d not found for whatever reason)
+				// or source revision is missing?
+			} else {
+				currentFile = null; // stop iterating
+			}
+		} while (currentFile != null && fileLastClogRevIndex >= changelogRevIndexStart);
+		// fileCompleteHistory is in (origin, intermediate target, ultimate target) order
+
+		int[] fileClogParentRevs = new int[2];
+		int[] fileParentRevs = new int[2];
+		if (iterateOrder == NewToOld) {
+			Collections.reverse(fileCompleteHistory);
+		}
+		boolean shallFilterStart = changelogRevIndexStart != 0; // no reason if complete history is walked
+		for (FileRevisionHistoryChunk fileHistory : fileCompleteHistory) {
+			for (int fri : fileHistory.fileRevisions(iterateOrder)) {
+				int clogRevIndex = fileHistory.changeset(fri);
+				if (shallFilterStart) {
+					if (iterateOrder == NewToOld) {
+						// clogRevIndex decreases
+						if (clogRevIndex < changelogRevIndexStart) {
+							break;
+						}
+						// fall-through, clogRevIndex is in the [start..end] range
+					} else { // old to new
+						// the way we built fileHistory ensures we won't walk past changelogRevIndexEnd
+						// here we ensure we start from the right one, the one indicated with changelogRevIndexStart
+						if (clogRevIndex < changelogRevIndexStart) {
+							continue;
+						} else {
+							shallFilterStart = false; // once boundary is crossed, no need to check
+							// fall-through
+						}
+					}
+				}
+				fileHistory.fillFileParents(fri, fileParentRevs);
+				fileHistory.fillCsetParents(fri, fileClogParentRevs);
+				bh.annotateChange(fri, clogRevIndex, fileParentRevs, fileClogParentRevs);
+			}
 		}
 	}
98 150
99 /** 151 /**
100 * Annotates changes of the file against its parent(s). 152 * Annotates changes of the file against its parent(s).
107 int[] fileRevParents = new int[2]; 159 int[] fileRevParents = new int[2];
108 df.parents(fileRevIndex, fileRevParents, null, null); 160 df.parents(fileRevIndex, fileRevParents, null, null);
109 if (changelogRevisionIndex == TIP) { 161 if (changelogRevisionIndex == TIP) {
110 changelogRevisionIndex = df.getChangesetRevisionIndex(fileRevIndex); 162 changelogRevisionIndex = df.getChangesetRevisionIndex(fileRevIndex);
111 } 163 }
112 implAnnotateChange(new FileLinesCache(df, 5), changelogRevisionIndex, fileRevIndex, fileRevParents, insp); 164 BlameHelper bh = new BlameHelper(insp, 5);
113 } 165 bh.useFileUpTo(df, changelogRevisionIndex);
114 166 int[] fileClogParentRevs = new int[2];
115 private void implAnnotateChange(FileLinesCache fl, int csetRevIndex, int fileRevIndex, int[] fileParentRevs, Inspector insp) throws HgCallbackTargetException { 167 fileClogParentRevs[0] = fileRevParents[0] == NO_REVISION ? NO_REVISION : df.getChangesetRevisionIndex(fileRevParents[0]);
116 final LineSequence fileRevLines = fl.lines(fileRevIndex); 168 fileClogParentRevs[1] = fileRevParents[1] == NO_REVISION ? NO_REVISION : df.getChangesetRevisionIndex(fileRevParents[1]);
117 if (fileParentRevs[0] != NO_REVISION && fileParentRevs[1] != NO_REVISION) { 169 bh.annotateChange(fileRevIndex, changelogRevisionIndex, fileRevParents, fileClogParentRevs);
118 LineSequence p1Lines = fl.lines(fileParentRevs[0]);
119 LineSequence p2Lines = fl.lines(fileParentRevs[1]);
120 int p1ClogIndex = fl.getChangesetRevisionIndex(fileParentRevs[0]);
121 int p2ClogIndex = fl.getChangesetRevisionIndex(fileParentRevs[1]);
122 DiffHelper<LineSequence> pg = new DiffHelper<LineSequence>();
123 pg.init(p2Lines, fileRevLines);
124 EqualBlocksCollector p2MergeCommon = new EqualBlocksCollector();
125 pg.findMatchingBlocks(p2MergeCommon);
126 //
127 pg.init(p1Lines);
128 BlameBlockInspector bbi = new BlameBlockInspector(fileRevIndex, insp, p1ClogIndex, csetRevIndex);
129 bbi.setMergeParent2(p2MergeCommon, p2ClogIndex);
130 pg.findMatchingBlocks(bbi);
131 bbi.checkErrors();
132 } else if (fileParentRevs[0] == fileParentRevs[1]) {
133 // may be equal iff both are unset
134 assert fileParentRevs[0] == NO_REVISION;
135 // everything added
136 BlameBlockInspector bbi = new BlameBlockInspector(fileRevIndex, insp, NO_REVISION, csetRevIndex);
137 bbi.begin(LineSequence.newlines(new byte[0]), fileRevLines);
138 bbi.match(0, fileRevLines.chunkCount()-1, 0);
139 bbi.end();
140 bbi.checkErrors();
141 } else {
142 int soleParent = fileParentRevs[0] == NO_REVISION ? fileParentRevs[1] : fileParentRevs[0];
143 assert soleParent != NO_REVISION;
144 LineSequence parentLines = fl.lines(soleParent);
145
146 int parentChangesetRevIndex = fl.getChangesetRevisionIndex(soleParent);
147 DiffHelper<LineSequence> pg = new DiffHelper<LineSequence>();
148 pg.init(parentLines, fileRevLines);
149 BlameBlockInspector bbi = new BlameBlockInspector(fileRevIndex, insp, parentChangesetRevIndex, csetRevIndex);
150 pg.findMatchingBlocks(bbi);
151 bbi.checkErrors();
152 }
153 }
154
155 private static int fileRevIndex(HgDataFile df, int csetRevIndex) {
156 Nodeid fileRev = df.getRepo().getManifest().getFileRevision(csetRevIndex, df.getPath());
157 return df.getRevisionIndex(fileRev);
158 }
159
160 private static class FileRevisionHistoryChunk {
161 private final HgDataFile df;
162 private IntVector fileRevsToVisit;
163 private IntVector fileParentRevs;
164
165 public FileRevisionHistoryChunk(HgDataFile file) {
166 df = file;
167 }
168
169 public void getParents(int fileRevIndex, int[] fileRevParents) {
170 fileRevParents[0] = fileParentRevs.get(fileRevIndex * 2);
171 fileRevParents[1] = fileParentRevs.get(fileRevIndex * 2 + 1);
172 }
173
174 public void init (int changelogRevisionIndex) {
175 // XXX df.indexWalk(0, fileRevIndex, ) might be more effective
176 int fileRevIndex = fileRevIndex(df, changelogRevisionIndex);
177 int[] fileRevParents = new int[2];
178 fileParentRevs = new IntVector((fileRevIndex+1) * 2, 0);
179 fileParentRevs.add(NO_REVISION, NO_REVISION); // parents of fileRevIndex == 0
180 for (int i = 1; i <= fileRevIndex; i++) {
181 df.parents(i, fileRevParents, null, null);
182 fileParentRevs.add(fileRevParents[0], fileRevParents[1]);
183 }
184 fileRevsToVisit = new IntVector(fileRevIndex + 1, 0);
185 LinkedList<Integer> queue = new LinkedList<Integer>();
186 BitSet seen = new BitSet(fileRevIndex + 1);
187 queue.add(fileRevIndex);
188 do {
189 int x = queue.removeFirst();
190 if (seen.get(x)) {
191 continue;
192 }
193 seen.set(x);
194 fileRevsToVisit.add(x);
195 int p1 = fileParentRevs.get(2*x);
196 int p2 = fileParentRevs.get(2*x + 1);
197 if (p1 != NO_REVISION) {
198 queue.addLast(p1);
199 }
200 if (p2 != NO_REVISION) {
201 queue.addLast(p2);
202 }
203 } while (!queue.isEmpty());
204 // make sure no child is processed before we handled all (grand-)parents of the element
205 fileRevsToVisit.sort(false);
206 // now fileRevsToVisit keep file change ancestry from new to old
207 }
208
209 public void linkTo(FileRevisionHistoryChunk origin) {
210 Internals.notImplemented();
211 }
212
213 public int[] fileRevisions(HgIterateDirection iterateOrder) {
214 // fileRevsToVisit is { r10, r7, r6, r5, r0 }, new to old
215 int[] rv = fileRevsToVisit.toArray();
216 if (iterateOrder == HgIterateDirection.OldToNew) {
217 // reverse return value
218 for (int a = 0, b = rv.length-1; a < b; a++, b--) {
219 int t = rv[b];
220 rv[b] = rv[a];
221 rv[a] = t;
222 }
223 }
224 return rv;
225 }
226 }
227
228 private static class FileLinesCache {
229 private final HgDataFile df;
230 private final LinkedList<Pair<Integer, LineSequence>> lruCache;
231 private final int limit;
232 private IntMap<Integer> fileToClogIndexMap = new IntMap<Integer>(20);
233
234 public FileLinesCache(HgDataFile file, int lruLimit) {
235 df = file;
236 limit = lruLimit;
237 lruCache = new LinkedList<Pair<Integer, LineSequence>>();
238 }
239
240 public int getChangesetRevisionIndex(int fileRevIndex) {
241 Integer cached = fileToClogIndexMap.get(fileRevIndex);
242 if (cached == null) {
243 cached = df.getChangesetRevisionIndex(fileRevIndex);
244 fileToClogIndexMap.put(fileRevIndex, cached);
245 }
246 return cached.intValue();
247 }
248
249 public LineSequence lines(int fileRevIndex) {
250 Pair<Integer, LineSequence> cached = checkCache(fileRevIndex);
251 if (cached != null) {
252 return cached.second();
253 }
254 try {
255 ByteArrayChannel c;
256 df.content(fileRevIndex, c = new ByteArrayChannel());
257 LineSequence rv = LineSequence.newlines(c.toArray());
258 lruCache.addFirst(new Pair<Integer, LineSequence>(fileRevIndex, rv));
259 if (lruCache.size() > limit) {
260 lruCache.removeLast();
261 }
262 return rv;
263 } catch (CancelledException ex) {
264 // TODO likely it was bad idea to throw cancelled exception from content()
265 // deprecate and provide alternative?
266 HgInvalidStateException ise = new HgInvalidStateException("ByteArrayChannel never throws CancelledException");
267 ise.initCause(ex);
268 throw ise;
269 }
270 }
271
272 private Pair<Integer,LineSequence> checkCache(int fileRevIndex) {
273 Pair<Integer, LineSequence> rv = null;
274 for (ListIterator<Pair<Integer, LineSequence>> it = lruCache.listIterator(); it.hasNext(); ) {
275 Pair<Integer, LineSequence> p = it.next();
276 if (p.first() == fileRevIndex) {
277 rv = p;
278 it.remove();
279 break;
280 }
281 }
282 if (rv != null) {
283 lruCache.addFirst(rv);
284 }
285 return rv;
286 }
287 } 170 }
288 171
289 /** 172 /**
290 * Client's sink for revision differences. 173 * Client's sink for revision differences.
291 * 174 *
371 * @return revision index of the change in target file's revlog 254 * @return revision index of the change in target file's revlog
372 */ 255 */
373 int fileRevisionIndex(); 256 int fileRevisionIndex();
374 257
375 /** 258 /**
259 * @return file object under blame (target file)
260 */
261 HgDataFile file();
262
263 /**
376 * Implement to indicate interest in {@link RevisionDescriptor}. 264 * Implement to indicate interest in {@link RevisionDescriptor}.
377 * 265 *
378 * Note, instance of {@link RevisionDescriptor} is the same for 266 * Note, instance of {@link RevisionDescriptor} is the same for
379 * {@link #start(RevisionDescriptor)} and {@link #done(RevisionDescriptor)} 267 * {@link #start(RevisionDescriptor)} and {@link #done(RevisionDescriptor)}
380 * methods, and not necessarily a new one (i.e. <code>==</code>) for the next 268 * methods, and not necessarily a new one (i.e. <code>==</code>) for the next
445 */ 333 */
446 BlockData removedLines(); 334 BlockData removedLines();
447 } 335 }
448 public interface ChangeBlock extends AddBlock, DeleteBlock { 336 public interface ChangeBlock extends AddBlock, DeleteBlock {
449 } 337 }
450 338
451 private static class BlameBlockInspector extends DiffHelper.DeltaInspector<LineSequence> { 339
452 private final Inspector insp; 340 private static int fileRevIndex(HgDataFile df, int csetRevIndex) {
453 private final int csetOrigin; 341 Nodeid fileRev = df.getRepo().getManifest().getFileRevision(csetRevIndex, df.getPath());
454 private final int csetTarget; 342 return df.getRevisionIndex(fileRev);
455 private EqualBlocksCollector p2MergeCommon; 343 }
456 private int csetMergeParent; 344
457 private IntVector mergeRanges; 345 private static class FileRevisionHistoryChunk {
458 private final AnnotateRev annotatedRevision; 346 private final HgDataFile df;
459 private HgCallbackTargetException error; 347 // change ancestry, sequence of file revisions
460 348 private IntVector fileRevsToVisit;
461 public BlameBlockInspector(int fileRevIndex, Inspector inspector, int originCset, int targetCset) { 349 // parent pairs of complete file history
462 assert inspector != null; 350 private IntVector fileParentRevs;
463 insp = inspector; 351 // map file revision to changelog revision (sparse array, only file revisions to visit are set)
464 annotatedRevision = new AnnotateRev(); 352 private int[] file2changelog;
465 annotatedRevision.set(fileRevIndex); 353 private int originChangelogRev = BAD_REVISION, originFileRev = BAD_REVISION;
466 csetOrigin = originCset; 354
467 csetTarget = targetCset; 355 public FileRevisionHistoryChunk(HgDataFile file) {
468 } 356 df = file;
469 357 }
470 public void setMergeParent2(EqualBlocksCollector p2Merge, int parentCset2) { 358
471 p2MergeCommon = p2Merge; 359 public void init(int changelogRevisionIndex) {
472 csetMergeParent = parentCset2; 360 // XXX df.indexWalk(0, fileRevIndex, ) might be more effective
473 mergeRanges = new IntVector(3*10, 3*10); 361 int fileRevIndex = fileRevIndex(df, changelogRevisionIndex);
474 } 362 int[] fileRevParents = new int[2];
475 363 fileParentRevs = new IntVector((fileRevIndex+1) * 2, 0);
476 @Override 364 fileParentRevs.add(NO_REVISION, NO_REVISION); // parents of fileRevIndex == 0
477 public void begin(LineSequence s1, LineSequence s2) { 365 for (int i = 1; i <= fileRevIndex; i++) {
478 super.begin(s1, s2); 366 df.parents(i, fileRevParents, null, null);
479 if (shallStop()) { 367 fileParentRevs.add(fileRevParents[0], fileRevParents[1]);
368 }
369 // fileRevsToVisit keep file change ancestry from new to old
370 fileRevsToVisit = new IntVector(fileRevIndex + 1, 0);
371 // keep map of file revision to changelog revision
372 file2changelog = new int[fileRevIndex+1];
373 // only elements worth visit would get mapped, so there would be unfilled areas in the file2changelog,
374 // prevent from error (make it explicit) by bad value
375 Arrays.fill(file2changelog, BAD_REVISION);
376 LinkedList<Integer> queue = new LinkedList<Integer>();
377 BitSet seen = new BitSet(fileRevIndex + 1);
378 queue.add(fileRevIndex);
379 do {
380 int x = queue.removeFirst();
381 if (seen.get(x)) {
382 continue;
383 }
384 seen.set(x);
385 fileRevsToVisit.add(x);
386 file2changelog[x] = df.getChangesetRevisionIndex(x);
387 int p1 = fileParentRevs.get(2*x);
388 int p2 = fileParentRevs.get(2*x + 1);
389 if (p1 != NO_REVISION) {
390 queue.addLast(p1);
391 }
392 if (p2 != NO_REVISION) {
393 queue.addLast(p2);
394 }
395 } while (!queue.isEmpty());
396 // make sure no child is processed before we handled all (grand-)parents of the element
397 fileRevsToVisit.sort(false);
398 }
399
400 public void linkTo(FileRevisionHistoryChunk target) {
401 // assume that target.init() has been called already
402 if (target == null) {
480 return; 403 return;
481 } 404 }
482 ContentBlock originContent = new ContentBlock(s1); 405 target.originFileRev = fileRevsToVisit.get(0); // files to visit are new to old
483 ContentBlock targetContent = new ContentBlock(s2); 406 target.originChangelogRev = changeset(target.originFileRev);
484 annotatedRevision.set(originContent, targetContent); 407 }
485 annotatedRevision.set(csetOrigin, csetTarget, p2MergeCommon != null ? csetMergeParent : NO_REVISION); 408
486 Recipient curious = Adaptable.Factory.getAdapter(insp, Recipient.class, null); 409 public int[] fileRevisions(HgIterateDirection iterateOrder) {
487 if (curious != null) { 410 // fileRevsToVisit is { r10, r7, r6, r5, r0 }, new to old
488 try { 411 int[] rv = fileRevsToVisit.toArray();
489 curious.start(annotatedRevision); 412 if (iterateOrder == OldToNew) {
490 } catch (HgCallbackTargetException ex) { 413 // reverse return value
491 error = ex; 414 for (int a = 0, b = rv.length-1; a < b; a++, b--) {
415 int t = rv[b];
416 rv[b] = rv[a];
417 rv[a] = t;
492 } 418 }
493 } 419 }
494 } 420 return rv;
495 421 }
496 @Override 422
497 public void end() { 423 public int changeset(int fileRevIndex) {
498 super.end(); 424 return file2changelog[fileRevIndex];
499 if (shallStop()) { 425 }
426
427 public void fillFileParents(int fileRevIndex, int[] fileParents) {
428 if (fileRevIndex == 0 && originFileRev != BAD_REVISION) {
429 // this chunk continues another file
430 assert originFileRev != NO_REVISION;
431 fileParents[0] = originFileRev;
432 fileParents[1] = NO_REVISION;
500 return; 433 return;
501 } 434 }
502 Recipient curious = Adaptable.Factory.getAdapter(insp, Recipient.class, null); 435 fileParents[0] = fileParentRevs.get(fileRevIndex * 2);
503 if (curious != null) { 436 fileParents[1] = fileParentRevs.get(fileRevIndex * 2 + 1);
504 try { 437 }
505 curious.done(annotatedRevision); 438
506 } catch (HgCallbackTargetException ex) { 439 public void fillCsetParents(int fileRevIndex, int[] csetParents) {
507 error = ex; 440 if (fileRevIndex == 0 && originFileRev != BAD_REVISION) {
508 } 441 assert originFileRev != NO_REVISION;
509 } 442 csetParents[0] = originChangelogRev;
510 p2MergeCommon = null; 443 csetParents[1] = NO_REVISION; // I wonder if possible to start a copy with two parents?
511 }
512
513 @Override
514 protected void changed(int s1From, int s1To, int s2From, int s2To) {
515 if (shallStop()) {
516 return; 444 return;
517 } 445 }
518 try { 446 int fp1 = fileParentRevs.get(fileRevIndex * 2);
519 if (p2MergeCommon != null) { 447 int fp2 = fileParentRevs.get(fileRevIndex * 2 + 1);
520 mergeRanges.clear(); 448 csetParents[0] = fp1 == NO_REVISION ? NO_REVISION : changeset(fp1);
521 p2MergeCommon.combineAndMarkRangesWithTarget(s2From, s2To - s2From, csetOrigin, csetMergeParent, mergeRanges); 449 csetParents[1] = fp2 == NO_REVISION ? NO_REVISION : changeset(fp2);
522
523 /*
524 * Usecases, how it USED TO BE initially:
525 * 3 lines changed to 10 lines. range of 10 lines breaks down to 2 from p2, 3 from p1, and 5 from p2.
526 * We report: 2 lines changed to 2(p2), then 1 line changed with 3(p1) and 5 lines added from p2.
527 *
528 * 10 lines changed to 3 lines, range of 3 lines breaks down to 2 line from p1 and 1 line from p2.
529 * We report: 2 lines changed to 2(p1) and 8 lines changed to 1(p2)
530 *
531 * NOW, lines from p2 are always reported as pure add (since we need their insertion point to be in p2, not in p1)
532 * and we try to consume p1 changes as soon as we see first p1's range
533 */
534 int s1TotalLines = s1To - s1From, s1ConsumedLines = 0, s1Start = s1From;
535
536 for (int i = 0; i < mergeRanges.size(); i += 3) {
537 final int rangeOrigin = mergeRanges.get(i);
538 final int rangeStart = mergeRanges.get(i+1);
539 final int rangeLen = mergeRanges.get(i+2);
540 final boolean lastRange = i+3 >= mergeRanges.size();
541 final int s1LinesLeft = s1TotalLines - s1ConsumedLines;
542 // how many lines we may report as changed (don't use more than in range unless it's the very last range)
543 final int s1LinesToBorrow = lastRange ? s1LinesLeft : Math.min(s1LinesLeft, rangeLen);
544 if (rangeOrigin != csetMergeParent && s1LinesToBorrow > 0) {
545 ChangeBlockImpl block = getChangeBlock(s1Start, s1LinesToBorrow, rangeStart, rangeLen);
546 block.setOriginAndTarget(rangeOrigin, csetTarget);
547 insp.changed(block);
548 s1ConsumedLines += s1LinesToBorrow;
549 s1Start += s1LinesToBorrow;
550 } else {
551 int blockInsPoint = rangeOrigin != csetMergeParent ? s1Start : p2MergeCommon.reverseMapLine(rangeStart);
552 ChangeBlockImpl block = getAddBlock(rangeStart, rangeLen, blockInsPoint);
553 block.setOriginAndTarget(rangeOrigin, csetTarget);
554 insp.added(block);
555 }
556 }
557 if (s1ConsumedLines != s1TotalLines) {
558 assert s1ConsumedLines < s1TotalLines : String.format("Expected to process %d lines, but actually was %d", s1TotalLines, s1ConsumedLines);
559 // either there were no ranges from p1, whole s2From..s2To range came from p2, shall report as deleted
560 // or the ranges found were not enough to consume whole s2From..s2To
561 // The "deletion point" is shifted to the end of last csetOrigin->csetTarget change
562 int s2DeletePoint = s2From + s1ConsumedLines;
563 ChangeBlockImpl block = new ChangeBlockImpl(annotatedRevision.origin, null, s1Start, s1To - s1Start, -1, -1, -1, s2DeletePoint);
564 block.setOriginAndTarget(csetOrigin, csetTarget);
565 insp.deleted(block);
566 }
567 } else {
568 ChangeBlockImpl block = getChangeBlock(s1From, s1To - s1From, s2From, s2To - s2From);
569 block.setOriginAndTarget(csetOrigin, csetTarget);
570 insp.changed(block);
571 }
572 } catch (HgCallbackTargetException ex) {
573 error = ex;
574 }
575 }
576
577 @Override
578 protected void added(int s1InsertPoint, int s2From, int s2To) {
579 if (shallStop()) {
580 return;
581 }
582 try {
583 if (p2MergeCommon != null) {
584 mergeRanges.clear();
585 p2MergeCommon.combineAndMarkRangesWithTarget(s2From, s2To - s2From, csetOrigin, csetMergeParent, mergeRanges);
586 int insPoint = s1InsertPoint; // track changes to insertion point
587 for (int i = 0; i < mergeRanges.size(); i += 3) {
588 int rangeOrigin = mergeRanges.get(i);
589 int rangeStart = mergeRanges.get(i+1);
590 int rangeLen = mergeRanges.get(i+2);
591 ChangeBlockImpl block = getAddBlock(rangeStart, rangeLen, insPoint);
592 block.setOriginAndTarget(rangeOrigin, csetTarget);
593 insp.added(block);
594 // indicate insPoint moved down number of lines we just reported
595 insPoint += rangeLen;
596 }
597 } else {
598 ChangeBlockImpl block = getAddBlock(s2From, s2To - s2From, s1InsertPoint);
599 block.setOriginAndTarget(csetOrigin, csetTarget);
600 insp.added(block);
601 }
602 } catch (HgCallbackTargetException ex) {
603 error = ex;
604 }
605 }
606
607 @Override
608 protected void deleted(int s2DeletePoint, int s1From, int s1To) {
609 if (shallStop()) {
610 return;
611 }
612 try {
613 ChangeBlockImpl block = new ChangeBlockImpl(annotatedRevision.origin, null, s1From, s1To - s1From, -1, -1, -1, s2DeletePoint);
614 block.setOriginAndTarget(csetOrigin, csetTarget);
615 insp.deleted(block);
616 } catch (HgCallbackTargetException ex) {
617 error = ex;
618 }
619 }
620
621 @Override
622 protected void unchanged(int s1From, int s2From, int length) {
623 if (shallStop()) {
624 return;
625 }
626 try {
627 EqualBlockImpl block = new EqualBlockImpl(s1From, s2From, length, annotatedRevision.target);
628 block.setOriginAndTarget(csetOrigin, csetTarget);
629 insp.same(block);
630 } catch (HgCallbackTargetException ex) {
631 error = ex;
632 }
633 }
634
635 void checkErrors() throws HgCallbackTargetException {
636 if (error != null) {
637 throw error;
638 }
639 }
640
641 private boolean shallStop() {
642 return error != null;
643 }
644
645 private ChangeBlockImpl getAddBlock(int start, int len, int insPoint) {
646 return new ChangeBlockImpl(null, annotatedRevision.target, -1, -1, start, len, insPoint, -1);
647 }
648
649 private ChangeBlockImpl getChangeBlock(int start1, int len1, int start2, int len2) {
650 return new ChangeBlockImpl(annotatedRevision.origin, annotatedRevision.target, start1, len1, start2, len2, start1, start2);
651 }
652 }
653
654 private static class BlockImpl implements Block {
655 private int originCset;
656 private int targetCset;
657
658 void setOriginAndTarget(int originChangesetIndex, int targetChangesetIndex) {
659 // XXX perhaps, shall be part of Inspector API, rather than Block's
660 // as they don't change between blocks (although the moment about merged revisions)
661 // is not yet clear to me
662 originCset = originChangesetIndex;
663 targetCset = targetChangesetIndex;
664 }
665
666 public int originChangesetIndex() {
667 return originCset;
668 }
669
670 public int targetChangesetIndex() {
671 return targetCset;
672 }
673 }
674
675 private static class EqualBlockImpl extends BlockImpl implements EqualBlock {
676 private final int start1, start2;
677 private final int length;
678 private final ContentBlock fullContent;
679 private FilterBlock myContent;
680
681 EqualBlockImpl(int blockStartSeq1, int blockStartSeq2, int blockLength, ContentBlock targetContent) {
682 start1 = blockStartSeq1;
683 start2 = blockStartSeq2;
684 length = blockLength;
685 fullContent = targetContent;
686 }
687
688 public int originStart() {
689 return start1;
690 }
691
692 public int targetStart() {
693 return start2;
694 }
695
696 public int length() {
697 return length;
698 }
699
700 public BlockData content() {
701 if (myContent == null) {
702 myContent = new FilterBlock(fullContent, start2, length);
703 }
704 return myContent;
705 }
706
707 @Override
708 public String toString() {
709 return String.format("@@ [%d..%d) == [%d..%d) @@", start1, start1+length, start2, start2+length);
710 }
711 }
712
713 private static class ChangeBlockImpl extends BlockImpl implements ChangeBlock {
714 private final ContentBlock oldContent;
715 private final ContentBlock newContent;
716 private final int s1Start;
717 private final int s1Len;
718 private final int s2Start;
719 private final int s2Len;
720 private final int s1InsertPoint;
721 private final int s2DeletePoint;
722 private FilterBlock addedBlock, removedBlock;
723
724 public ChangeBlockImpl(ContentBlock c1, ContentBlock c2, int s1Start, int s1Len, int s2Start, int s2Len, int s1InsertPoint, int s2DeletePoint) {
725 oldContent = c1;
726 newContent = c2;
727 this.s1Start = s1Start;
728 this.s1Len = s1Len;
729 this.s2Start = s2Start;
730 this.s2Len = s2Len;
731 this.s1InsertPoint = s1InsertPoint;
732 this.s2DeletePoint = s2DeletePoint;
733 }
734
735 public int insertedAt() {
736 return s1InsertPoint;
737 }
738
739 public int firstAddedLine() {
740 return s2Start;
741 }
742
743 public int totalAddedLines() {
744 return s2Len;
745 }
746
747 public BlockData addedLines() {
748 if (addedBlock == null) {
749 addedBlock = new FilterBlock(newContent, firstAddedLine(), totalAddedLines());
750 }
751 return addedBlock;
752 }
753
754 public int removedAt() {
755 return s2DeletePoint;
756 }
757
758 public int firstRemovedLine() {
759 return s1Start;
760 }
761
762 public int totalRemovedLines() {
763 return s1Len;
764 }
765
766 public BlockData removedLines() {
767 if (removedBlock == null) {
768 removedBlock = new FilterBlock(oldContent, firstRemovedLine(), totalRemovedLines());
769 }
770 return removedBlock;
771 }
772
773 @Override
774 public String toString() {
775 if (s2DeletePoint == -1) {
776 return String.format("@@ -%d,0 +%d,%d @@", insertedAt(), firstAddedLine(), totalAddedLines());
777 } else if (s1InsertPoint == -1) {
778 // delete only
779 return String.format("@@ -%d,%d +%d,0 @@", firstRemovedLine(), totalRemovedLines(), removedAt());
780 }
781 return String.format("@@ -%d,%d +%d,%d @@", firstRemovedLine(), totalRemovedLines(), firstAddedLine(), totalAddedLines());
782 }
783 }
784
785 private static class SingleLine implements BlockData {
786 private final ByteChain line;
787
788 public SingleLine(ByteChain lineContent) {
789 line = lineContent;
790 }
791
792 public BlockData elementAt(int index) {
793 assert false;
794 return null;
795 }
796
797 public int elementCount() {
798 return 0;
799 }
800
801 public byte[] asArray() {
802 return line.data();
803 }
804 }
805
806 private static class ContentBlock implements BlockData {
807 private final LineSequence seq;
808
809 public ContentBlock(LineSequence sequence) {
810 seq = sequence;
811 }
812
813 public BlockData elementAt(int index) {
814 return new SingleLine(seq.chunk(index));
815 }
816
817 public int elementCount() {
818 return seq.chunkCount() - 1;
819 }
820
821 public byte[] asArray() {
822 return seq.data(0, seq.chunkCount() - 1);
823 }
824 }
825
826 private static class FilterBlock implements BlockData {
827 private final ContentBlock contentBlock;
828 private final int from;
829 private final int length;
830
831 public FilterBlock(ContentBlock bd, int startFrom, int len) {
832 assert bd != null;
833 assert startFrom + len < bd.seq.chunkCount(); // there's one extra chunk in the end, so strict less is ok
834 contentBlock = bd;
835 from = startFrom;
836 length = len;
837 }
838
839 public BlockData elementAt(int index) {
840 if (index < 0 || index >= length) {
841 throw new IllegalArgumentException(String.format("Expected value from [0..%d), got %d", length, index));
842 }
843 return contentBlock.elementAt(from + index);
844 }
845
846 public int elementCount() {
847 return length;
848 }
849
850 public byte[] asArray() {
851 return contentBlock.seq.data(from, from + length);
852 }
853 }
854
855
856 private static class EqualBlocksCollector implements DiffHelper.MatchInspector<LineSequence> {
857 private final RangeSeq matches = new RangeSeq();
858
859 public void begin(LineSequence s1, LineSequence s2) {
860 }
861
862 public void match(int startSeq1, int startSeq2, int matchLength) {
863 matches.add(startSeq1, startSeq2, matchLength);
864 }
865
866 public void end() {
867 }
868
869 public int reverseMapLine(int ln) {
870 return matches.reverseMapLine(ln);
871 }
872
873 public void intersectWithTarget(int start, int length, IntVector result) {
874 int s = start;
875 for (int l = start, x = start + length; l < x; l++) {
876 if (!matches.includesTargetLine(l)) {
877 if (l - s > 0) {
878 result.add(s);
879 result.add(l - s);
880 }
881 s = l+1;
882 }
883 }
884 if (s < start+length) {
885 result.add(s);
886 result.add((start + length) - s);
887 }
888 }
889
890 /*
891 * intersects [start..start+length) with ranges of target lines, and based on the intersection
892 * breaks initial range into smaller ranges and records them into result, with marker to indicate
893 * whether the range is from initial range (markerSource) or is a result of the intersection with target
894 * (markerTarget)
895 */
896 public void combineAndMarkRangesWithTarget(int start, int length, int markerSource, int markerTarget, IntVector result) {
897 int sourceStart = start, targetStart = start, sourceEnd = start + length;
898 for (int l = sourceStart; l < sourceEnd; l++) {
899 if (matches.includesTargetLine(l)) {
900 // l is from target
901 if (sourceStart < l) {
902 // few lines from source range were not in the target, report them
903 result.add(markerSource);
904 result.add(sourceStart);
905 result.add(l - sourceStart);
906 }
907 // indicate the earliest line from source range to use
908 sourceStart = l + 1;
909 } else {
910 // l is not in target
911 if (targetStart < l) {
912 // report lines from target range
913 result.add(markerTarget);
914 result.add(targetStart);
915 result.add(l - targetStart);
916 }
917 // next line *may* be from target
918 targetStart = l + 1;
919 }
920 }
921 // if source range end with line from target, sourceStart would be == sourceEnd, and we need to add range with markerTarget
922 // if source range doesn't end with target line, targetStart == sourceEnd, while sourceStart < sourceEnd
923 if (sourceStart < sourceEnd) {
924 assert targetStart == sourceEnd;
925 // something left from the source range
926 result.add(markerSource);
927 result.add(sourceStart);
928 result.add(sourceEnd - sourceStart);
929 } else if (targetStart < sourceEnd) {
930 assert sourceStart == sourceEnd;
931 result.add(markerTarget);
932 result.add(targetStart);
933 result.add(sourceEnd - targetStart);
934 }
935 }
936 }
937
938 private static class AnnotateRev implements RevisionDescriptor {
939 public ContentBlock origin, target;
940 public int originCset, targetCset, mergeCset, fileRevIndex;
941
942 public void set(int fileRev) {
943 fileRevIndex = fileRev;
944 }
945 public void set(ContentBlock o, ContentBlock t) {
946 origin = o;
947 target = t;
948 }
949 public void set(int o, int t, int m) {
950 originCset = o;
951 targetCset = t;
952 mergeCset = m;
953 }
954
955 public BlockData origin() {
956 return origin;
957 }
958
959 public BlockData target() {
960 return target;
961 }
962
963 public int originChangesetIndex() {
964 return originCset;
965 }
966
967 public int targetChangesetIndex() {
968 return targetCset;
969 }
970
971 public boolean isMerge() {
972 return mergeCset != NO_REVISION;
973 }
974
975 public int mergeChangesetIndex() {
976 return mergeCset;
977 }
978
979 public int fileRevisionIndex() {
980 return fileRevIndex;
981 }
982 @Override
983 public String toString() {
984 if (isMerge()) {
985 return String.format("[%d,%d->%d]", originCset, mergeCset, targetCset);
986 }
987 return String.format("[%d->%d]", originCset, targetCset);
988 }
989 }
990
991 public static void main(String[] args) {
992 EqualBlocksCollector bc = new EqualBlocksCollector();
993 bc.match(-1, 5, 3);
994 bc.match(-1, 10, 2);
995 bc.match(-1, 15, 3);
996 bc.match(-1, 20, 3);
997 IntVector r = new IntVector();
998 bc.intersectWithTarget(7, 10, r);
999 for (int i = 0; i < r.size(); i+=2) {
1000 System.out.printf("[%d..%d) ", r.get(i), r.get(i) + r.get(i+1));
1001 }
1002 System.out.println();
1003 r.clear();
1004 bc.combineAndMarkRangesWithTarget(0, 16, 508, 514, r);
1005 for (int i = 0; i < r.size(); i+=3) {
1006 System.out.printf("%d:[%d..%d) ", r.get(i), r.get(i+1), r.get(i+1) + r.get(i+2));
1007 } 450 }
1008 } 451 }
1009 } 452 }
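
For readers who want the gist of the rename-following walk without the surrounding hg4j plumbing, here is a self-contained, simplified sketch. It mirrors the do/while loop in the new annotate() above, but the types (FileHistorySource, Step) and method names are made up for illustration and are not part of the hg4j API.

	import java.util.ArrayList;
	import java.util.Collections;
	import java.util.List;

	class RenameAwareHistory {
		/** one (file name, changeset index) step of the stitched history */
		static final class Step {
			final String fileName;
			final int changesetIndex;
			Step(String f, int cs) { fileName = f; changesetIndex = cs; }
		}

		interface FileHistorySource {
			List<Integer> changesets(String fileName, int upToChangeset); // newest to oldest
			String copySourceOf(String fileName);                         // null if not a copy/rename
			int copySourceChangeset(String fileName);
		}

		/** walk the named file and, transparently, its rename sources, oldest change first */
		static List<Step> annotateOrder(FileHistorySource src, String fileName, int upToChangeset) {
			List<List<Step>> chunks = new ArrayList<List<Step>>();
			String current = fileName;
			int upTo = upToChangeset;
			do {
				List<Step> chunk = new ArrayList<Step>();
				for (int cs : src.changesets(current, upTo)) {
					chunk.add(new Step(current, cs));
				}
				chunks.add(0, chunk); // origin first, ultimate target last
				String source = src.copySourceOf(current);
				if (source != null) {
					upTo = src.copySourceChangeset(current); // continue from the revision the copy was made from
					current = source;
				} else {
					current = null; // no more renames to follow
				}
			} while (current != null);
			List<Step> result = new ArrayList<Step>();
			for (List<Step> chunk : chunks) {
				Collections.reverse(chunk); // each chunk was collected new-to-old
				result.addAll(chunk);
			}
			return result;
		}
	}

As in the real code, each per-file chunk is discovered newest-first (you only learn about the rename source once you reach the oldest revision of the current file), so chunks are prepended and reversed to obtain the old-to-new reporting order.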