comparison src/org/tmatesoft/hg/core/HgLogCommand.java @ 328:a674b8590362

Move file tree history to upper API level
author Artem Tikhomirov <tikhomirov.artem@gmail.com>
date Wed, 05 Oct 2011 07:13:57 +0200
parents d68dcb3b5f49
children 5f9073eabf06
comparing 327:3f09b8c19142 with 328:a674b8590362
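
This changeset promotes per-file history tree construction into the command API: HgLogCommand gains execute(HgChangesetTreeHandler), which walks the file's revlog, links each file revision to its parents and children, and reports every node through HgChangesetTreeHandler.TreeElement. A minimal usage sketch follows; the wrapper class, the HgLogCommand(HgRepository) constructor and the exact next(TreeElement) callback signature are assumptions about the surrounding hg4j API (only execute(HgChangesetTreeHandler), file(String, boolean) and the TreeElement accessors are referenced by this changeset).

import org.tmatesoft.hg.core.HgChangesetTreeHandler;
import org.tmatesoft.hg.core.HgLogCommand;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.CancelledException;

// Hypothetical example class, not part of the changeset.
public class FileTreeHistoryExample {
    // Prints one line per file revision, using the tree-oriented callback added here.
    // The HgLogCommand(HgRepository) constructor and the plain next(TreeElement)
    // callback are assumptions about the rest of the library.
    static void printFileHistoryTree(HgRepository hgRepo) throws CancelledException {
        HgLogCommand cmd = new HgLogCommand(hgRepo);
        // tree history requires a file; follow == false, as following renames is not yet supported for trees
        cmd.file("src/org/tmatesoft/hg/core/HgLogCommand.java", false);
        cmd.execute(new HgChangesetTreeHandler() {
            public void next(HgChangesetTreeHandler.TreeElement entry) {
                // each element ties a file revision to its changeset and to parent/child revisions
                System.out.println(entry.fileRevision() + " @ cset " + entry.changesetRevision()
                        + ", children: " + entry.childRevisions());
            }
        });
    }
}
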
@@ -16,23 +16,31 @@
  */
 package org.tmatesoft.hg.core;
 
 import static org.tmatesoft.hg.repo.HgRepository.TIP;
 
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Calendar;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.ConcurrentModificationException;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.tmatesoft.hg.internal.IntMap;
+import org.tmatesoft.hg.internal.IntVector;
 import org.tmatesoft.hg.repo.HgChangelog;
 import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
 import org.tmatesoft.hg.repo.HgDataFile;
 import org.tmatesoft.hg.repo.HgRepository;
+import org.tmatesoft.hg.repo.HgStatusCollector;
+import org.tmatesoft.hg.util.CancelSupport;
 import org.tmatesoft.hg.util.CancelledException;
+import org.tmatesoft.hg.util.Pair;
 import org.tmatesoft.hg.util.Path;
 import org.tmatesoft.hg.util.ProgressSupport;
 
 
 /**
@@ -203,14 +211,11 @@
             throw new ConcurrentModificationException();
         }
         final ProgressSupport progressHelper = getProgressSupport(handler);
         try {
             count = 0;
-            HgChangelog.ParentWalker pw = parentHelper; // leave it uninitialized unless we iterate whole repo
-            if (file == null) {
-                pw = getParentHelper();
-            }
+            HgChangelog.ParentWalker pw = getParentHelper(file == null); // leave it uninitialized unless we iterate whole repo
             // ChangesetTransformer creates a blank PathPool, and #file(String, boolean) above
             // may utilize it as well. CommandContext? How about StatusCollector there as well?
             csetTransform = new ChangesetTransformer(repo, handler, pw, progressHelper, getCancelSupport(handler, true));
             if (file == null) {
                 progressHelper.start(endRev - startRev + 1);
@@ -248,11 +253,81 @@
         } finally {
             csetTransform = null;
             progressHelper.done();
         }
     }
 
+    public void execute(HgChangesetTreeHandler handler) throws CancelledException {
+        if (handler == null) {
+            throw new IllegalArgumentException();
+        }
+        if (csetTransform != null) {
+            throw new ConcurrentModificationException();
+        }
+        if (file == null) {
+            throw new IllegalArgumentException("History tree is supported for files only (at least now), please specify file");
+        }
+        if (followHistory) {
+            throw new UnsupportedOperationException("Can't follow file history when building tree (yet?)");
+        }
+        class TreeBuildInspector implements HgChangelog.ParentInspector, HgChangelog.RevisionInspector {
+            HistoryNode[] completeHistory;
+            int[] commitRevisions;
+
+            public void next(int revisionNumber, Nodeid revision, int linkedRevision) {
+                commitRevisions[revisionNumber] = linkedRevision;
+            }
+
+            public void next(int revisionNumber, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2) {
+                HistoryNode p1 = null, p2 = null;
+                if (parent1 != -1) {
+                    p1 = completeHistory[parent1];
+                }
+                if (parent2 != -1) {
+                    p2 = completeHistory[parent2];
+                }
+                completeHistory[revisionNumber] = new HistoryNode(commitRevisions[revisionNumber], revision, p1, p2);
+            }
+
+            HistoryNode[] go(HgDataFile fileNode) {
+                completeHistory = new HistoryNode[fileNode.getRevisionCount()];
+                commitRevisions = new int[completeHistory.length];
+                fileNode.walk(0, TIP, this);
+                return completeHistory;
+            }
+        };
+        final ProgressSupport progressHelper = getProgressSupport(handler);
+        progressHelper.start(4);
+        final CancelSupport cancelHelper = getCancelSupport(handler, true);
+        cancelHelper.checkCancelled();
+        HgDataFile fileNode = repo.getFileNode(file);
+        // build tree of nodes according to parents in file's revlog
+        final TreeBuildInspector treeBuildInspector = new TreeBuildInspector();
+        final HistoryNode[] completeHistory = treeBuildInspector.go(fileNode);
+        progressHelper.worked(1);
+        cancelHelper.checkCancelled();
+        ElementImpl ei = new ElementImpl(treeBuildInspector.commitRevisions.length);
+        final ProgressSupport ph2;
+        if (treeBuildInspector.commitRevisions.length < 100 /*XXX is it really worth it? */) {
+            ei.initTransform();
+            repo.getChangelog().range(ei, treeBuildInspector.commitRevisions);
+            progressHelper.worked(1);
+            ph2 = new ProgressSupport.Sub(progressHelper, 2);
+        } else {
+            ph2 = new ProgressSupport.Sub(progressHelper, 3);
+        }
+        ph2.start(completeHistory.length);
+        // XXX shall sort completeHistory according to changeset numbers?
+        for (int i = 0; i < completeHistory.length; i++) {
+            final HistoryNode n = completeHistory[i];
+            handler.next(ei.init(n));
+            ph2.worked(1);
+            cancelHelper.checkCancelled();
+        }
+        progressHelper.done();
+    }
+
     //
 
     public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
         if (limit > 0 && count >= limit) {
             return;
@@ -278,12 +353,12 @@
         }
         count++;
         csetTransform.next(revisionNumber, nodeid, cset);
     }
 
-    private HgChangelog.ParentWalker getParentHelper() {
-        if (parentHelper == null) {
+    private HgChangelog.ParentWalker getParentHelper(boolean create) {
+        if (parentHelper == null && create) {
             parentHelper = repo.getChangelog().new ParentWalker();
             parentHelper.init();
         }
         return parentHelper;
     }
@@ -315,6 +390,212 @@
 
         public void next(HgChangeset changeset) {
             result.add(changeset.clone());
         }
     }
+
+    private static class HistoryNode {
+        final int changeset;
+        final Nodeid fileRevision;
+        final HistoryNode parent1, parent2;
+        List<HistoryNode> children;
+
+        HistoryNode(int cs, Nodeid revision, HistoryNode p1, HistoryNode p2) {
+            changeset = cs;
+            fileRevision = revision;
+            parent1 = p1;
+            parent2 = p2;
+            if (p1 != null) {
+                p1.addChild(this);
+            }
+            if (p2 != null) {
+                p2.addChild(this);
+            }
+        }
+
+        void addChild(HistoryNode child) {
+            if (children == null) {
+                children = new ArrayList<HistoryNode>(2);
+            }
+            children.add(child);
+        }
+    }
+
+    private class ElementImpl implements HgChangesetTreeHandler.TreeElement, HgChangelog.Inspector {
+        private HistoryNode historyNode;
+        private Pair<HgChangeset, HgChangeset> parents;
+        private List<HgChangeset> children;
+        private IntMap<HgChangeset> cachedChangesets;
+        private ChangesetTransformer.Transformation transform;
+        private Nodeid changesetRevision;
+        private Pair<Nodeid, Nodeid> parentRevisions;
+        private List<Nodeid> childRevisions;
+
+        public ElementImpl(int total) {
+            cachedChangesets = new IntMap<HgChangeset>(total);
+        }
+
+        ElementImpl init(HistoryNode n) {
+            historyNode = n;
+            parents = null;
+            children = null;
+            changesetRevision = null;
+            parentRevisions = null;
+            childRevisions = null;
+            return this;
+        }
+
+        public Nodeid fileRevision() {
+            return historyNode.fileRevision;
+        }
+
+        public HgChangeset changeset() {
+            return get(historyNode.changeset)[0];
+        }
+
+        public Pair<HgChangeset, HgChangeset> parents() {
+            if (parents != null) {
+                return parents;
+            }
+            HistoryNode p;
+            final int p1, p2;
+            if ((p = historyNode.parent1) != null) {
+                p1 = p.changeset;
+            } else {
+                p1 = -1;
+            }
+            if ((p = historyNode.parent2) != null) {
+                p2 = p.changeset;
+            } else {
+                p2 = -1;
+            }
+            HgChangeset[] r = get(p1, p2);
+            return parents = new Pair<HgChangeset, HgChangeset>(r[0], r[1]);
+        }
+
+        public Collection<HgChangeset> children() {
+            if (children != null) {
+                return children;
+            }
+            if (historyNode.children == null) {
+                children = Collections.emptyList();
+            } else {
+                int[] childrenChangesetNumbers = new int[historyNode.children.size()];
+                int j = 0;
+                for (HistoryNode hn : historyNode.children) {
+                    childrenChangesetNumbers[j++] = hn.changeset;
+                }
+                children = Arrays.asList(get(childrenChangesetNumbers));
+            }
+            return children;
+        }
+
+        void populate(HgChangeset cs) {
+            cachedChangesets.put(cs.getRevision(), cs);
+        }
+
+        private HgChangeset[] get(int... changelogRevisionNumber) {
+            HgChangeset[] rv = new HgChangeset[changelogRevisionNumber.length];
+            IntVector misses = new IntVector(changelogRevisionNumber.length, -1);
+            for (int i = 0; i < changelogRevisionNumber.length; i++) {
+                if (changelogRevisionNumber[i] == -1) {
+                    rv[i] = null;
+                    continue;
+                }
+                HgChangeset cached = cachedChangesets.get(changelogRevisionNumber[i]);
+                if (cached != null) {
+                    rv[i] = cached;
+                } else {
+                    misses.add(changelogRevisionNumber[i]);
+                }
+            }
+            if (misses.size() > 0) {
+                final int[] changesets2read = misses.toArray();
+                initTransform();
+                repo.getChangelog().range(this, changesets2read);
+                for (int changeset2read : changesets2read) {
+                    HgChangeset cs = cachedChangesets.get(changeset2read);
+                    if (cs == null) {
+                        throw new HgBadStateException();
+                    }
+                    // HgChangelog.range may reorder changesets according to their order in the changelog
+                    // thus need to find original index
+                    boolean sanity = false;
+                    for (int i = 0; i < changelogRevisionNumber.length; i++) {
+                        if (changelogRevisionNumber[i] == cs.getRevision()) {
+                            rv[i] = cs;
+                            sanity = true;
+                            break;
+                        }
+                    }
+                    assert sanity;
+                }
+            }
+            return rv;
+        }
+
+        // init only when needed
+        void initTransform() {
+            if (transform == null) {
+                transform = new ChangesetTransformer.Transformation(new HgStatusCollector(repo)/*XXX try to reuse from context?*/, getParentHelper(false));
+            }
+        }
+
+        public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
+            HgChangeset cs = transform.handle(revisionNumber, nodeid, cset);
+            populate(cs.clone());
+        }
+
+        public Nodeid changesetRevision() {
+            if (changesetRevision == null) {
+                changesetRevision = getRevision(historyNode.changeset);
+            }
+            return changesetRevision;
+        }
+
+        public Pair<Nodeid, Nodeid> parentRevisions() {
+            if (parentRevisions == null) {
+                HistoryNode p;
+                final Nodeid p1, p2;
+                if ((p = historyNode.parent1) != null) {
+                    p1 = getRevision(p.changeset);
+                } else {
+                    p1 = Nodeid.NULL;
+                }
+                if ((p = historyNode.parent2) != null) {
+                    p2 = getRevision(p.changeset);
+                } else {
+                    p2 = Nodeid.NULL;
+                }
+                parentRevisions = new Pair<Nodeid, Nodeid>(p1, p2);
+            }
+            return parentRevisions;
+        }
+
+        public Collection<Nodeid> childRevisions() {
+            if (childRevisions != null) {
+                return childRevisions;
+            }
+            if (historyNode.children == null) {
+                childRevisions = Collections.emptyList();
+            } else {
+                ArrayList<Nodeid> rv = new ArrayList<Nodeid>(historyNode.children.size());
+                for (HistoryNode hn : historyNode.children) {
+                    rv.add(getRevision(hn.changeset));
+                }
+                childRevisions = Collections.unmodifiableList(rv);
+            }
+            return childRevisions;
+        }
+
+        // reading nodeid involves reading index only, guess, can afford not to optimize multiple reads
+        private Nodeid getRevision(int changelogRevisionNumber) {
+            // XXX pipe through pool
+            HgChangeset cs = cachedChangesets.get(changelogRevisionNumber);
+            if (cs != null) {
+                return cs.getNodeid();
+            } else {
+                return repo.getChangelog().getRevision(changelogRevisionNumber);
+            }
+        }
+    }
 }