tikhomirov@64: /*
tikhomirov@565: * Copyright (c) 2011-2013 TMate Software Ltd
tikhomirov@64: *
tikhomirov@64: * This program is free software; you can redistribute it and/or modify
tikhomirov@64: * it under the terms of the GNU General Public License as published by
tikhomirov@64: * the Free Software Foundation; version 2 of the License.
tikhomirov@64: *
tikhomirov@64: * This program is distributed in the hope that it will be useful,
tikhomirov@64: * but WITHOUT ANY WARRANTY; without even the implied warranty of
tikhomirov@64: * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
tikhomirov@64: * GNU General Public License for more details.
tikhomirov@64: *
tikhomirov@64: * For information on how to redistribute this software under
tikhomirov@64: * the terms of a license other than GNU General Public License
tikhomirov@102: * contact TMate Software at support@hg4j.com
tikhomirov@64: */
tikhomirov@64: package org.tmatesoft.hg.core;
tikhomirov@64:
tikhomirov@518: import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION;
tikhomirov@74: import static org.tmatesoft.hg.repo.HgRepository.TIP;
tikhomirov@456: import static org.tmatesoft.hg.util.LogFacility.Severity.Error;
tikhomirov@64:
tikhomirov@328: import java.util.ArrayList;
tikhomirov@328: import java.util.Arrays;
tikhomirov@64: import java.util.Calendar;
tikhomirov@328: import java.util.Collection;
tikhomirov@64: import java.util.Collections;
tikhomirov@511: import java.util.Comparator;
tikhomirov@64: import java.util.ConcurrentModificationException;
tikhomirov@510: import java.util.Iterator;
tikhomirov@64: import java.util.LinkedList;
tikhomirov@64: import java.util.List;
tikhomirov@64: import java.util.Set;
tikhomirov@64: import java.util.TreeSet;
tikhomirov@64:
tikhomirov@520: import org.tmatesoft.hg.internal.AdapterPlug;
tikhomirov@520: import org.tmatesoft.hg.internal.BatchRangeHelper;
tikhomirov@565: import org.tmatesoft.hg.internal.CsetParamKeeper;
tikhomirov@692: import org.tmatesoft.hg.internal.FileRenameHistory;
tikhomirov@692: import org.tmatesoft.hg.internal.FileRenameHistory.Chunk;
tikhomirov@328: import org.tmatesoft.hg.internal.IntMap;
tikhomirov@328: import org.tmatesoft.hg.internal.IntVector;
tikhomirov@526: import org.tmatesoft.hg.internal.Internals;
tikhomirov@518: import org.tmatesoft.hg.internal.Lifecycle;
tikhomirov@520: import org.tmatesoft.hg.internal.LifecycleProxy;
tikhomirov@596: import org.tmatesoft.hg.internal.ReverseIterator;
tikhomirov@215: import org.tmatesoft.hg.repo.HgChangelog;
tikhomirov@154: import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
tikhomirov@80: import org.tmatesoft.hg.repo.HgDataFile;
tikhomirov@423: import org.tmatesoft.hg.repo.HgInvalidStateException;
tikhomirov@456: import org.tmatesoft.hg.repo.HgParentChildMap;
tikhomirov@74: import org.tmatesoft.hg.repo.HgRepository;
tikhomirov@423: import org.tmatesoft.hg.repo.HgRuntimeException;
tikhomirov@328: import org.tmatesoft.hg.repo.HgStatusCollector;
tikhomirov@514: import org.tmatesoft.hg.util.Adaptable;
tikhomirov@328: import org.tmatesoft.hg.util.CancelSupport;
tikhomirov@157: import org.tmatesoft.hg.util.CancelledException;
tikhomirov@328: import org.tmatesoft.hg.util.Pair;
tikhomirov@133: import org.tmatesoft.hg.util.Path;
tikhomirov@215: import org.tmatesoft.hg.util.ProgressSupport;
tikhomirov@64:
tikhomirov@64:
tikhomirov@64: /**
tikhomirov@131: * Access to changelog, 'hg log' command counterpart.
tikhomirov@131: *
tikhomirov@64: *
tikhomirov@131: * Usage:
tikhomirov@70: * new HgLogCommand(hgRepo).limit(20).branch("maintenance-2.1").user("me").execute(new MyHandler());
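tikhomirov@70: * File history with renames, newest first (illustrative sketch using this command's own API; path and MyHandler are placeholders):
tikhomirov@70: * new HgLogCommand(hgRepo).file("src/a.txt", true).order(HgIterateDirection.NewToOld).execute(new MyHandler());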
tikhomirov@64: *
tikhomirov@131: * Not thread-safe (each thread has to use own {@link HgLogCommand} instance).
tikhomirov@64: *
tikhomirov@64: * @author Artem Tikhomirov
tikhomirov@64: * @author TMate Software Ltd.
tikhomirov@64: */
tikhomirov@518: public class HgLogCommand extends HgAbstractCommand<HgLogCommand> {
tikhomirov@64:
tikhomirov@64: private final HgRepository repo;
tikhomirov@64: private Set<String> users;
tikhomirov@64: private Set<String> branches;
tikhomirov@64: private int limit = 0, count = 0;
tikhomirov@64: private int startRev = 0, endRev = TIP;
tikhomirov@64: private Calendar date;
tikhomirov@77: private Path file;
tikhomirov@514: /*
tikhomirov@514: * Whether to iterate file origins, if any.
tikhomirov@514: * Makes sense only when file != null
tikhomirov@514: */
tikhomirov@514: private boolean followRenames;
tikhomirov@514: /*
tikhomirov@514: * Whether to track history of the selected file version (based on file revision
tikhomirov@514: * in working dir parent), follow ancestors only.
tikhomirov@514: * Note, 'hg log --follow' combines both #followRenames and #followAncestry
tikhomirov@514: */
tikhomirov@514: private boolean followAncestry;
tikhomirov@522:
tikhomirov@522: private HgIterateDirection iterateDirection = HgIterateDirection.OldToNew;
tikhomirov@522:
tikhomirov@193: private ChangesetTransformer csetTransform;
tikhomirov@432: private HgParentChildMap<HgChangelog> parentHelper;
tikhomirov@80:
tikhomirov@131: public HgLogCommand(HgRepository hgRepo) {
tikhomirov@107: repo = hgRepo;
tikhomirov@64: }
tikhomirov@64:
tikhomirov@64: /**
tikhomirov@148: * Limit search to specified user. Multiple user names may be specified. Once set, user names can't be
tikhomirov@148: * cleared; use a new command instance in such cases.
tikhomirov@64: * @param user - full or partial name of the user, case-insensitive, non-null.
tikhomirov@64: * @return <code>this</code> instance for convenience
tikhomirov@148: * @throws IllegalArgumentException when argument is null
tikhomirov@64: */
tikhomirov@131: public HgLogCommand user(String user) {
tikhomirov@64: if (user == null) {
tikhomirov@64: throw new IllegalArgumentException();
tikhomirov@64: }
tikhomirov@64: if (users == null) {
tikhomirov@64: users = new TreeSet<String>();
tikhomirov@64: }
tikhomirov@64: users.add(user.toLowerCase());
tikhomirov@64: return this;
tikhomirov@64: }
tikhomirov@64:
tikhomirov@64: /**
tikhomirov@64: * Limit search to specified branch. Multiple branches may be specified (changesets from any of them
tikhomirov@148: * would be included in the result). If unspecified, all branches are considered. There's no way to clear branch selection
tikhomirov@148: * once set; create a fresh command instance instead.
tikhomirov@64: * @param branch - branch name, case-sensitive, non-null.
tikhomirov@64: * @return <code>this</code> instance for convenience
tikhomirov@148: * @throws IllegalArgumentException when branch argument is null
tikhomirov@64: */
tikhomirov@131: public HgLogCommand branch(String branch) {
tikhomirov@64: if (branch == null) {
tikhomirov@64: throw new IllegalArgumentException();
tikhomirov@64: }
tikhomirov@64: if (branches == null) {
tikhomirov@64: branches = new TreeSet<String>();
tikhomirov@64: }
tikhomirov@64: branches.add(branch);
tikhomirov@64: return this;
tikhomirov@64: }
tikhomirov@64:
tikhomirov@64: // limit search to specific date
tikhomirov@64: // multiple?
tikhomirov@131: public HgLogCommand date(Calendar date) {
tikhomirov@64: this.date = date;
tikhomirov@418: // TODO post-1.0 implement
tikhomirov@64: // isSet(field) - false => don't use in detection of 'same date'
tikhomirov@526: throw Internals.notImplemented();
tikhomirov@64: }
tikhomirov@64:
tikhomirov@64: /**
tikhomirov@64: *
tikhomirov@64: * @param num - number of changesets to produce. Pass 0 to clear the limit.
tikhomirov@64: * @return <code>this</code> instance for convenience
tikhomirov@64: */
tikhomirov@131: public HgLogCommand limit(int num) {
tikhomirov@64: limit = num;
tikhomirov@64: return this;
tikhomirov@64: }
tikhomirov@64:
tikhomirov@64: /**
tikhomirov@64: * Limit to specified subset of Changelog, [min(rev1,rev2), max(rev1,rev2)], inclusive.
tikhomirov@64: * Revision may be specified with {@link HgRepository#TIP}
tikhomirov@427: *
tikhomirov@427: * @param rev1 - local index of start changeset revision
tikhomirov@427: * @param rev2 - index of end changeset revision
tikhomirov@64: * @return <code>this</code> instance for convenience
tikhomirov@64: */
tikhomirov@131: public HgLogCommand range(int rev1, int rev2) {
tikhomirov@64: if (rev1 != TIP && rev2 != TIP) {
tikhomirov@64: startRev = rev2 < rev1 ? rev2 : rev1;
tikhomirov@64: endRev = startRev == rev2 ? rev1 : rev2;
tikhomirov@64: } else if (rev1 == TIP && rev2 != TIP) {
tikhomirov@64: startRev = rev2;
tikhomirov@64: endRev = rev1;
tikhomirov@64: } else {
tikhomirov@64: startRev = rev1;
tikhomirov@64: endRev = rev2;
tikhomirov@64: }
tikhomirov@565: // TODO [2.0 API break] shall throw HgBadArgumentException, like other commands do
tikhomirov@64: return this;
tikhomirov@64: }
tikhomirov@64:
tikhomirov@77: /**
tikhomirov@683: * Limit history to specified range.
tikhomirov@683: *
tikhomirov@683: * @see #range(int, int)
tikhomirov@683: * @param cset1 range start revision
tikhomirov@683: * @param cset2 range end revision
tikhomirov@683: * @return <code>this</code> instance for convenience
tikhomirov@683: * @throws HgBadArgumentException if revisions are not valid changeset identifiers
tikhomirov@683: */
tikhomirov@683: public HgLogCommand range(Nodeid cset1, Nodeid cset2) throws HgBadArgumentException {
tikhomirov@683: CsetParamKeeper pk = new CsetParamKeeper(repo);
tikhomirov@683: int r1 = pk.set(cset1).get();
tikhomirov@683: int r2 = pk.set(cset2).get();
tikhomirov@683: return range(r1, r2);
tikhomirov@683: }
tikhomirov@683:
tikhomirov@683: /**
tikhomirov@683: * Select specific changeset by index
tikhomirov@683: * @see #changeset(Nodeid)
tikhomirov@683: * @param revisionIndex index of changelog revision
tikhomirov@683: * @return <code>this</code> for convenience
tikhomirov@683: * @throws HgBadArgumentException if failed to find supplied changeset revision
tikhomirov@683: */
tikhomirov@683: public HgLogCommand changeset(int revisionIndex) throws HgBadArgumentException {
tikhomirov@683: int ri = new CsetParamKeeper(repo).set(revisionIndex).get();
tikhomirov@683: return range(ri, ri);
tikhomirov@683: }
tikhomirov@683:
tikhomirov@683: /**
tikhomirov@253: * Select specific changeset
tikhomirov@253: *
tikhomirov@253: * @param nid changeset revision
tikhomirov@253: * @return <code>this</code> for convenience
tikhomirov@427: * @throws HgBadArgumentException if failed to find supplied changeset revision
tikhomirov@253: */
tikhomirov@427: public HgLogCommand changeset(Nodeid nid) throws HgBadArgumentException {
tikhomirov@253: // XXX perhaps, shall support multiple (...) arguments and extend #execute to handle not only range, but also set of revisions.
tikhomirov@565: final int csetRevIndex = new CsetParamKeeper(repo).set(nid).get();
tikhomirov@565: return range(csetRevIndex, csetRevIndex);
tikhomirov@253: }
tikhomirov@253:
tikhomirov@253: /**
tikhomirov@516: * Visit history of a given file only. Note, unlike native <code>hg log</code> command argument <code>--follow</code>, this method doesn't
tikhomirov@516: * follow file ancestry, but reports complete file history (with <code>followCopyRenames == true</code>, for each
tikhomirov@516: * name of the file known in sequence). To achieve output similar to that of <code>hg log --follow filePath</code>, use
tikhomirov@516: * {@link #file(Path, boolean, boolean) file(filePath, true, true)} alternative.
tikhomirov@516: *
tikhomirov@516: * @param filePath path relative to repository root. Pass <code>null</code> to reset.
tikhomirov@516: * @param followCopyRename true to report changesets of the original file(-s), if copy/rename ever occurred to the file.
tikhomirov@516: * @return <code>this</code> for convenience
tikhomirov@77: */
tikhomirov@516: public HgLogCommand file(Path filePath, boolean followCopyRename) {
tikhomirov@516: return file(filePath, followCopyRename, false);
tikhomirov@516: }
tikhomirov@516:
tikhomirov@516: /**
tikhomirov@516: * Full control over file history iteration.
tikhomirov@516: *
tikhomirov@516: * @param filePath path relative to repository root. Pass <code>null</code> to reset.
tikhomirov@516: * @param followCopyRename true to report changesets of the original file(-s), if copy/rename ever occurred to the file.
tikhomirov@516: * @param followFileAncestry true to follow file history starting from revision at working copy parent. Note, only revisions
tikhomirov@516: * accessible (i.e. on direct parent line) from the selected one will be reported. This is how hg log --follow filePath
tikhomirov@516: * behaves, with the difference that this method allows separate control whether to follow renames or not.
tikhomirov@516: *
tikhomirov@516: * @return <code>this</code> for convenience
tikhomirov@516: */
tikhomirov@516: public HgLogCommand file(Path filePath, boolean followCopyRename, boolean followFileAncestry) {
tikhomirov@516: file = filePath;
tikhomirov@516: followRenames = followCopyRename;
tikhomirov@516: followAncestry = followFileAncestry;
tikhomirov@77: return this;
tikhomirov@64: }
tikhomirov@142:
tikhomirov@142: /**
tikhomirov@516: * Handy analog to {@link #file(Path, boolean)} when clients' paths come from filesystem and need conversion to repository's
tikhomirov@516: * @return <code>this</code> for convenience
tikhomirov@142: */
tikhomirov@142: public HgLogCommand file(String file, boolean followCopyRename) {
tikhomirov@571: Path.Source ps = repo.getSessionContext().getPathFactory();
tikhomirov@571: return file(ps.path(repo.getToRepoPathHelper().rewrite(file)), followCopyRename);
tikhomirov@142: }
tikhomirov@64:
tikhomirov@64: /**
tikhomirov@516: * Handy analog to {@link #file(Path, boolean, boolean)} when clients' paths come from filesystem and need conversion to repository's
tikhomirov@516: * @return <code>this</code> for convenience
tikhomirov@516: */
tikhomirov@516: public HgLogCommand file(String file, boolean followCopyRename, boolean followFileAncestry) {
tikhomirov@571: Path.Source ps = repo.getSessionContext().getPathFactory();
tikhomirov@571: return file(ps.path(repo.getToRepoPathHelper().rewrite(file)), followCopyRename, followFileAncestry);
tikhomirov@516: }
tikhomirov@522:
tikhomirov@522: /**
tikhomirov@522: * Specifies order for changesets reported through #execute(...) methods.
tikhomirov@522: * By default, the command reports changesets in their natural repository order, older first,
tikhomirov@522: * newer last (i.e. {@link HgIterateDirection#OldToNew}).
tikhomirov@522: *
tikhomirov@522: * @param order {@link HgIterateDirection#NewToOld} to get newer revisions first
tikhomirov@522: * @return <code>this</code> for convenience
tikhomirov@522: */
tikhomirov@522: public HgLogCommand order(HgIterateDirection order) {
tikhomirov@522: iterateDirection = order;
tikhomirov@522: return this;
tikhomirov@522: }
tikhomirov@516:
tikhomirov@516: /**
tikhomirov@419: * Similar to {@link #execute(HgChangesetHandler)}, collects and returns the result as a list.
tikhomirov@427: *
tikhomirov@427: * @see #execute(HgChangesetHandler)
tikhomirov@427: * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
tikhomirov@64: */
tikhomirov@396: public List<HgChangeset> execute() throws HgException {
tikhomirov@64: CollectHandler collector = new CollectHandler();
tikhomirov@215: try {
tikhomirov@215: execute(collector);
tikhomirov@423: } catch (HgCallbackTargetException ex) {
tikhomirov@423: // see below for CancelledException
tikhomirov@423: HgInvalidStateException t = new HgInvalidStateException("Internal error");
tikhomirov@423: t.initCause(ex);
tikhomirov@423: throw t;
tikhomirov@396: } catch (CancelledException ex) {
tikhomirov@215: // can't happen as long as our CollectHandler doesn't throw any exception
tikhomirov@423: HgInvalidStateException t = new HgInvalidStateException("Internal error");
tikhomirov@423: t.initCause(ex);
tikhomirov@423: throw t;
tikhomirov@215: }
tikhomirov@64: return collector.getChanges();
tikhomirov@64: }
tikhomirov@64:
tikhomirov@64: /**
tikhomirov@402: * Iterate over range of changesets configured in the command.
tikhomirov@64: *
tikhomirov@205: * @param handler callback to process changesets.
tikhomirov@427: * @throws HgCallbackTargetException propagated exception from the handler
tikhomirov@427: * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
tikhomirov@380: * @throws CancelledException if execution of the command was cancelled
tikhomirov@64: * @throws IllegalArgumentException when inspector argument is null
tikhomirov@64: * @throws ConcurrentModificationException if this log command instance is already running
tikhomirov@64: */
tikhomirov@370: public void execute(HgChangesetHandler handler) throws HgCallbackTargetException, HgException, CancelledException {
tikhomirov@64: if (handler == null) {
tikhomirov@64: throw new IllegalArgumentException();
tikhomirov@64: }
tikhomirov@193: if (csetTransform != null) {
tikhomirov@64: throw new ConcurrentModificationException();
tikhomirov@64: }
tikhomirov@215: final ProgressSupport progressHelper = getProgressSupport(handler);
tikhomirov@64: try {
tikhomirov@628: if (repo.getChangelog().getRevisionCount() == 0) {
tikhomirov@628: return;
tikhomirov@628: }
tikhomirov@692: final int firstCset = startRev;
tikhomirov@628: final int lastCset = endRev == TIP ? repo.getChangelog().getLastRevision() : endRev;
tikhomirov@628: // XXX pretty much like HgInternals.checkRevlogRange
tikhomirov@628: if (lastCset < 0 || lastCset > repo.getChangelog().getLastRevision()) {
tikhomirov@628: throw new HgBadArgumentException(String.format("Bad value %d for end revision", lastCset), null);
tikhomirov@628: }
tikhomirov@692: if (firstCset < 0 || firstCset > lastCset) {
tikhomirov@692: throw new HgBadArgumentException(String.format("Bad value %d for start revision for range [%1$d..%d]", firstCset, lastCset), null);
tikhomirov@628: }
tikhomirov@628: final int BATCH_SIZE = 100;
tikhomirov@64: count = 0;
tikhomirov@432: HgParentChildMap<HgChangelog> pw = getParentHelper(file == null); // leave it uninitialized unless we iterate whole repo
tikhomirov@193: // ChangesetTransformer creates a blank PathPool, and #file(String, boolean) above
tikhomirov@193: // may utilize it as well. CommandContext? How about StatusCollector there as well?
tikhomirov@322: csetTransform = new ChangesetTransformer(repo, handler, pw, progressHelper, getCancelSupport(handler, true));
tikhomirov@520: // FilteringInspector is responsible to check command arguments: users, branches, limit, etc.
tikhomirov@520: // prior to passing cset to next Inspector, which is either (a) collector to reverse cset order, then invokes
tikhomirov@520: // transformer from (b), below, with alternative cset order or (b) transformer to hi-level csets.
tikhomirov@518: FilteringInspector filterInsp = new FilteringInspector();
tikhomirov@692: filterInsp.changesets(firstCset, lastCset);
tikhomirov@77: if (file == null) {
tikhomirov@692: progressHelper.start(lastCset - firstCset + 1);
tikhomirov@522: if (iterateDirection == HgIterateDirection.OldToNew) {
tikhomirov@520: filterInsp.delegateTo(csetTransform);
tikhomirov@692: repo.getChangelog().range(firstCset, lastCset, filterInsp);
tikhomirov@520: csetTransform.checkFailure();
tikhomirov@520: } else {
tikhomirov@522: assert iterateDirection == HgIterateDirection.NewToOld;
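tikhomirov@522: // new-to-old order: read csets forward in BATCH_SIZE chunks (the revlog is, presumably, cheapest to read in
tikhomirov@522: // natural order), collect each batch, then replay it to the transformer in reverse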
tikhomirov@692: BatchRangeHelper brh = new BatchRangeHelper(firstCset, lastCset, BATCH_SIZE, true);
tikhomirov@692: BatchChangesetInspector batchInspector = new BatchChangesetInspector(Math.min(lastCset-firstCset+1, BATCH_SIZE));
tikhomirov@520: filterInsp.delegateTo(batchInspector);
tikhomirov@692: // XXX this batching code is a bit verbose, refactor
tikhomirov@520: while (brh.hasNext()) {
tikhomirov@520: brh.next();
tikhomirov@520: repo.getChangelog().range(brh.start(), brh.end(), filterInsp);
tikhomirov@520: for (BatchChangesetInspector.BatchRecord br : batchInspector.iterate(true)) {
tikhomirov@520: csetTransform.next(br.csetIndex, br.csetRevision, br.cset);
tikhomirov@520: csetTransform.checkFailure();
tikhomirov@520: }
tikhomirov@520: batchInspector.reset();
tikhomirov@520: }
tikhomirov@520: }
tikhomirov@77: } else {
tikhomirov@520: filterInsp.delegateTo(csetTransform);
tikhomirov@514: final HgFileRenameHandlerMixin withCopyHandler = Adaptable.Factory.getAdapter(handler, HgFileRenameHandlerMixin.class, null);
tikhomirov@528: FileRenameQueueBuilder frqBuilder = new FileRenameQueueBuilder();
tikhomirov@692: List<QueueElement> fileRenames = frqBuilder.buildFileRenamesQueue(firstCset, lastCset);
tikhomirov@528: progressHelper.start(fileRenames.size());
tikhomirov@518: for (int nameIndex = 0, fileRenamesSize = fileRenames.size(); nameIndex < fileRenamesSize; nameIndex++) {
tikhomirov@692: QueueElement curRename = fileRenames.get(nameIndex);
tikhomirov@692: HgDataFile fileNode = curRename.file();
tikhomirov@518: if (followAncestry) {
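tikhomirov@518: // with followAncestry, keep only csets that lie on the ancestry line of the selected file revision (see TreeBuildInspector)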
tikhomirov@518: TreeBuildInspector treeBuilder = new TreeBuildInspector(followAncestry);
tikhomirov@520: @SuppressWarnings("unused")
tikhomirov@692: List<HistoryNode> fileAncestry = treeBuilder.go(curRename);
tikhomirov@692: int[] commitRevisions = narrowChangesetRange(treeBuilder.getCommitRevisions(), firstCset, lastCset);
tikhomirov@522: if (iterateDirection == HgIterateDirection.OldToNew) {
tikhomirov@518: repo.getChangelog().range(filterInsp, commitRevisions);
tikhomirov@520: csetTransform.checkFailure();
tikhomirov@518: } else {
tikhomirov@522: assert iterateDirection == HgIterateDirection.NewToOld;
tikhomirov@518: // visit one by one in the opposite direction
tikhomirov@518: for (int i = commitRevisions.length-1; i >= 0; i--) {
tikhomirov@518: int csetWithFileChange = commitRevisions[i];
tikhomirov@518: repo.getChangelog().range(csetWithFileChange, csetWithFileChange, filterInsp);
tikhomirov@518: }
tikhomirov@126: }
tikhomirov@518: } else {
tikhomirov@518: // report complete file history (XXX may narrow range with [startRev, endRev], but need to go from file rev to link rev)
tikhomirov@692: int fileStartRev = curRename.fileFrom();
tikhomirov@692: int fileEndRev = curRename.file().getLastRevision(); //curRename.fileTo();
tikhomirov@522: if (iterateDirection == HgIterateDirection.OldToNew) {
tikhomirov@520: fileNode.history(fileStartRev, fileEndRev, filterInsp);
tikhomirov@520: csetTransform.checkFailure();
tikhomirov@520: } else {
tikhomirov@522: assert iterateDirection == HgIterateDirection.NewToOld;
tikhomirov@520: BatchRangeHelper brh = new BatchRangeHelper(fileStartRev, fileEndRev, BATCH_SIZE, true);
tikhomirov@520: BatchChangesetInspector batchInspector = new BatchChangesetInspector(Math.min(fileEndRev-fileStartRev+1, BATCH_SIZE));
tikhomirov@520: filterInsp.delegateTo(batchInspector);
tikhomirov@520: while (brh.hasNext()) {
tikhomirov@520: brh.next();
tikhomirov@520: fileNode.history(brh.start(), brh.end(), filterInsp);
tikhomirov@520: for (BatchChangesetInspector.BatchRecord br : batchInspector.iterate(true /*iterateDirection == IterateDirection.FromNewToOld*/)) {
tikhomirov@520: csetTransform.next(br.csetIndex, br.csetRevision, br.cset);
tikhomirov@520: csetTransform.checkFailure();
tikhomirov@520: }
tikhomirov@520: batchInspector.reset();
tikhomirov@520: }
tikhomirov@520: }
tikhomirov@518: }
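tikhomirov@528: // when the handler is also interested in renames, report the copy source/target at the boundary between name chunks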
tikhomirov@528: if (withCopyHandler != null && nameIndex + 1 < fileRenamesSize) {
tikhomirov@692: QueueElement nextRename = fileRenames.get(nameIndex+1);
tikhomirov@518: HgFileRevision src, dst;
tikhomirov@518: // A -> B
tikhomirov@522: if (iterateDirection == HgIterateDirection.OldToNew) {
tikhomirov@518: // curRename: A, nextRename: B
tikhomirov@692: src = curRename.last();
tikhomirov@692: dst = nextRename.first(src);
tikhomirov@518: } else {
tikhomirov@522: assert iterateDirection == HgIterateDirection.NewToOld;
tikhomirov@518: // curRename: B, nextRename: A
tikhomirov@692: src = nextRename.last();
tikhomirov@692: dst = curRename.first(src);
tikhomirov@80: }
tikhomirov@518: withCopyHandler.copy(src, dst);
tikhomirov@518: }
tikhomirov@528: progressHelper.worked(1);
tikhomirov@518: } // for renames
tikhomirov@528: frqBuilder.reportRenameIfNotInQueue(fileRenames, withCopyHandler);
tikhomirov@518: } // file != null
tikhomirov@427: } catch (HgRuntimeException ex) {
tikhomirov@427: throw new HgLibraryFailureException(ex);
tikhomirov@64: } finally {
tikhomirov@193: csetTransform = null;
tikhomirov@215: progressHelper.done();
tikhomirov@64: }
tikhomirov@64: }
tikhomirov@328:
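tikhomirov@520: /**
tikhomirov@520: * Accumulates raw changesets read from a changelog range so they can be replayed to the downstream
tikhomirov@520: * inspector later, possibly in reverse order (used for NewToOld iteration).
tikhomirov@520: */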
tikhomirov@520: private static class BatchChangesetInspector extends AdapterPlug implements HgChangelog.Inspector {
tikhomirov@520: private static class BatchRecord {
tikhomirov@520: public final int csetIndex;
tikhomirov@520: public final Nodeid csetRevision;
tikhomirov@520: public final RawChangeset cset;
tikhomirov@520:
tikhomirov@520: public BatchRecord(int index, Nodeid nodeid, RawChangeset changeset) {
tikhomirov@520: csetIndex = index;
tikhomirov@520: csetRevision = nodeid;
tikhomirov@520: cset = changeset;
tikhomirov@520: }
tikhomirov@520: }
tikhomirov@520: private final ArrayList<BatchRecord> batch;
tikhomirov@520:
tikhomirov@520: public BatchChangesetInspector(int batchSizeHint) {
tikhomirov@520: batch = new ArrayList<BatchRecord>(batchSizeHint);
tikhomirov@520: }
tikhomirov@520:
tikhomirov@520: public BatchChangesetInspector reset() {
tikhomirov@520: batch.clear();
tikhomirov@520: return this;
tikhomirov@520: }
tikhomirov@520:
tikhomirov@520: public void next(int revisionIndex, Nodeid nodeid, RawChangeset cset) {
tikhomirov@520: batch.add(new BatchRecord(revisionIndex, nodeid, cset.clone()));
tikhomirov@520: }
tikhomirov@520:
tikhomirov@520: public Iterable<BatchRecord> iterate(final boolean reverse) {
tikhomirov@596: return reverse ? ReverseIterator.reversed(batch) : batch;
tikhomirov@520: }
tikhomirov@520:
tikhomirov@520: // alternative would be dispatch(HgChangelog.Inspector) and dispatchReverse()
tikhomirov@520: // methods, but progress and cancellation might get messy then
tikhomirov@520: }
tikhomirov@520:
tikhomirov@518: // public static void main(String[] args) {
tikhomirov@518: // int[] r = new int[] {17, 19, 21, 23, 25, 29};
tikhomirov@518: // System.out.println(Arrays.toString(narrowChangesetRange(r, 0, 45)));
tikhomirov@518: // System.out.println(Arrays.toString(narrowChangesetRange(r, 0, 25)));
tikhomirov@518: // System.out.println(Arrays.toString(narrowChangesetRange(r, 5, 26)));
tikhomirov@518: // System.out.println(Arrays.toString(narrowChangesetRange(r, 20, 26)));
tikhomirov@518: // System.out.println(Arrays.toString(narrowChangesetRange(r, 26, 28)));
tikhomirov@518: // }
tikhomirov@518:
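tikhomirov@518: /**
tikhomirov@518: * Clip a sorted array of changeset indexes to those that fall into [startCset..endCset].
tikhomirov@518: */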
tikhomirov@518: private static int[] narrowChangesetRange(int[] csetRange, int startCset, int endCset) {
tikhomirov@518: int lastInRange = csetRange[csetRange.length-1];
tikhomirov@518: assert csetRange.length < 2 || csetRange[0] < lastInRange; // sorted
tikhomirov@518: assert startCset >= 0 && startCset <= endCset;
tikhomirov@518: if (csetRange[0] >= startCset && lastInRange <= endCset) {
tikhomirov@518: // completely fits in
tikhomirov@518: return csetRange;
tikhomirov@518: }
tikhomirov@518: if (csetRange[0] > endCset || lastInRange < startCset) {
tikhomirov@518: return new int[0]; // trivial
tikhomirov@518: }
tikhomirov@518: int i = 0;
tikhomirov@518: while (i < csetRange.length && csetRange[i] < startCset) {
tikhomirov@518: i++;
tikhomirov@518: }
tikhomirov@518: int j = csetRange.length - 1;
tikhomirov@518: while (j > i && csetRange[j] > endCset) {
tikhomirov@518: j--;
tikhomirov@518: }
tikhomirov@518: if (i == j && (csetRange[i] < startCset || csetRange[i] > endCset)) {
tikhomirov@518: // no values in csetRange fit into [startCset, endCset] (check covers the single remaining element, too)
tikhomirov@518: return new int[0];
tikhomirov@518: }
tikhomirov@518: int[] rv = new int[j-i+1];
tikhomirov@518: System.arraycopy(csetRange, i, rv, 0, rv.length);
tikhomirov@518: return rv;
tikhomirov@518: }
tikhomirov@518:
tikhomirov@370: /**
tikhomirov@515: * Tree-wise iteration of a file history, with handy access to parent-child relations between changesets.
tikhomirov@515: * When file history is being followed, handler may additionally implement {@link HgFileRenameHandlerMixin}
tikhomirov@515: * to get notified about switching between history chunks that belong to different names.
tikhomirov@402: *
tikhomirov@402: * @param handler callback to process changesets.
tikhomirov@515: * @see HgFileRenameHandlerMixin
tikhomirov@427: * @throws HgCallbackTargetException propagated exception from the handler
tikhomirov@427: * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
tikhomirov@380: * @throws CancelledException if execution of the command was cancelled
tikhomirov@402: * @throws IllegalArgumentException if command is not satisfied with its arguments
tikhomirov@402: * @throws ConcurrentModificationException if this log command instance is already running
tikhomirov@370: */
tikhomirov@515: public void execute(final HgChangesetTreeHandler handler) throws HgCallbackTargetException, HgException, CancelledException {
tikhomirov@328: if (handler == null) {
tikhomirov@328: throw new IllegalArgumentException();
tikhomirov@328: }
tikhomirov@328: if (csetTransform != null) {
tikhomirov@328: throw new ConcurrentModificationException();
tikhomirov@328: }
tikhomirov@328: if (file == null) {
tikhomirov@328: throw new IllegalArgumentException("History tree is supported for files only (at least now), please specify file");
tikhomirov@328: }
tikhomirov@692: final int firstCset = startRev;
tikhomirov@692: final int lastCset = endRev == TIP ? repo.getChangelog().getLastRevision() : endRev;
tikhomirov@692: // XXX pretty much like HgInternals.checkRevlogRange
tikhomirov@692: if (lastCset < 0 || lastCset > repo.getChangelog().getLastRevision()) {
tikhomirov@692: throw new HgBadArgumentException(String.format("Bad value %d for end revision", lastCset), null);
tikhomirov@692: }
tikhomirov@692: if (firstCset < 0 || startRev > lastCset) {
tikhomirov@692: throw new HgBadArgumentException(String.format("Bad value %d for start revision for range [%1$d..%d]", startRev, lastCset), null);
tikhomirov@692: }
tikhomirov@328: final ProgressSupport progressHelper = getProgressSupport(handler);
tikhomirov@328: final CancelSupport cancelHelper = getCancelSupport(handler, true);
tikhomirov@514: final HgFileRenameHandlerMixin renameHandler = Adaptable.Factory.getAdapter(handler, HgFileRenameHandlerMixin.class, null);
tikhomirov@507:
tikhomirov@628: try {
tikhomirov@508:
tikhomirov@628: // XXX rename. dispatcher is not a proper name (most of the job done - managing history chunk interconnection)
tikhomirov@628: final HandlerDispatcher dispatcher = new HandlerDispatcher() {
tikhomirov@628:
tikhomirov@628: @Override
tikhomirov@628: protected void once(HistoryNode n) throws HgCallbackTargetException, CancelledException, HgRuntimeException {
tikhomirov@628: handler.treeElement(ei.init(n, currentFileNode));
tikhomirov@628: cancelHelper.checkCancelled();
tikhomirov@628: }
tikhomirov@628: };
tikhomirov@628:
tikhomirov@628: // renamed files in the queue are placed with respect to #iterateDirection
tikhomirov@628: // i.e. if we iterate from new to old, recent filenames come first
tikhomirov@628: FileRenameQueueBuilder frqBuilder = new FileRenameQueueBuilder();
tikhomirov@692: List<QueueElement> fileRenamesQueue = frqBuilder.buildFileRenamesQueue(firstCset, lastCset);
tikhomirov@628: // XXX perhaps, makes sense to look at selected file's revision when followAncestry is true
tikhomirov@628: // to ensure file we attempt to trace is in the WC's parent. Native hg aborts if not.
tikhomirov@628: progressHelper.start(4 * fileRenamesQueue.size());
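tikhomirov@628: // 4 units of work per name: 1 to build the history chunk, then either 1 to pre-read csets and 2 to dispatch,
tikhomirov@628: // or 3 to dispatch (see HandlerDispatcher#prepare)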
tikhomirov@628: for (int namesIndex = 0, renamesQueueSize = fileRenamesQueue.size(); namesIndex < renamesQueueSize; namesIndex++) {
tikhomirov@628:
tikhomirov@692: final QueueElement renameInfo = fileRenamesQueue.get(namesIndex);
tikhomirov@628: dispatcher.prepare(progressHelper, renameInfo);
tikhomirov@628: cancelHelper.checkCancelled();
tikhomirov@628: if (namesIndex > 0) {
tikhomirov@628: dispatcher.connectWithLastJunctionPoint(renameInfo, fileRenamesQueue.get(namesIndex - 1));
tikhomirov@628: }
tikhomirov@628: if (namesIndex + 1 < renamesQueueSize) {
tikhomirov@628: // there's at least one more name we are going to look at
tikhomirov@628: dispatcher.updateJunctionPoint(renameInfo, fileRenamesQueue.get(namesIndex+1), renameHandler != null);
tikhomirov@628: } else {
tikhomirov@628: dispatcher.clearJunctionPoint();
tikhomirov@628: }
tikhomirov@628: dispatcher.dispatchAllChanges();
tikhomirov@628: if (renameHandler != null && namesIndex + 1 < renamesQueueSize) {
tikhomirov@628: dispatcher.reportRenames(renameHandler);
tikhomirov@628: }
tikhomirov@628: } // for fileRenamesQueue;
tikhomirov@628: frqBuilder.reportRenameIfNotInQueue(fileRenamesQueue, renameHandler);
tikhomirov@628: } catch (HgRuntimeException ex) {
tikhomirov@628: throw new HgLibraryFailureException(ex);
tikhomirov@628: }
tikhomirov@328: progressHelper.done();
tikhomirov@328: }
tikhomirov@328:
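tikhomirov@692: /**
tikhomirov@692: * Single element of the rename queue: file node for one historical name of the file,
tikhomirov@692: * along with the range of its revisions to visit.
tikhomirov@692: */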
tikhomirov@692: private static class QueueElement {
tikhomirov@692: private final HgDataFile df;
tikhomirov@692: private final Nodeid lastRev;
tikhomirov@692: private final int firstRevIndex, lastRevIndex;
tikhomirov@692:
tikhomirov@692: QueueElement(HgDataFile file, Nodeid fileLastRev) {
tikhomirov@692: df = file;
tikhomirov@692: lastRev = fileLastRev;
tikhomirov@692: firstRevIndex = 0;
tikhomirov@692: lastRevIndex = lastRev == null ? df.getLastRevision() : df.getRevisionIndex(lastRev);
tikhomirov@692: }
tikhomirov@692: QueueElement(HgDataFile file, int firstFileRev, int lastFileRev) {
tikhomirov@692: df = file;
tikhomirov@692: firstRevIndex = firstFileRev;
tikhomirov@692: lastRevIndex = lastFileRev;
tikhomirov@692: lastRev = null;
tikhomirov@692: }
tikhomirov@692: HgDataFile file() {
tikhomirov@692: return df;
tikhomirov@692: }
tikhomirov@692: int fileFrom() {
tikhomirov@692: return firstRevIndex;
tikhomirov@692: }
tikhomirov@692: int fileTo() {
tikhomirov@692: return lastRevIndex;
tikhomirov@692: }
tikhomirov@692: // never null
tikhomirov@692: Nodeid lastFileRev() {
tikhomirov@692: return lastRev == null ? df.getRevision(fileTo()) : lastRev;
tikhomirov@692: }
tikhomirov@692: HgFileRevision last() {
tikhomirov@692: return new HgFileRevision(df, lastFileRev(), null);
tikhomirov@692: }
tikhomirov@692: HgFileRevision first(HgFileRevision from) {
tikhomirov@692: return new HgFileRevision(df, df.getRevision(0), from.getPath());
tikhomirov@692: }
tikhomirov@692: }
tikhomirov@692:
tikhomirov@507: /**
tikhomirov@528: * Utility to build sequence of file renames
tikhomirov@507: */
tikhomirov@528: private class FileRenameQueueBuilder {
tikhomirov@528:
tikhomirov@528: /**
tikhomirov@528: * Follows file renames and build a list of all corresponding file nodes and revisions they were
tikhomirov@528: * copied/renamed/branched at (IOW, their latest revision to look at).
tikhomirov@528: *
tikhomirov@528: * @param followRename when <code>false</code>, the list contains one element only,
tikhomirov@528: * file node with the name of the file as it was specified by the user.
tikhomirov@528: *
tikhomirov@528: * @param followAncestry the most recent file revision reported depends on this parameter,
tikhomirov@528: * and it is file revision from working copy parent in there when it's true.
tikhomirov@528: * <code>null</code> as Pair's second indicates file's TIP revision shall be used.
tikhomirov@528: *
tikhomirov@528: * TODO may use HgFileRevision (after some refactoring to accept HgDataFile and Nodeid) instead of Pair
tikhomirov@528: * and possibly reuse this functionality
tikhomirov@528: *
tikhomirov@528: * @return list of file renames, ordered with respect to {@link #iterateDirection}
tikhomirov@628: * @throws HgRuntimeException
tikhomirov@528: */
tikhomirov@692: public List<QueueElement> buildFileRenamesQueue(int csetStart, int csetEnd) throws HgPathNotFoundException, HgRuntimeException {
tikhomirov@692: LinkedList<QueueElement> rv = new LinkedList<QueueElement>();
tikhomirov@528: Nodeid startRev = null;
tikhomirov@528: HgDataFile fileNode = repo.getFileNode(file);
tikhomirov@528: if (!fileNode.exists()) {
tikhomirov@528: throw new HgPathNotFoundException(String.format("File %s not found in the repository", file), file);
tikhomirov@514: }
tikhomirov@528: if (followAncestry) {
tikhomirov@528: // TODO subject to dedicated method either in HgRepository (getWorkingCopyParentRevisionIndex)
tikhomirov@528: // or in the HgDataFile (getWorkingCopyOriginRevision)
tikhomirov@528: Nodeid wdParentChangeset = repo.getWorkingCopyParents().first();
tikhomirov@528: if (!wdParentChangeset.isNull()) {
tikhomirov@528: int wdParentRevIndex = repo.getChangelog().getRevisionIndex(wdParentChangeset);
tikhomirov@528: startRev = repo.getManifest().getFileRevision(wdParentRevIndex, fileNode.getPath());
tikhomirov@528: }
tikhomirov@528: // else fall-through, assume null (eventually, lastRevision()) is ok here
tikhomirov@528: }
tikhomirov@692: QueueElement p = new QueueElement(fileNode, startRev);
tikhomirov@528: if (!followRenames) {
tikhomirov@692: rv.add(p);
tikhomirov@528: return rv;
tikhomirov@528: }
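tikhomirov@692: // follow the copy/rename chain and turn each chunk of the file's history under a single name into a QueueElement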
tikhomirov@692: FileRenameHistory frh = new FileRenameHistory(csetStart, csetEnd);
tikhomirov@692: frh.build(fileNode, p.fileTo());
tikhomirov@692: for (Chunk c : frh.iterate(iterateDirection)) {
tikhomirov@692: rv.add(new QueueElement(c.file(), c.firstFileRev(), c.lastFileRev()));
tikhomirov@692: }
tikhomirov@507: return rv;
tikhomirov@507: }
tikhomirov@528:
tikhomirov@528: /**
tikhomirov@528: * Shall report renames based solely on HgFileRenameHandlerMixin presence,
tikhomirov@528: * even if queue didn't get rename information due to followRenames == false
tikhomirov@528: *
tikhomirov@528: * @param queue value from {@link #buildFileRenamesQueue()}
tikhomirov@528: * @param renameHandler may be null
tikhomirov@528: */
tikhomirov@692: public void reportRenameIfNotInQueue(List<QueueElement> queue, HgFileRenameHandlerMixin renameHandler) throws HgCallbackTargetException, HgRuntimeException {
tikhomirov@528: if (renameHandler != null && !followRenames) {
tikhomirov@528: // If followRenames is true, all the historical names were in the queue and are processed already.
tikhomirov@528: // Hence, shall process origin explicitly only when renameHandler is present but followRenames is not requested.
tikhomirov@528: assert queue.size() == 1; // see the way queue is constructed above
tikhomirov@692: QueueElement curRename = queue.get(0);
tikhomirov@692: if (curRename.file().isCopy(curRename.fileFrom())) {
tikhomirov@692: final HgFileRevision src = curRename.file().getCopySource(curRename.fileFrom());
tikhomirov@692: HgFileRevision dst = curRename.first(src);
tikhomirov@528: renameHandler.copy(src, dst);
tikhomirov@528: }
tikhomirov@510: }
tikhomirov@528: }
tikhomirov@507: }
tikhomirov@508:
tikhomirov@692: /**
tikhomirov@692: * Builds list of {@link HistoryNode HistoryNodes} to visit for a given chunk of file rename history
tikhomirov@692: */
tikhomirov@508: private static class TreeBuildInspector implements HgChangelog.ParentInspector, HgChangelog.RevisionInspector {
tikhomirov@508: private final boolean followAncestry;
tikhomirov@508:
tikhomirov@508: private HistoryNode[] completeHistory;
tikhomirov@508: private int[] commitRevisions;
tikhomirov@509: private List<HistoryNode> resultHistory;
tikhomirov@508:
tikhomirov@508: TreeBuildInspector(boolean _followAncestry) {
tikhomirov@508: followAncestry = _followAncestry;
tikhomirov@508: }
tikhomirov@508:
tikhomirov@508: public void next(int revisionNumber, Nodeid revision, int linkedRevision) {
tikhomirov@508: commitRevisions[revisionNumber] = linkedRevision;
tikhomirov@508: }
tikhomirov@508:
tikhomirov@508: public void next(int revisionNumber, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2) {
tikhomirov@508: HistoryNode p1 = null, p2 = null;
tikhomirov@516: // IMPORTANT: this code expects reasonable values at parent indexes; method #one(), below, doesn't fill them (parent entries stay null there)
tikhomirov@508: if (parent1 != -1) {
tikhomirov@508: p1 = completeHistory[parent1];
tikhomirov@508: }
tikhomirov@508: if (parent2!= -1) {
tikhomirov@508: p2 = completeHistory[parent2];
tikhomirov@508: }
tikhomirov@508: completeHistory[revisionNumber] = new HistoryNode(commitRevisions[revisionNumber], revision, p1, p2);
tikhomirov@508: }
tikhomirov@508:
tikhomirov@628: HistoryNode one(HgDataFile fileNode, Nodeid fileRevision) throws HgRuntimeException {
tikhomirov@516: int fileRevIndexToVisit = fileNode.getRevisionIndex(fileRevision);
tikhomirov@516: return one(fileNode, fileRevIndexToVisit);
tikhomirov@516: }
tikhomirov@516:
tikhomirov@628: HistoryNode one(HgDataFile fileNode, int fileRevIndexToVisit) throws HgRuntimeException {
tikhomirov@516: resultHistory = null;
tikhomirov@516: if (fileRevIndexToVisit == HgRepository.TIP) {
tikhomirov@516: fileRevIndexToVisit = fileNode.getLastRevision();
tikhomirov@516: }
tikhomirov@516: // still, allocate whole array, for #next to be able to get null parent values
tikhomirov@516: completeHistory = new HistoryNode[fileRevIndexToVisit+1];
tikhomirov@516: commitRevisions = new int[completeHistory.length];
tikhomirov@516: fileNode.indexWalk(fileRevIndexToVisit, fileRevIndexToVisit, this);
tikhomirov@516: // it's only single revision, no need to care about followAncestry
tikhomirov@516: // but won't hurt to keep resultHistory != null and commitRevisions initialized just in case
tikhomirov@516: HistoryNode rv = completeHistory[fileRevIndexToVisit];
tikhomirov@516: commitRevisions = new int[] { commitRevisions[fileRevIndexToVisit] };
tikhomirov@516: completeHistory = null; // no need to keep almost empty array in memory
tikhomirov@516: resultHistory = Collections.singletonList(rv);
tikhomirov@516: return rv;
tikhomirov@516: }
tikhomirov@516:
tikhomirov@508: /**
tikhomirov@692: * FIXME pretty much the same as FileRevisionHistoryChunk
tikhomirov@692: *
tikhomirov@508: * Builds history of file changes (in natural order, from oldest to newest) up to (and including) file revision specified.
tikhomirov@508: * If {@link TreeBuildInspector} follows ancestry, only elements that are on the line of ancestry of the revision at
tikhomirov@508: * lastRevisionIndex would be included.
tikhomirov@509: *
tikhomirov@509: * @return list of history elements, from oldest to newest. In case {@link #followAncestry} is <code>true</code>, the list
tikhomirov@509: * is modifiable (to further augment with last/first elements of renamed file histories)
tikhomirov@508: */
tikhomirov@692: List<HistoryNode> go(QueueElement qe) throws HgRuntimeException {
tikhomirov@509: resultHistory = null;
tikhomirov@692: HgDataFile fileNode = qe.file();
tikhomirov@692: // TODO int fileLastRevIndexToVisit = qe.fileTo
tikhomirov@692: int fileLastRevIndexToVisit = followAncestry ? fileNode.getRevisionIndex(qe.lastFileRev()) : fileNode.getLastRevision();
tikhomirov@514: completeHistory = new HistoryNode[fileLastRevIndexToVisit+1];
tikhomirov@508: commitRevisions = new int[completeHistory.length];
tikhomirov@692: fileNode.indexWalk(qe.fileFrom(), fileLastRevIndexToVisit, this);
tikhomirov@508: if (!followAncestry) {
tikhomirov@692: resultHistory = new ArrayList<HistoryNode>(fileLastRevIndexToVisit - qe.fileFrom() + 1);
tikhomirov@692: // items in completeHistory with index < qe.fileFrom are empty
tikhomirov@692: for (int i = qe.fileFrom(); i <= fileLastRevIndexToVisit; i++) {
tikhomirov@692: resultHistory.add(completeHistory[i]);
tikhomirov@692: }
tikhomirov@509: completeHistory = null;
tikhomirov@692: commitRevisions = null;
tikhomirov@509: return resultHistory;
tikhomirov@508: }
tikhomirov@508: /*
tikhomirov@509: * Changesets, newest at the top:
tikhomirov@508: * o <-- cset from working dir parent (as in dirstate), file not changed (file revision recorded points to that from A)
tikhomirov@508: * | x <-- revision with file changed (B')
tikhomirov@508: * x / <-- revision with file changed (A)
tikhomirov@508: * | x <-- revision with file changed (B)
tikhomirov@508: * |/
tikhomirov@508: * o <-- another changeset, where file wasn't changed
tikhomirov@508: * |
tikhomirov@508: * x <-- revision with file changed (C)
tikhomirov@508: *
tikhomirov@508: * File history: B', A, B, C
tikhomirov@508: *
tikhomirov@508: * When "follow", SHALL NOT report B and B', but A and C
tikhomirov@508: */
tikhomirov@508: // strippedHistory: only those HistoryNodes from completeHistory that are on the same
tikhomirov@508: // line of descendant, in order from older to newer
tikhomirov@508: LinkedList<HistoryNode> strippedHistoryList = new LinkedList<HistoryNode>();
tikhomirov@508: LinkedList<HistoryNode> queue = new LinkedList<HistoryNode>();
tikhomirov@508: // look for ancestors of the selected history node
tikhomirov@514: queue.add(completeHistory[fileLastRevIndexToVisit]);
tikhomirov@508: do {
tikhomirov@508: HistoryNode withFileChange = queue.removeFirst();
tikhomirov@511: if (strippedHistoryList.contains(withFileChange)) {
tikhomirov@511: // fork point for the change that was later merged (and we traced
tikhomirov@511: // both lines of development by now).
tikhomirov@511: continue;
tikhomirov@511: }
tikhomirov@508: if (withFileChange.children != null) {
tikhomirov@508: withFileChange.children.retainAll(strippedHistoryList);
tikhomirov@508: }
tikhomirov@508: strippedHistoryList.addFirst(withFileChange);
tikhomirov@508: if (withFileChange.parent1 != null) {
tikhomirov@508: queue.addLast(withFileChange.parent1);
tikhomirov@508: }
tikhomirov@508: if (withFileChange.parent2 != null) {
tikhomirov@508: queue.addLast(withFileChange.parent2);
tikhomirov@508: }
tikhomirov@508: } while (!queue.isEmpty());
tikhomirov@511: Collections.sort(strippedHistoryList, new Comparator<HistoryNode>() {
tikhomirov@511:
tikhomirov@511: public int compare(HistoryNode o1, HistoryNode o2) {
tikhomirov@511: return o1.changeset - o2.changeset;
tikhomirov@511: }
tikhomirov@511: });
tikhomirov@508: completeHistory = null;
tikhomirov@508: commitRevisions = null;
tikhomirov@509: return resultHistory = strippedHistoryList;
tikhomirov@508: }
tikhomirov@508:
tikhomirov@508: /**
tikhomirov@508: * handy access to all HistoryNode[i].changeset values
tikhomirov@508: */
tikhomirov@508: int[] getCommitRevisions() {
tikhomirov@509: if (commitRevisions == null) {
tikhomirov@509: commitRevisions = new int[resultHistory.size()];
tikhomirov@509: int i = 0;
tikhomirov@509: for (HistoryNode n : resultHistory) {
tikhomirov@509: commitRevisions[i++] = n.changeset;
tikhomirov@509: }
tikhomirov@509: }
tikhomirov@508: return commitRevisions;
tikhomirov@508: }
tikhomirov@508: };
tikhomirov@508:
tikhomirov@692: /**
tikhomirov@692: * Sends {@link ElementImpl} for each {@link HistoryNode}, and keeps track of junction points - revisions with renames
tikhomirov@692: */
tikhomirov@516: private abstract class HandlerDispatcher {
tikhomirov@516: private final int CACHE_CSET_IN_ADVANCE_THRESHOLD = 100; /* XXX is it really worth it? */
tikhomirov@516: // builds tree of nodes according to parents in file's revlog
tikhomirov@516: private final TreeBuildInspector treeBuildInspector = new TreeBuildInspector(followAncestry);
tikhomirov@516: private List<HistoryNode> changeHistory;
tikhomirov@516: protected ElementImpl ei = null;
tikhomirov@516: private ProgressSupport progress;
tikhomirov@516: protected HgDataFile currentFileNode;
tikhomirov@516: // node where current file history chunk intersects with same file under other name history
tikhomirov@516: // either mock of B(0) or A(k), depending on iteration order
tikhomirov@516: private HistoryNode junctionNode;
tikhomirov@528: // initialized when there's HgFileRenameHandlerMixin
tikhomirov@528: private HgFileRevision copiedFrom, copiedTo;
tikhomirov@516:
tikhomirov@516: // parentProgress shall be initialized with 4 XXX refactor all this stuff with parentProgress
tikhomirov@692: public void prepare(ProgressSupport parentProgress, QueueElement renameInfo) throws HgRuntimeException {
tikhomirov@692: changeHistory = treeBuildInspector.go(renameInfo);
tikhomirov@516: assert changeHistory.size() > 0;
tikhomirov@516: parentProgress.worked(1);
tikhomirov@516: int historyNodeCount = changeHistory.size();
tikhomirov@516: if (ei == null) {
tikhomirov@516: // when follow is true, changeHistory.size() of the first revision might be quite short
tikhomirov@516: // (e.g. bad fname recognized soon), hence ensure at least cache size at once
tikhomirov@516: ei = new ElementImpl(Math.max(CACHE_CSET_IN_ADVANCE_THRESHOLD, historyNodeCount));
tikhomirov@516: }
tikhomirov@516: if (historyNodeCount < CACHE_CSET_IN_ADVANCE_THRESHOLD ) {
tikhomirov@516: int[] commitRevisions = treeBuildInspector.getCommitRevisions();
tikhomirov@516: assert commitRevisions.length == changeHistory.size();
tikhomirov@516: // read bunch of changesets at once and cache 'em
tikhomirov@516: ei.initTransform();
tikhomirov@516: repo.getChangelog().range(ei, commitRevisions);
tikhomirov@516: parentProgress.worked(1);
tikhomirov@516: progress = new ProgressSupport.Sub(parentProgress, 2);
tikhomirov@516: } else {
tikhomirov@516: progress = new ProgressSupport.Sub(parentProgress, 3);
tikhomirov@516: }
tikhomirov@516: progress.start(historyNodeCount);
tikhomirov@516: // switch to present chunk's file node
tikhomirov@692: switchTo(renameInfo.file());
tikhomirov@516: }
tikhomirov@528:
tikhomirov@692: public void updateJunctionPoint(QueueElement curRename, QueueElement nextRename, boolean needCopyFromTo) throws HgRuntimeException {
tikhomirov@528: copiedFrom = copiedTo = null;
tikhomirov@528: //
tikhomirov@516: // A (old) renamed to B(new). A(0..k..n) -> B(0..m). If followAncestry, k == n
tikhomirov@516: // curRename.lastFileRev() points to A(k)
tikhomirov@522: if (iterateDirection == HgIterateDirection.OldToNew) {
tikhomirov@516: // looking at A chunk (curRename), nextRename points to B
tikhomirov@692: HistoryNode junctionSrc = findJunctionPointInCurrentChunk(curRename.lastFileRev()); // A(k)
tikhomirov@692: HistoryNode junctionDestMock = treeBuildInspector.one(nextRename.file(), 0); // B(0)
tikhomirov@516: // junctionDestMock is a mock object; once we iterate the next rename, there'd be a different HistoryNode
tikhomirov@516: // for B's first revision. This means we read it twice, but this seems to be reasonable
tikhomirov@516: // price for simplicity of the code (and opportunity to follow renames while not following ancestry)
tikhomirov@516: junctionSrc.bindChild(junctionDestMock);
tikhomirov@516: // Save mock A(k): 1) not to keep whole A history in memory 2) don't need its parent and children once we get to B
tikhomirov@516: // moreover, children of original A(k) (junctionSrc) would list mock B(0) which is undesired once we iterate over real B
tikhomirov@516: junctionNode = new HistoryNode(junctionSrc.changeset, junctionSrc.fileRevision, null, null);
tikhomirov@528: if (needCopyFromTo) {
tikhomirov@692: copiedFrom = new HgFileRevision(curRename.file(), junctionNode.fileRevision, null); // "A", A(k)
tikhomirov@692: copiedTo = new HgFileRevision(nextRename.file(), junctionDestMock.fileRevision, copiedFrom.getPath()); // "B", B(0)
tikhomirov@528: }
tikhomirov@516: } else {
tikhomirov@522: assert iterateDirection == HgIterateDirection.NewToOld;
tikhomirov@516: // looking at B chunk (curRename), nextRename points at A
tikhomirov@516: HistoryNode junctionDest = changeHistory.get(0); // B(0)
tikhomirov@516: // prepare mock A(k)
tikhomirov@692: HistoryNode junctionSrcMock = treeBuildInspector.one(nextRename.file(), nextRename.lastFileRev()); // A(k)
tikhomirov@516: // B(0) to list A(k) as its parent
tikhomirov@516: // NOTE, A(k) would be different when we reach A chunk on the next iteration,
tikhomirov@516: // but we do not care as long as TreeElement needs only parent/child changesets
tikhomirov@516: // and not other TreeElements; so that it's enough to have mock parent node (just
tikhomirov@516: // for the sake of parent cset revisions). We have to, indeed, update real A(k),
tikhomirov@516: // once we get to iteration over A, with B(0) (junctionDest) as one more child.
tikhomirov@516: junctionSrcMock.bindChild(junctionDest);
tikhomirov@516: // Save mock B(0), for reasons see above for opposite direction
tikhomirov@516: junctionNode = new HistoryNode(junctionDest.changeset, junctionDest.fileRevision, null, null);
tikhomirov@528: if (needCopyFromTo) {
tikhomirov@692: copiedFrom = new HgFileRevision(nextRename.file(), junctionSrcMock.fileRevision, null); // "A", A(k)
tikhomirov@692: copiedTo = new HgFileRevision(curRename.file(), junctionNode.fileRevision, copiedFrom.getPath()); // "B", B(0)
tikhomirov@528: }
tikhomirov@528: }
tikhomirov@528: }
tikhomirov@528:
tikhomirov@628: public void reportRenames(HgFileRenameHandlerMixin renameHandler) throws HgCallbackTargetException, HgRuntimeException {
tikhomirov@528: if (renameHandler != null) { // shall report renames
tikhomirov@528: assert copiedFrom != null;
tikhomirov@528: assert copiedTo != null;
tikhomirov@528: renameHandler.copy(copiedFrom, copiedTo);
tikhomirov@516: }
tikhomirov@516: }
tikhomirov@516:
tikhomirov@516: public void clearJunctionPoint() {
tikhomirov@516: junctionNode = null;
tikhomirov@528: copiedFrom = copiedTo = null;
tikhomirov@516: }
tikhomirov@516:
tikhomirov@528: /**
tikhomirov@528: * Replace mock src/dest HistoryNode connected to junctionNode with a real one
tikhomirov@528: */
tikhomirov@692: public void connectWithLastJunctionPoint(QueueElement curRename, QueueElement prevRename) {
tikhomirov@516: assert junctionNode != null;
tikhomirov@516: // A renamed to B. A(0..k..n) -> B(0..m). If followAncestry: k == n
tikhomirov@522: if (iterateDirection == HgIterateDirection.OldToNew) {
tikhomirov@516: // forward, from old to new:
tikhomirov@516: // changeHistory points to B
tikhomirov@516: // Already reported: A(0)..A(n), A(k) is in junctionNode
tikhomirov@516: // Shall connect histories: A(k).bind(B(0))
tikhomirov@516: HistoryNode junctionDest = changeHistory.get(0); // B(0)
tikhomirov@516: // junctionNode is A(k)
tikhomirov@516: junctionNode.bindChild(junctionDest);
tikhomirov@516: } else {
tikhomirov@522: assert iterateDirection == HgIterateDirection.NewToOld;
tikhomirov@516: // changeHistory points to A
tikhomirov@516: // Already reported B(m), B(m-1)...B(0), B(0) is in junctionNode
tikhomirov@516: // Shall connect histories A(k).bind(B(0))
tikhomirov@516: // if followAncestry: A(k) is latest in changeHistory (k == n)
tikhomirov@692: HistoryNode junctionSrc = findJunctionPointInCurrentChunk(curRename.lastFileRev()); // A(k)
tikhomirov@516: junctionSrc.bindChild(junctionNode);
tikhomirov@516: }
tikhomirov@516: }
tikhomirov@516:
tikhomirov@516: private HistoryNode findJunctionPointInCurrentChunk(Nodeid fileRevision) {
tikhomirov@516: if (followAncestry) {
tikhomirov@516: // use the fact we don't go past junction point when followAncestry == true
tikhomirov@516: HistoryNode rv = changeHistory.get(changeHistory.size() - 1);
tikhomirov@516: assert rv.fileRevision.equals(fileRevision);
tikhomirov@516: return rv;
tikhomirov@516: }
tikhomirov@516: for (HistoryNode n : changeHistory) {
tikhomirov@516: if (n.fileRevision.equals(fileRevision)) {
tikhomirov@516: return n;
tikhomirov@516: }
tikhomirov@516: }
tikhomirov@516: int csetStart = changeHistory.get(0).changeset;
tikhomirov@516: int csetEnd = changeHistory.get(changeHistory.size() - 1).changeset;
tikhomirov@516: throw new HgInvalidStateException(String.format("For change history (cset[%d..%d]) could not find node for file change %s", csetStart, csetEnd, fileRevision.shortNotation()));
tikhomirov@516: }
tikhomirov@516:
tikhomirov@628: protected abstract void once(HistoryNode n) throws HgCallbackTargetException, CancelledException, HgRuntimeException;
tikhomirov@516:
tikhomirov@628: public void dispatchAllChanges() throws HgCallbackTargetException, CancelledException, HgRuntimeException {
tikhomirov@516: // XXX shall sort changeHistory according to changeset numbers?
tikhomirov@516: Iterator<HistoryNode> it;
tikhomirov@522: if (iterateDirection == HgIterateDirection.OldToNew) {
tikhomirov@516: it = changeHistory.listIterator();
tikhomirov@516: } else {
tikhomirov@522: assert iterateDirection == HgIterateDirection.NewToOld;
tikhomirov@516: it = new ReverseIterator<HistoryNode>(changeHistory);
tikhomirov@516: }
tikhomirov@516: while(it.hasNext()) {
tikhomirov@516: HistoryNode n = it.next();
tikhomirov@516: once(n);
tikhomirov@516: progress.worked(1);
tikhomirov@516: }
tikhomirov@516: changeHistory = null;
tikhomirov@516: }
tikhomirov@516:
tikhomirov@516: public void switchTo(HgDataFile df) {
tikhomirov@516: // from now on, use df in TreeElement
tikhomirov@516: currentFileNode = df;
tikhomirov@516: }
tikhomirov@516: }
tikhomirov@516:
tikhomirov@507:
tikhomirov@64: //
tikhomirov@64:
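tikhomirov@520: /**
tikhomirov@520: * Applies the command's filters (changeset range, branches, users, limit; date is TODO) to raw changesets
tikhomirov@520: * before handing them to the delegate inspector; uses Lifecycle to stop iteration once the limit is hit.
tikhomirov@520: */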
tikhomirov@520: private class FilteringInspector extends AdapterPlug implements HgChangelog.Inspector, Adaptable {
tikhomirov@518:
tikhomirov@518: private int firstCset = BAD_REVISION, lastCset = BAD_REVISION;
tikhomirov@520: private HgChangelog.Inspector delegate;
tikhomirov@520: // we use lifecycle to stop when limit is reached.
tikhomirov@520: // delegate, however, may use lifecycle, too, so give it a chance
tikhomirov@520: private LifecycleProxy lifecycleProxy;
tikhomirov@518:
tikhomirov@518: // limit to changesets in this range only
tikhomirov@518: public void changesets(int start, int end) {
tikhomirov@518: firstCset = start;
tikhomirov@518: lastCset = end;
tikhomirov@64: }
tikhomirov@520:
tikhomirov@520: public void delegateTo(HgChangelog.Inspector inspector) {
tikhomirov@520: delegate = inspector;
tikhomirov@520: // let delegate control life cycle, too
tikhomirov@520: if (lifecycleProxy == null) {
tikhomirov@520: super.attachAdapter(Lifecycle.class, lifecycleProxy = new LifecycleProxy(inspector));
tikhomirov@520: } else {
tikhomirov@520: lifecycleProxy.init(inspector);
tikhomirov@520: }
tikhomirov@520: }
tikhomirov@518:
tikhomirov@628: public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) throws HgRuntimeException {
tikhomirov@518: if (limit > 0 && count >= limit) {
tikhomirov@518: return;
tikhomirov@518: }
tikhomirov@518: // XXX may benefit from optional interface with #isInterested(int csetRev) - to avoid
tikhomirov@518: // RawChangeset instantiation
tikhomirov@518: if (firstCset != BAD_REVISION && revisionNumber < firstCset) {
tikhomirov@518: return;
tikhomirov@518: }
tikhomirov@518: if (lastCset != BAD_REVISION && revisionNumber > lastCset) {
tikhomirov@518: return;
tikhomirov@518: }
tikhomirov@518: if (branches != null && !branches.contains(cset.branch())) {
tikhomirov@518: return;
tikhomirov@518: }
tikhomirov@518: if (users != null) {
tikhomirov@518: String csetUser = cset.user().toLowerCase();
tikhomirov@518: boolean found = false;
tikhomirov@518: for (String u : users) {
tikhomirov@518: if (csetUser.indexOf(u) != -1) {
tikhomirov@518: found = true;
tikhomirov@518: break;
tikhomirov@518: }
tikhomirov@518: }
tikhomirov@518: if (!found) {
tikhomirov@518: return;
tikhomirov@64: }
tikhomirov@64: }
tikhomirov@518: if (date != null) {
tikhomirov@518: // TODO post-1.0 implement date support for log
tikhomirov@518: }
tikhomirov@520: delegate.next(revisionNumber, nodeid, cset);
tikhomirov@520: count++;
tikhomirov@520: if (limit > 0 && count >= limit) {
tikhomirov@520: lifecycleProxy.stop();
tikhomirov@64: }
tikhomirov@64: }
tikhomirov@520: }
tikhomirov@518:
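tikhomirov@628: // lazily creates and initializes the changelog parent/child map; may return null when create == false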
tikhomirov@628: private HgParentChildMap<HgChangelog> getParentHelper(boolean create) throws HgRuntimeException {
tikhomirov@328: if (parentHelper == null && create) {
tikhomirov@432: parentHelper = new HgParentChildMap<HgChangelog>(repo.getChangelog());
tikhomirov@195: parentHelper.init();
tikhomirov@195: }
tikhomirov@195: return parentHelper;
tikhomirov@195: }
tikhomirov@520:
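tikhomirov@205: /**
tikhomirov@205:  * Handler that clones and accumulates every reported changeset; the collected
tikhomirov@205:  * list is available through {@link #getChanges()}. A minimal usage sketch,
tikhomirov@205:  * assuming an executed log command (here <code>cmd</code>) dispatches to this handler:
tikhomirov@205:  * <pre>
tikhomirov@205:  * CollectHandler collector = new CollectHandler();
tikhomirov@205:  * cmd.execute(collector);
tikhomirov@205:  * List&lt;HgChangeset&gt; all = collector.getChanges();
tikhomirov@205:  * </pre>
tikhomirov@205:  */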
tikhomirov@205: public static class CollectHandler implements HgChangesetHandler {
tikhomirov@129: private final List<HgChangeset> result = new LinkedList<HgChangeset>();
tikhomirov@64:
tikhomirov@129: public List<HgChangeset> getChanges() {
tikhomirov@64: return Collections.unmodifiableList(result);
tikhomirov@64: }
tikhomirov@64:
tikhomirov@427: public void cset(HgChangeset changeset) {
tikhomirov@64: result.add(changeset.clone());
tikhomirov@64: }
tikhomirov@64: }
tikhomirov@328:
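tikhomirov@328: /**
tikhomirov@328:  * Node of the per-file history graph: changeset index, file revision and
tikhomirov@328:  * links to parent and child nodes.
tikhomirov@328:  */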
tikhomirov@328: private static class HistoryNode {
tikhomirov@328: final int changeset;
tikhomirov@328: final Nodeid fileRevision;
tikhomirov@509: HistoryNode parent1; // there's special case when we can alter it, see #bindChild()
tikhomirov@509: final HistoryNode parent2;
tikhomirov@328: List<HistoryNode> children;
tikhomirov@328:
tikhomirov@328: HistoryNode(int cs, Nodeid revision, HistoryNode p1, HistoryNode p2) {
tikhomirov@328: changeset = cs;
tikhomirov@328: fileRevision = revision;
tikhomirov@328: parent1 = p1;
tikhomirov@328: parent2 = p2;
tikhomirov@328: if (p1 != null) {
tikhomirov@328: p1.addChild(this);
tikhomirov@328: }
tikhomirov@328: if (p2 != null) {
tikhomirov@328: p2.addChild(this);
tikhomirov@328: }
tikhomirov@328: }
tikhomirov@328:
tikhomirov@509: private void addChild(HistoryNode child) {
tikhomirov@328: if (children == null) {
tikhomirov@328: children = new ArrayList<HistoryNode>(2);
tikhomirov@328: }
tikhomirov@328: children.add(child);
tikhomirov@328: }
tikhomirov@509:
tikhomirov@509: /**
tikhomirov@509: * Merges two history chunks of a renamed file so that this node's history
tikhomirov@517: * continues (or forks, when followAncestry is not set) with that of the child.
tikhomirov@509: * @param child
tikhomirov@509: */
tikhomirov@509: public void bindChild(HistoryNode child) {
tikhomirov@509: assert child.parent1 == null && child.parent2 == null;
tikhomirov@509: child.parent1 = this;
tikhomirov@509: addChild(child);
tikhomirov@509: }
tikhomirov@511:
tikhomirov@511: public String toString() {
tikhomirov@511: return String.format("<%d, p1:%s, p2:%s>", changeset, parent1 == null ? "-" : String.valueOf(parent1.changeset), parent2 == null ? "-" : String.valueOf(parent2.changeset));
tikhomirov@511: }
tikhomirov@328: }
tikhomirov@328:
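tikhomirov@328: /**
tikhomirov@328:  * TreeElement implementation that resolves changesets, parents and children lazily,
tikhomirov@328:  * caching the HgChangeset instances it reads from the changelog.
tikhomirov@328:  */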
tikhomirov@328: private class ElementImpl implements HgChangesetTreeHandler.TreeElement, HgChangelog.Inspector {
tikhomirov@328: private HistoryNode historyNode;
tikhomirov@515: private HgDataFile fileNode;
tikhomirov@328: private Pair<HgChangeset, HgChangeset> parents;
tikhomirov@328: private List<HgChangeset> children;
tikhomirov@328: private IntMap<HgChangeset> cachedChangesets;
tikhomirov@328: private ChangesetTransformer.Transformation transform;
tikhomirov@328: private Nodeid changesetRevision;
tikhomirov@328: private Pair<Nodeid, Nodeid> parentRevisions;
tikhomirov@328: private List<Nodeid> childRevisions;
tikhomirov@328:
tikhomirov@328: public ElementImpl(int total) {
tikhomirov@328: cachedChangesets = new IntMap<HgChangeset>(total);
tikhomirov@328: }
tikhomirov@328:
tikhomirov@515: ElementImpl init(HistoryNode n, HgDataFile df) {
tikhomirov@328: historyNode = n;
tikhomirov@515: fileNode = df;
tikhomirov@328: parents = null;
tikhomirov@328: children = null;
tikhomirov@328: changesetRevision = null;
tikhomirov@328: parentRevisions = null;
tikhomirov@328: childRevisions = null;
tikhomirov@328: return this;
tikhomirov@328: }
tikhomirov@328:
tikhomirov@328: public Nodeid fileRevision() {
tikhomirov@328: return historyNode.fileRevision;
tikhomirov@328: }
tikhomirov@515:
tikhomirov@515: public HgDataFile file() {
tikhomirov@515: return fileNode;
tikhomirov@515: }
tikhomirov@328:
tikhomirov@628: public HgChangeset changeset() throws HgRuntimeException {
tikhomirov@328: return get(historyNode.changeset)[0];
tikhomirov@328: }
tikhomirov@328:
tikhomirov@628: public Pair<HgChangeset, HgChangeset> parents() throws HgRuntimeException {
tikhomirov@328: if (parents != null) {
tikhomirov@328: return parents;
tikhomirov@328: }
tikhomirov@328: HistoryNode p;
tikhomirov@328: final int p1, p2;
tikhomirov@328: if ((p = historyNode.parent1) != null) {
tikhomirov@328: p1 = p.changeset;
tikhomirov@328: } else {
tikhomirov@328: p1 = -1;
tikhomirov@328: }
tikhomirov@328: if ((p = historyNode.parent2) != null) {
tikhomirov@328: p2 = p.changeset;
tikhomirov@328: } else {
tikhomirov@328: p2 = -1;
tikhomirov@328: }
tikhomirov@328: HgChangeset[] r = get(p1, p2);
tikhomirov@328: return parents = new Pair<HgChangeset, HgChangeset>(r[0], r[1]);
tikhomirov@328: }
tikhomirov@328:
tikhomirov@628: public Collection<HgChangeset> children() throws HgRuntimeException {
tikhomirov@328: if (children != null) {
tikhomirov@328: return children;
tikhomirov@328: }
tikhomirov@328: if (historyNode.children == null) {
tikhomirov@328: children = Collections.emptyList();
tikhomirov@328: } else {
tikhomirov@328: int[] childrenChangesetNumbers = new int[historyNode.children.size()];
tikhomirov@328: int j = 0;
tikhomirov@328: for (HistoryNode hn : historyNode.children) {
tikhomirov@328: childrenChangesetNumbers[j++] = hn.changeset;
tikhomirov@328: }
tikhomirov@328: children = Arrays.asList(get(childrenChangesetNumbers));
tikhomirov@328: }
tikhomirov@328: return children;
tikhomirov@328: }
tikhomirov@328:
tikhomirov@328: void populate(HgChangeset cs) {
tikhomirov@403: cachedChangesets.put(cs.getRevisionIndex(), cs);
tikhomirov@328: }
tikhomirov@328:
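tikhomirov@628: // resolves HgChangeset instances for the supplied changelog revision indexes,
tikhomirov@628: // reading only the ones missing from the cache in a single HgChangelog#range pass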
tikhomirov@628: private HgChangeset[] get(int... changelogRevisionIndex) throws HgRuntimeException {
tikhomirov@403: HgChangeset[] rv = new HgChangeset[changelogRevisionIndex.length];
tikhomirov@403: IntVector misses = new IntVector(changelogRevisionIndex.length, -1);
tikhomirov@403: for (int i = 0; i < changelogRevisionIndex.length; i++) {
tikhomirov@403: if (changelogRevisionIndex[i] == -1) {
tikhomirov@328: rv[i] = null;
tikhomirov@328: continue;
tikhomirov@328: }
tikhomirov@403: HgChangeset cached = cachedChangesets.get(changelogRevisionIndex[i]);
tikhomirov@328: if (cached != null) {
tikhomirov@328: rv[i] = cached;
tikhomirov@328: } else {
tikhomirov@403: misses.add(changelogRevisionIndex[i]);
tikhomirov@328: }
tikhomirov@328: }
tikhomirov@328: if (misses.size() > 0) {
tikhomirov@328: final int[] changesets2read = misses.toArray();
tikhomirov@328: initTransform();
tikhomirov@328: repo.getChangelog().range(this, changesets2read);
tikhomirov@328: for (int changeset2read : changesets2read) {
tikhomirov@328: HgChangeset cs = cachedChangesets.get(changeset2read);
tikhomirov@403: if (cs == null) {
tikhomirov@628: throw new HgInvalidStateException(String.format("Can't get changeset for revision %d", changeset2read));
tikhomirov@403: }
tikhomirov@403: // HgChangelog.range may reorder changesets according to their order in the changelog
tikhomirov@403: // thus need to find original index
tikhomirov@403: boolean sanity = false;
tikhomirov@403: for (int i = 0; i < changelogRevisionIndex.length; i++) {
tikhomirov@403: if (changelogRevisionIndex[i] == cs.getRevisionIndex()) {
tikhomirov@403: rv[i] = cs;
tikhomirov@403: sanity = true;
tikhomirov@403: break;
tikhomirov@328: }
tikhomirov@403: }
tikhomirov@403: if (!sanity) {
tikhomirov@490: repo.getSessionContext().getLog().dump(getClass(), Error, "Index of revision %d:%s doesn't match any of requested", cs.getRevisionIndex(), cs.getNodeid().shortNotation());
tikhomirov@403: }
tikhomirov@403: assert sanity;
tikhomirov@328: }
tikhomirov@328: }
tikhomirov@328: return rv;
tikhomirov@328: }
tikhomirov@328:
tikhomirov@328: // init only when needed
tikhomirov@423: void initTransform() throws HgRuntimeException {
tikhomirov@328: if (transform == null) {
tikhomirov@328: transform = new ChangesetTransformer.Transformation(new HgStatusCollector(repo)/*XXX try to reuse from context?*/, getParentHelper(false));
tikhomirov@328: }
tikhomirov@328: }
tikhomirov@328:
tikhomirov@328: public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
tikhomirov@328: HgChangeset cs = transform.handle(revisionNumber, nodeid, cset);
tikhomirov@328: populate(cs.clone());
tikhomirov@328: }
tikhomirov@328:
tikhomirov@628: public Nodeid changesetRevision() throws HgRuntimeException {
tikhomirov@328: if (changesetRevision == null) {
tikhomirov@328: changesetRevision = getRevision(historyNode.changeset);
tikhomirov@328: }
tikhomirov@328: return changesetRevision;
tikhomirov@328: }
tikhomirov@328:
tikhomirov@628: public Pair<Nodeid, Nodeid> parentRevisions() throws HgRuntimeException {
tikhomirov@328: if (parentRevisions == null) {
tikhomirov@328: HistoryNode p;
tikhomirov@328: final Nodeid p1, p2;
tikhomirov@328: if ((p = historyNode.parent1) != null) {
tikhomirov@328: p1 = getRevision(p.changeset);
tikhomirov@328: } else {
tikhomirov@328: p1 = Nodeid.NULL;
tikhomirov@328: }
tikhomirov@328: if ((p = historyNode.parent2) != null) {
tikhomirov@328: p2 = getRevision(p.changeset);
tikhomirov@328: } else {
tikhomirov@328: p2 = Nodeid.NULL;
tikhomirov@328: }
tikhomirov@328: parentRevisions = new Pair<Nodeid, Nodeid>(p1, p2);
tikhomirov@328: }
tikhomirov@328: return parentRevisions;
tikhomirov@328: }
tikhomirov@328:
tikhomirov@628: public Collection<Nodeid> childRevisions() throws HgRuntimeException {
tikhomirov@328: if (childRevisions != null) {
tikhomirov@328: return childRevisions;
tikhomirov@328: }
tikhomirov@328: if (historyNode.children == null) {
tikhomirov@328: childRevisions = Collections.emptyList();
tikhomirov@328: } else {
tikhomirov@328: ArrayList<Nodeid> rv = new ArrayList<Nodeid>(historyNode.children.size());
tikhomirov@328: for (HistoryNode hn : historyNode.children) {
tikhomirov@328: rv.add(getRevision(hn.changeset));
tikhomirov@328: }
tikhomirov@328: childRevisions = Collections.unmodifiableList(rv);
tikhomirov@328: }
tikhomirov@328: return childRevisions;
tikhomirov@328: }
tikhomirov@328:
tikhomirov@328: // reading a nodeid involves the revlog index only, so multiple reads are cheap enough not to optimize
tikhomirov@628: private Nodeid getRevision(int changelogRevisionNumber) throws HgRuntimeException {
tikhomirov@423: // TODO post-1.0 pipe through pool
tikhomirov@328: HgChangeset cs = cachedChangesets.get(changelogRevisionNumber);
tikhomirov@328: if (cs != null) {
tikhomirov@328: return cs.getNodeid();
tikhomirov@328: } else {
tikhomirov@403: return repo.getChangelog().getRevision(changelogRevisionNumber);
tikhomirov@328: }
tikhomirov@328: }
tikhomirov@328: }
tikhomirov@64: }