tikhomirov@64: /* tikhomirov@427: s * Copyright (c) 2011-2012 TMate Software Ltd tikhomirov@64: * tikhomirov@64: * This program is free software; you can redistribute it and/or modify tikhomirov@64: * it under the terms of the GNU General Public License as published by tikhomirov@64: * the Free Software Foundation; version 2 of the License. tikhomirov@64: * tikhomirov@64: * This program is distributed in the hope that it will be useful, tikhomirov@64: * but WITHOUT ANY WARRANTY; without even the implied warranty of tikhomirov@64: * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the tikhomirov@64: * GNU General Public License for more details. tikhomirov@64: * tikhomirov@64: * For information on how to redistribute this software under tikhomirov@64: * the terms of a license other than GNU General Public License tikhomirov@102: * contact TMate Software at support@hg4j.com tikhomirov@64: */ tikhomirov@64: package org.tmatesoft.hg.core; tikhomirov@64: tikhomirov@74: import static org.tmatesoft.hg.repo.HgRepository.TIP; tikhomirov@456: import static org.tmatesoft.hg.util.LogFacility.Severity.Error; tikhomirov@64: tikhomirov@328: import java.util.ArrayList; tikhomirov@328: import java.util.Arrays; tikhomirov@64: import java.util.Calendar; tikhomirov@328: import java.util.Collection; tikhomirov@64: import java.util.Collections; tikhomirov@64: import java.util.ConcurrentModificationException; tikhomirov@510: import java.util.Iterator; tikhomirov@64: import java.util.LinkedList; tikhomirov@64: import java.util.List; tikhomirov@510: import java.util.ListIterator; tikhomirov@64: import java.util.Set; tikhomirov@64: import java.util.TreeSet; tikhomirov@64: tikhomirov@328: import org.tmatesoft.hg.internal.IntMap; tikhomirov@328: import org.tmatesoft.hg.internal.IntVector; tikhomirov@215: import org.tmatesoft.hg.repo.HgChangelog; tikhomirov@154: import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; tikhomirov@80: import org.tmatesoft.hg.repo.HgDataFile; tikhomirov@423: import org.tmatesoft.hg.repo.HgInvalidControlFileException; tikhomirov@457: import org.tmatesoft.hg.repo.HgInvalidRevisionException; tikhomirov@423: import org.tmatesoft.hg.repo.HgInvalidStateException; tikhomirov@456: import org.tmatesoft.hg.repo.HgParentChildMap; tikhomirov@74: import org.tmatesoft.hg.repo.HgRepository; tikhomirov@423: import org.tmatesoft.hg.repo.HgRuntimeException; tikhomirov@328: import org.tmatesoft.hg.repo.HgStatusCollector; tikhomirov@328: import org.tmatesoft.hg.util.CancelSupport; tikhomirov@157: import org.tmatesoft.hg.util.CancelledException; tikhomirov@328: import org.tmatesoft.hg.util.Pair; tikhomirov@133: import org.tmatesoft.hg.util.Path; tikhomirov@215: import org.tmatesoft.hg.util.ProgressSupport; tikhomirov@64: tikhomirov@64: tikhomirov@64: /** tikhomirov@131: * Access to changelog, 'hg log' command counterpart. tikhomirov@131: * tikhomirov@64: *
tikhomirov@131:  * Usage:
tikhomirov@70:  *   new HgLogCommand(hgRepo).limit(20).branch("maintenance-2.1").user("me").execute(new MyHandler());
tikhomirov@64:  * 
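 * A similar sketch for file history (hgRepo, MyHandler and the path are illustrative client-side
 * names; the command methods used are the ones declared in this class):
 *   new HgLogCommand(hgRepo).file("src/Main.java", true).limit(50).execute(new MyHandler());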
tikhomirov@131: * Not thread-safe (each thread has to use own {@link HgLogCommand} instance). tikhomirov@64: * tikhomirov@64: * @author Artem Tikhomirov tikhomirov@64: * @author TMate Software Ltd. tikhomirov@64: */ tikhomirov@215: public class HgLogCommand extends HgAbstractCommand implements HgChangelog.Inspector { tikhomirov@64: tikhomirov@64: private final HgRepository repo; tikhomirov@64: private Set users; tikhomirov@64: private Set branches; tikhomirov@64: private int limit = 0, count = 0; tikhomirov@64: private int startRev = 0, endRev = TIP; tikhomirov@64: private Calendar date; tikhomirov@77: private Path file; tikhomirov@80: private boolean followHistory; // makes sense only when file != null tikhomirov@193: private ChangesetTransformer csetTransform; tikhomirov@432: private HgParentChildMap parentHelper; tikhomirov@80: tikhomirov@131: public HgLogCommand(HgRepository hgRepo) { tikhomirov@107: repo = hgRepo; tikhomirov@64: } tikhomirov@64: tikhomirov@64: /** tikhomirov@148: * Limit search to specified user. Multiple user names may be specified. Once set, user names can't be tikhomirov@148: * cleared, use new command instance in such cases. tikhomirov@64: * @param user - full or partial name of the user, case-insensitive, non-null. tikhomirov@64: * @return this instance for convenience tikhomirov@148: * @throws IllegalArgumentException when argument is null tikhomirov@64: */ tikhomirov@131: public HgLogCommand user(String user) { tikhomirov@64: if (user == null) { tikhomirov@64: throw new IllegalArgumentException(); tikhomirov@64: } tikhomirov@64: if (users == null) { tikhomirov@64: users = new TreeSet(); tikhomirov@64: } tikhomirov@64: users.add(user.toLowerCase()); tikhomirov@64: return this; tikhomirov@64: } tikhomirov@64: tikhomirov@64: /** tikhomirov@64: * Limit search to specified branch. Multiple branch specification possible (changeset from any of these tikhomirov@148: * would be included in result). If unspecified, all branches are considered. There's no way to clean branch selection tikhomirov@148: * once set, create fresh new command instead. tikhomirov@64: * @param branch - branch name, case-sensitive, non-null. tikhomirov@64: * @return this instance for convenience tikhomirov@148: * @throws IllegalArgumentException when branch argument is null tikhomirov@64: */ tikhomirov@131: public HgLogCommand branch(String branch) { tikhomirov@64: if (branch == null) { tikhomirov@64: throw new IllegalArgumentException(); tikhomirov@64: } tikhomirov@64: if (branches == null) { tikhomirov@64: branches = new TreeSet(); tikhomirov@64: } tikhomirov@64: branches.add(branch); tikhomirov@64: return this; tikhomirov@64: } tikhomirov@64: tikhomirov@64: // limit search to specific date tikhomirov@64: // multiple? tikhomirov@131: public HgLogCommand date(Calendar date) { tikhomirov@64: this.date = date; tikhomirov@418: // TODO post-1.0 implement tikhomirov@64: // isSet(field) - false => don't use in detection of 'same date' tikhomirov@64: throw HgRepository.notImplemented(); tikhomirov@64: } tikhomirov@64: tikhomirov@64: /** tikhomirov@64: * tikhomirov@64: * @param num - number of changeset to produce. Pass 0 to clear the limit. tikhomirov@64: * @return this instance for convenience tikhomirov@64: */ tikhomirov@131: public HgLogCommand limit(int num) { tikhomirov@64: limit = num; tikhomirov@64: return this; tikhomirov@64: } tikhomirov@64: tikhomirov@64: /** tikhomirov@64: * Limit to specified subset of Changelog, [min(rev1,rev2), max(rev1,rev2)], inclusive. 
tikhomirov@64: * Revision may be specified with {@link HgRepository#TIP} tikhomirov@427: * tikhomirov@427: * @param rev1 - local index of start changeset revision tikhomirov@427: * @param rev2 - index of end changeset revision tikhomirov@64: * @return this instance for convenience tikhomirov@64: */ tikhomirov@131: public HgLogCommand range(int rev1, int rev2) { tikhomirov@64: if (rev1 != TIP && rev2 != TIP) { tikhomirov@64: startRev = rev2 < rev1 ? rev2 : rev1; tikhomirov@64: endRev = startRev == rev2 ? rev1 : rev2; tikhomirov@64: } else if (rev1 == TIP && rev2 != TIP) { tikhomirov@64: startRev = rev2; tikhomirov@64: endRev = rev1; tikhomirov@64: } else { tikhomirov@64: startRev = rev1; tikhomirov@64: endRev = rev2; tikhomirov@64: } tikhomirov@64: return this; tikhomirov@64: } tikhomirov@64: tikhomirov@77: /** tikhomirov@253: * Select specific changeset tikhomirov@253: * tikhomirov@253: * @param nid changeset revision tikhomirov@253: * @return this for convenience tikhomirov@427: * @throws HgBadArgumentException if failed to find supplied changeset revision tikhomirov@253: */ tikhomirov@427: public HgLogCommand changeset(Nodeid nid) throws HgBadArgumentException { tikhomirov@253: // XXX perhaps, shall support multiple (...) arguments and extend #execute to handle not only range, but also set of revisions. tikhomirov@427: try { tikhomirov@427: final int csetRevIndex = repo.getChangelog().getRevisionIndex(nid); tikhomirov@427: return range(csetRevIndex, csetRevIndex); tikhomirov@457: } catch (HgInvalidRevisionException ex) { tikhomirov@427: throw new HgBadArgumentException("Can't find revision", ex).setRevision(nid); tikhomirov@427: } tikhomirov@253: } tikhomirov@253: tikhomirov@253: /** tikhomirov@77: * Visit history of a given file only. tikhomirov@77: * @param file path relative to repository root. Pass null to reset. tikhomirov@80: * @param followCopyRename true to report changesets of the original file(-s), if copy/rename ever occured to the file. tikhomirov@77: */ tikhomirov@131: public HgLogCommand file(Path file, boolean followCopyRename) { tikhomirov@77: // multiple? Bad idea, would need to include extra method into Handler to tell start of next file tikhomirov@77: this.file = file; tikhomirov@80: followHistory = followCopyRename; tikhomirov@77: return this; tikhomirov@64: } tikhomirov@142: tikhomirov@142: /** tikhomirov@142: * Handy analog of {@link #file(Path, boolean)} when clients' paths come from filesystem and need conversion to repository's tikhomirov@142: */ tikhomirov@142: public HgLogCommand file(String file, boolean followCopyRename) { tikhomirov@142: return file(Path.create(repo.getToRepoPathHelper().rewrite(file)), followCopyRename); tikhomirov@142: } tikhomirov@64: tikhomirov@64: /** tikhomirov@419: * Similar to {@link #execute(HgChangesetHandler)}, collects and return result as a list. 
tikhomirov@427: * tikhomirov@427: * @see #execute(HgChangesetHandler) tikhomirov@427: * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state tikhomirov@64: */ tikhomirov@396: public List execute() throws HgException { tikhomirov@64: CollectHandler collector = new CollectHandler(); tikhomirov@215: try { tikhomirov@215: execute(collector); tikhomirov@423: } catch (HgCallbackTargetException ex) { tikhomirov@423: // see below for CanceledException tikhomirov@423: HgInvalidStateException t = new HgInvalidStateException("Internal error"); tikhomirov@423: t.initCause(ex); tikhomirov@423: throw t; tikhomirov@396: } catch (CancelledException ex) { tikhomirov@215: // can't happen as long as our CollectHandler doesn't throw any exception tikhomirov@423: HgInvalidStateException t = new HgInvalidStateException("Internal error"); tikhomirov@423: t.initCause(ex); tikhomirov@423: throw t; tikhomirov@215: } tikhomirov@64: return collector.getChanges(); tikhomirov@64: } tikhomirov@64: tikhomirov@64: /** tikhomirov@402: * Iterate over range of changesets configured in the command. tikhomirov@64: * tikhomirov@205: * @param handler callback to process changesets. tikhomirov@427: * @throws HgCallbackTargetException propagated exception from the handler tikhomirov@427: * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state tikhomirov@380: * @throws CancelledException if execution of the command was cancelled tikhomirov@64: * @throws IllegalArgumentException when inspector argument is null tikhomirov@64: * @throws ConcurrentModificationException if this log command instance is already running tikhomirov@64: */ tikhomirov@370: public void execute(HgChangesetHandler handler) throws HgCallbackTargetException, HgException, CancelledException { tikhomirov@64: if (handler == null) { tikhomirov@64: throw new IllegalArgumentException(); tikhomirov@64: } tikhomirov@193: if (csetTransform != null) { tikhomirov@64: throw new ConcurrentModificationException(); tikhomirov@64: } tikhomirov@215: final ProgressSupport progressHelper = getProgressSupport(handler); tikhomirov@64: try { tikhomirov@64: count = 0; tikhomirov@432: HgParentChildMap pw = getParentHelper(file == null); // leave it uninitialized unless we iterate whole repo tikhomirov@193: // ChangesetTransfrom creates a blank PathPool, and #file(String, boolean) above tikhomirov@193: // may utilize it as well. CommandContext? How about StatusCollector there as well? tikhomirov@322: csetTransform = new ChangesetTransformer(repo, handler, pw, progressHelper, getCancelSupport(handler, true)); tikhomirov@77: if (file == null) { tikhomirov@215: progressHelper.start(endRev - startRev + 1); tikhomirov@77: repo.getChangelog().range(startRev, endRev, this); tikhomirov@215: csetTransform.checkFailure(); tikhomirov@77: } else { tikhomirov@215: progressHelper.start(-1/*XXX enum const, or a dedicated method startUnspecified(). How about startAtLeast(int)?*/); tikhomirov@80: HgDataFile fileNode = repo.getFileNode(file); tikhomirov@427: if (!fileNode.exists()) { tikhomirov@427: throw new HgPathNotFoundException(String.format("File %s not found in the repository", file), file); tikhomirov@427: } tikhomirov@509: // FIXME startRev and endRev ARE CHANGESET REVISIONS, not that of FILE!!! 
tikhomirov@80: fileNode.history(startRev, endRev, this); tikhomirov@215: csetTransform.checkFailure(); tikhomirov@126: if (fileNode.isCopy()) { tikhomirov@80: // even if we do not follow history, report file rename tikhomirov@80: do { tikhomirov@427: if (handler instanceof HgChangesetHandler.WithCopyHistory) { tikhomirov@415: HgFileRevision src = new HgFileRevision(repo, fileNode.getCopySourceRevision(), null, fileNode.getCopySourceName()); tikhomirov@415: HgFileRevision dst = new HgFileRevision(repo, fileNode.getRevision(0), null, fileNode.getPath(), src.getPath()); tikhomirov@427: ((HgChangesetHandler.WithCopyHistory) handler).copy(src, dst); tikhomirov@126: } tikhomirov@80: if (limit > 0 && count >= limit) { tikhomirov@80: // if limit reach, follow is useless. tikhomirov@80: break; tikhomirov@80: } tikhomirov@80: if (followHistory) { tikhomirov@126: fileNode = repo.getFileNode(fileNode.getCopySourceName()); tikhomirov@80: fileNode.history(this); tikhomirov@215: csetTransform.checkFailure(); tikhomirov@80: } tikhomirov@80: } while (followHistory && fileNode.isCopy()); tikhomirov@80: } tikhomirov@77: } tikhomirov@427: } catch (HgRuntimeException ex) { tikhomirov@427: throw new HgLibraryFailureException(ex); tikhomirov@64: } finally { tikhomirov@193: csetTransform = null; tikhomirov@215: progressHelper.done(); tikhomirov@64: } tikhomirov@64: } tikhomirov@328: tikhomirov@370: /** tikhomirov@402: * Tree-wise iteration of a file history, with handy access to parent-child relations between changesets. tikhomirov@402: * tikhomirov@402: * @param handler callback to process changesets. tikhomirov@427: * @throws HgCallbackTargetException propagated exception from the handler tikhomirov@427: * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state tikhomirov@380: * @throws CancelledException if execution of the command was cancelled tikhomirov@402: * @throws IllegalArgumentException if command is not satisfied with its arguments tikhomirov@402: * @throws ConcurrentModificationException if this log command instance is already running tikhomirov@370: */ tikhomirov@370: public void execute(HgChangesetTreeHandler handler) throws HgCallbackTargetException, HgException, CancelledException { tikhomirov@328: if (handler == null) { tikhomirov@328: throw new IllegalArgumentException(); tikhomirov@328: } tikhomirov@328: if (csetTransform != null) { tikhomirov@328: throw new ConcurrentModificationException(); tikhomirov@328: } tikhomirov@328: if (file == null) { tikhomirov@328: throw new IllegalArgumentException("History tree is supported for files only (at least now), please specify file"); tikhomirov@328: } tikhomirov@328: final ProgressSupport progressHelper = getProgressSupport(handler); tikhomirov@328: final CancelSupport cancelHelper = getCancelSupport(handler, true); tikhomirov@507: tikhomirov@508: // builds tree of nodes according to parents in file's revlog tikhomirov@508: final TreeBuildInspector treeBuildInspector = new TreeBuildInspector(followHistory); tikhomirov@509: // we iterate separate histories of each filename, need to connect tikhomirov@509: // last node of historyA with first node of historyB (A renamed to B case) tikhomirov@509: // to make overall history smooth. tikhomirov@509: HistoryNode lastFromPrevIteration = null; tikhomirov@509: tikhomirov@509: final int CACHE_CSET_IN_ADVANCE_THRESHOLD = 100; /* XXX is it really worth it? 
*/ tikhomirov@509: ElementImpl ei = null; tikhomirov@508: tikhomirov@510: // renamed files in the queue are placed with respect to #iterateDirection tikhomirov@510: // i.e. if we iterate from new to old, recent filenames come first tikhomirov@508: LinkedList> fileRenamesQueue = buildFileRenamesQueue(); tikhomirov@507: progressHelper.start(4 * fileRenamesQueue.size()); tikhomirov@507: do { tikhomirov@510: tikhomirov@510: Pair renameInfo = fileRenamesQueue.removeFirst(); tikhomirov@507: cancelHelper.checkCancelled(); tikhomirov@508: HgDataFile fileNode = renameInfo.first(); tikhomirov@508: Nodeid fileLastRevToVisit = null; tikhomirov@508: if (followHistory) { tikhomirov@508: fileLastRevToVisit = renameInfo.second(); tikhomirov@508: if (fileLastRevToVisit == null) { tikhomirov@510: // it's either first or last item in the queue, depending on iteration order tikhomirov@510: assert fileRenamesQueue.isEmpty() || /*awful way to find out it's first iteration*/ lastFromPrevIteration == null; tikhomirov@508: // TODO subject to dedicated method either in HgRepository (getWorkingCopyParentRevisionIndex) tikhomirov@508: // or in the HgDataFile (getWorkingCopyOriginRevision) tikhomirov@508: Nodeid wdParentChangeset = repo.getWorkingCopyParents().first(); tikhomirov@508: if (!wdParentChangeset.isNull()) { tikhomirov@508: int wdParentRevIndex = repo.getChangelog().getRevisionIndex(wdParentChangeset); tikhomirov@508: fileLastRevToVisit = repo.getManifest().getFileRevision(wdParentRevIndex, fileNode.getPath()); tikhomirov@508: } tikhomirov@508: // else fall-through, assume lastRevision() is ok here tikhomirov@508: } tikhomirov@508: } tikhomirov@508: int fileLastRevIndexToVisit = fileLastRevToVisit == null ? fileNode.getLastRevision() : fileNode.getRevisionIndex(fileLastRevToVisit); tikhomirov@509: final List changeHistory = treeBuildInspector.go(fileNode, fileLastRevIndexToVisit); tikhomirov@509: assert changeHistory.size() > 0; tikhomirov@328: progressHelper.worked(1); tikhomirov@423: cancelHelper.checkCancelled(); tikhomirov@507: final ProgressSupport ph2; tikhomirov@509: if (ei == null) { tikhomirov@509: // when follow is true, changeHistory.size() of the first revision might be quite short tikhomirov@509: // (e.g. bad fname recognized soon), hence ensure at least cache size at once tikhomirov@509: ei = new ElementImpl(Math.max(CACHE_CSET_IN_ADVANCE_THRESHOLD, changeHistory.size())); tikhomirov@509: } tikhomirov@509: if (changeHistory.size() < CACHE_CSET_IN_ADVANCE_THRESHOLD ) { tikhomirov@509: int[] commitRevisions = treeBuildInspector.getCommitRevisions(); tikhomirov@509: assert changeHistory.size() == commitRevisions.length; tikhomirov@508: // read bunch of changesets at once and cache 'em tikhomirov@507: ei.initTransform(); tikhomirov@508: repo.getChangelog().range(ei, commitRevisions); tikhomirov@507: progressHelper.worked(1); tikhomirov@507: ph2 = new ProgressSupport.Sub(progressHelper, 2); tikhomirov@507: } else { tikhomirov@507: ph2 = new ProgressSupport.Sub(progressHelper, 3); tikhomirov@507: } tikhomirov@509: ph2.start(changeHistory.size()); tikhomirov@509: if (lastFromPrevIteration != null) { tikhomirov@510: if (iterateDirection == IterateDirection.FromOldToNew) { tikhomirov@510: // forward, from old to new: tikhomirov@510: // A(0..n) -> B(0..m). 
First, report A(0)..A(n-1) tikhomirov@510: // then A(n).bind(B(0)) tikhomirov@510: HistoryNode oldestOfTheNextChunk = changeHistory.get(0); tikhomirov@510: lastFromPrevIteration.bindChild(oldestOfTheNextChunk); tikhomirov@510: changeHistory.add(0, lastFromPrevIteration); tikhomirov@510: } else { tikhomirov@510: assert iterateDirection == IterateDirection.FromNewToOld; tikhomirov@510: // A renamed to B. A(0..n) -> B(0..m). tikhomirov@510: // First, report B(m), B(m-1)...B(1), then A(n).bind(B(0)) tikhomirov@510: HistoryNode newestOfNextChunk = changeHistory.get(changeHistory.size() - 1); // A(n) tikhomirov@510: newestOfNextChunk.bindChild(lastFromPrevIteration); tikhomirov@510: changeHistory.add(lastFromPrevIteration); tikhomirov@510: } tikhomirov@509: } tikhomirov@509: if (!fileRenamesQueue.isEmpty()) { tikhomirov@510: if (iterateDirection == IterateDirection.FromOldToNew) { tikhomirov@510: // save newest, and exclude it from this iteration (postpone for next) tikhomirov@510: lastFromPrevIteration = changeHistory.remove(changeHistory.size()-1); tikhomirov@510: } else { tikhomirov@510: assert iterateDirection == IterateDirection.FromNewToOld; tikhomirov@510: // save oldest, and exclude it from thi iteration (postpone for next) tikhomirov@510: lastFromPrevIteration = changeHistory.remove(0); tikhomirov@510: } tikhomirov@509: } else { tikhomirov@509: lastFromPrevIteration = null; // just for the sake of no references to old items tikhomirov@509: } tikhomirov@509: // XXX shall sort changeHistory according to changeset numbers? tikhomirov@510: Iterator it; tikhomirov@510: if (iterateDirection == IterateDirection.FromOldToNew) { tikhomirov@510: it = changeHistory.listIterator(); tikhomirov@510: } else { tikhomirov@510: assert iterateDirection == IterateDirection.FromNewToOld; tikhomirov@510: it = new ReverseIterator(changeHistory); tikhomirov@510: } tikhomirov@510: while(it.hasNext()) { tikhomirov@510: HistoryNode n = it.next(); tikhomirov@507: handler.treeElement(ei.init(n)); tikhomirov@507: ph2.worked(1); tikhomirov@507: cancelHelper.checkCancelled(); tikhomirov@507: } tikhomirov@507: } while (!fileRenamesQueue.isEmpty()); tikhomirov@328: progressHelper.done(); tikhomirov@328: } tikhomirov@328: tikhomirov@510: private IterateDirection iterateDirection = IterateDirection.FromOldToNew; tikhomirov@510: tikhomirov@510: private static class ReverseIterator implements Iterator { tikhomirov@510: private final ListIterator listIterator; tikhomirov@510: tikhomirov@510: public ReverseIterator(List list) { tikhomirov@510: listIterator = list.listIterator(list.size()); tikhomirov@510: } tikhomirov@510: tikhomirov@510: public boolean hasNext() { tikhomirov@510: return listIterator.hasPrevious(); tikhomirov@510: } tikhomirov@510: public E next() { tikhomirov@510: return listIterator.previous(); tikhomirov@510: } tikhomirov@510: public void remove() { tikhomirov@510: listIterator.remove(); tikhomirov@510: } tikhomirov@510: } tikhomirov@510: tikhomirov@507: /** tikhomirov@508: * Follows file renames and build a list of all corresponding file nodes and revisions they were tikhomirov@508: * copied/renamed/branched at (IOW, their latest revision to look at). tikhomirov@508: * tikhomirov@508: * If {@link #followHistory} is false, the list contains one element only, tikhomirov@508: * file node with the name of the file as it was specified by the user. tikhomirov@508: * tikhomirov@508: * For the most recent file revision is null. 
tikhomirov@508: * tikhomirov@508: * TODO may use HgFileRevision (after some refactoring to accept HgDataFile and Nodeid) instead of Pair tikhomirov@508: * and possibly reuse this functionality tikhomirov@507: * tikhomirov@510: * @return list of file renames, ordered with respect to {@link #iterateDirection} tikhomirov@507: */ tikhomirov@508: private LinkedList> buildFileRenamesQueue() { tikhomirov@508: LinkedList> rv = new LinkedList>(); tikhomirov@507: if (!followHistory) { tikhomirov@508: rv.add(new Pair(repo.getFileNode(file), null)); tikhomirov@507: return rv; tikhomirov@507: } tikhomirov@507: Path fp = file; tikhomirov@508: Nodeid copyRev = null; tikhomirov@507: boolean isCopy; tikhomirov@507: do { tikhomirov@508: HgDataFile fileNode = repo.getFileNode(fp); tikhomirov@510: Pair p = new Pair(fileNode, copyRev); tikhomirov@510: if (iterateDirection == IterateDirection.FromOldToNew) { tikhomirov@510: rv.addFirst(p); tikhomirov@510: } else { tikhomirov@510: assert iterateDirection == IterateDirection.FromNewToOld; tikhomirov@510: rv.addLast(p); tikhomirov@510: } tikhomirov@507: if (isCopy = fileNode.isCopy()) { tikhomirov@507: fp = fileNode.getCopySourceName(); tikhomirov@508: copyRev = fileNode.getCopySourceRevision(); tikhomirov@507: } tikhomirov@507: } while (isCopy); tikhomirov@507: return rv; tikhomirov@507: } tikhomirov@508: tikhomirov@508: private static class TreeBuildInspector implements HgChangelog.ParentInspector, HgChangelog.RevisionInspector { tikhomirov@508: private final boolean followAncestry; tikhomirov@508: tikhomirov@508: private HistoryNode[] completeHistory; tikhomirov@508: private int[] commitRevisions; tikhomirov@509: private List resultHistory; tikhomirov@508: tikhomirov@508: TreeBuildInspector(boolean _followAncestry) { tikhomirov@508: followAncestry = _followAncestry; tikhomirov@508: } tikhomirov@508: tikhomirov@508: public void next(int revisionNumber, Nodeid revision, int linkedRevision) { tikhomirov@508: commitRevisions[revisionNumber] = linkedRevision; tikhomirov@508: } tikhomirov@508: tikhomirov@508: public void next(int revisionNumber, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2) { tikhomirov@508: HistoryNode p1 = null, p2 = null; tikhomirov@508: if (parent1 != -1) { tikhomirov@508: p1 = completeHistory[parent1]; tikhomirov@508: } tikhomirov@508: if (parent2!= -1) { tikhomirov@508: p2 = completeHistory[parent2]; tikhomirov@508: } tikhomirov@508: completeHistory[revisionNumber] = new HistoryNode(commitRevisions[revisionNumber], revision, p1, p2); tikhomirov@508: } tikhomirov@508: tikhomirov@508: /** tikhomirov@508: * Builds history of file changes (in natural order, from oldest to newest) up to (and including) file revision specified. tikhomirov@508: * If {@link TreeBuildInspector} follows ancestry, only elements that are on the line of ancestry of the revision at tikhomirov@508: * lastRevisionIndex would be included. tikhomirov@509: * tikhomirov@509: * @return list of history elements, from oldest to newest. 
In case {@link #followAncestry} is true, the list tikhomirov@509: * is modifiable (to further augment with last/first elements of renamed file histories) tikhomirov@508: */ tikhomirov@509: List go(HgDataFile fileNode, int lastRevisionIndex) throws HgInvalidControlFileException { tikhomirov@509: resultHistory = null; tikhomirov@508: completeHistory = new HistoryNode[lastRevisionIndex+1]; tikhomirov@508: commitRevisions = new int[completeHistory.length]; tikhomirov@508: fileNode.indexWalk(0, lastRevisionIndex, this); tikhomirov@508: if (!followAncestry) { tikhomirov@509: // in case when ancestor not followed, it's safe to return unmodifiable list tikhomirov@509: resultHistory = Arrays.asList(completeHistory); tikhomirov@509: completeHistory = null; tikhomirov@509: // keep commitRevisions initialized, no need to recalculate them tikhomirov@509: // as they correspond 1:1 to resultHistory tikhomirov@509: return resultHistory; tikhomirov@508: } tikhomirov@508: /* tikhomirov@509: * Changesets, newest at the top: tikhomirov@508: * o <-- cset from working dir parent (as in dirstate), file not changed (file revision recorded points to that from A) tikhomirov@508: * | x <-- revision with file changed (B') tikhomirov@508: * x / <-- revision with file changed (A) tikhomirov@508: * | x <-- revision with file changed (B) tikhomirov@508: * |/ tikhomirov@508: * o <-- another changeset, where file wasn't changed tikhomirov@508: * | tikhomirov@508: * x <-- revision with file changed (C) tikhomirov@508: * tikhomirov@508: * File history: B', A, B, C tikhomirov@508: * tikhomirov@508: * When "follow", SHALL NOT report B and B', but A and C tikhomirov@508: */ tikhomirov@508: // strippedHistory: only those HistoryNodes from completeHistory that are on the same tikhomirov@508: // line of descendant, in order from older to newer tikhomirov@508: LinkedList strippedHistoryList = new LinkedList(); tikhomirov@508: LinkedList queue = new LinkedList(); tikhomirov@508: // look for ancestors of the selected history node tikhomirov@508: queue.add(completeHistory[lastRevisionIndex]); tikhomirov@508: do { tikhomirov@508: HistoryNode withFileChange = queue.removeFirst(); tikhomirov@508: if (withFileChange.children != null) { tikhomirov@508: withFileChange.children.retainAll(strippedHistoryList); tikhomirov@508: } tikhomirov@508: strippedHistoryList.addFirst(withFileChange); tikhomirov@508: if (withFileChange.parent1 != null) { tikhomirov@508: queue.addLast(withFileChange.parent1); tikhomirov@508: } tikhomirov@508: if (withFileChange.parent2 != null) { tikhomirov@508: queue.addLast(withFileChange.parent2); tikhomirov@508: } tikhomirov@508: } while (!queue.isEmpty()); tikhomirov@508: completeHistory = null; tikhomirov@508: commitRevisions = null; tikhomirov@508: // collected values are no longer valid - shall tikhomirov@508: // strip off elements for missing HistoryNodes, but it's easier just to re-create the array tikhomirov@509: // from resultHistory later, once (and if) needed tikhomirov@509: return resultHistory = strippedHistoryList; tikhomirov@508: } tikhomirov@508: tikhomirov@508: /** tikhomirov@508: * handy access to all HistoryNode[i].changeset values tikhomirov@508: */ tikhomirov@508: int[] getCommitRevisions() { tikhomirov@509: if (commitRevisions == null) { tikhomirov@509: commitRevisions = new int[resultHistory.size()]; tikhomirov@509: int i = 0; tikhomirov@509: for (HistoryNode n : resultHistory) { tikhomirov@509: commitRevisions[i++] = n.changeset; tikhomirov@509: } tikhomirov@509: } tikhomirov@508: return 
commitRevisions; tikhomirov@508: } tikhomirov@508: }; tikhomirov@508: tikhomirov@507: tikhomirov@64: // tikhomirov@64: tikhomirov@154: public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) { tikhomirov@64: if (limit > 0 && count >= limit) { tikhomirov@64: return; tikhomirov@64: } tikhomirov@64: if (branches != null && !branches.contains(cset.branch())) { tikhomirov@64: return; tikhomirov@64: } tikhomirov@64: if (users != null) { tikhomirov@64: String csetUser = cset.user().toLowerCase(); tikhomirov@64: boolean found = false; tikhomirov@64: for (String u : users) { tikhomirov@64: if (csetUser.indexOf(u) != -1) { tikhomirov@64: found = true; tikhomirov@64: break; tikhomirov@64: } tikhomirov@64: } tikhomirov@64: if (!found) { tikhomirov@64: return; tikhomirov@64: } tikhomirov@64: } tikhomirov@64: if (date != null) { tikhomirov@418: // TODO post-1.0 implement date support for log tikhomirov@64: } tikhomirov@64: count++; tikhomirov@193: csetTransform.next(revisionNumber, nodeid, cset); tikhomirov@64: } tikhomirov@195: tikhomirov@432: private HgParentChildMap getParentHelper(boolean create) throws HgInvalidControlFileException { tikhomirov@328: if (parentHelper == null && create) { tikhomirov@432: parentHelper = new HgParentChildMap(repo.getChangelog()); tikhomirov@195: parentHelper.init(); tikhomirov@195: } tikhomirov@195: return parentHelper; tikhomirov@195: } tikhomirov@195: tikhomirov@64: tikhomirov@205: public static class CollectHandler implements HgChangesetHandler { tikhomirov@129: private final List result = new LinkedList(); tikhomirov@64: tikhomirov@129: public List getChanges() { tikhomirov@64: return Collections.unmodifiableList(result); tikhomirov@64: } tikhomirov@64: tikhomirov@427: public void cset(HgChangeset changeset) { tikhomirov@64: result.add(changeset.clone()); tikhomirov@64: } tikhomirov@64: } tikhomirov@328: tikhomirov@328: private static class HistoryNode { tikhomirov@328: final int changeset; tikhomirov@328: final Nodeid fileRevision; tikhomirov@509: HistoryNode parent1; // there's special case when we can alter it, see #bindChild() tikhomirov@509: final HistoryNode parent2; tikhomirov@328: List children; tikhomirov@328: tikhomirov@328: HistoryNode(int cs, Nodeid revision, HistoryNode p1, HistoryNode p2) { tikhomirov@328: changeset = cs; tikhomirov@328: fileRevision = revision; tikhomirov@328: parent1 = p1; tikhomirov@328: parent2 = p2; tikhomirov@328: if (p1 != null) { tikhomirov@328: p1.addChild(this); tikhomirov@328: } tikhomirov@328: if (p2 != null) { tikhomirov@328: p2.addChild(this); tikhomirov@328: } tikhomirov@328: } tikhomirov@328: tikhomirov@509: private void addChild(HistoryNode child) { tikhomirov@328: if (children == null) { tikhomirov@328: children = new ArrayList(2); tikhomirov@328: } tikhomirov@328: children.add(child); tikhomirov@328: } tikhomirov@509: tikhomirov@509: /** tikhomirov@509: * method to merge two history chunks for renamed file so that tikhomirov@509: * this node's history continues with that of child tikhomirov@509: * @param child tikhomirov@509: */ tikhomirov@509: public void bindChild(HistoryNode child) { tikhomirov@509: assert child.parent1 == null && child.parent2 == null; tikhomirov@509: // for the last element in history empty children are by construction: tikhomirov@509: // we don't iterate further than last element of interest in TreeBuildInspector#go tikhomirov@509: assert children == null || children.isEmpty(); tikhomirov@509: child.parent1 = this; tikhomirov@509: addChild(child); tikhomirov@509: } 
tikhomirov@328: } tikhomirov@328: tikhomirov@328: private class ElementImpl implements HgChangesetTreeHandler.TreeElement, HgChangelog.Inspector { tikhomirov@328: private HistoryNode historyNode; tikhomirov@328: private Pair parents; tikhomirov@328: private List children; tikhomirov@328: private IntMap cachedChangesets; tikhomirov@328: private ChangesetTransformer.Transformation transform; tikhomirov@328: private Nodeid changesetRevision; tikhomirov@328: private Pair parentRevisions; tikhomirov@328: private List childRevisions; tikhomirov@328: tikhomirov@328: public ElementImpl(int total) { tikhomirov@328: cachedChangesets = new IntMap(total); tikhomirov@328: } tikhomirov@328: tikhomirov@328: ElementImpl init(HistoryNode n) { tikhomirov@328: historyNode = n; tikhomirov@328: parents = null; tikhomirov@328: children = null; tikhomirov@328: changesetRevision = null; tikhomirov@328: parentRevisions = null; tikhomirov@328: childRevisions = null; tikhomirov@328: return this; tikhomirov@328: } tikhomirov@328: tikhomirov@328: public Nodeid fileRevision() { tikhomirov@328: return historyNode.fileRevision; tikhomirov@328: } tikhomirov@328: tikhomirov@423: public HgChangeset changeset() { tikhomirov@328: return get(historyNode.changeset)[0]; tikhomirov@328: } tikhomirov@328: tikhomirov@423: public Pair parents() { tikhomirov@328: if (parents != null) { tikhomirov@328: return parents; tikhomirov@328: } tikhomirov@328: HistoryNode p; tikhomirov@328: final int p1, p2; tikhomirov@328: if ((p = historyNode.parent1) != null) { tikhomirov@328: p1 = p.changeset; tikhomirov@328: } else { tikhomirov@328: p1 = -1; tikhomirov@328: } tikhomirov@328: if ((p = historyNode.parent2) != null) { tikhomirov@328: p2 = p.changeset; tikhomirov@328: } else { tikhomirov@328: p2 = -1; tikhomirov@328: } tikhomirov@328: HgChangeset[] r = get(p1, p2); tikhomirov@328: return parents = new Pair(r[0], r[1]); tikhomirov@328: } tikhomirov@328: tikhomirov@423: public Collection children() { tikhomirov@328: if (children != null) { tikhomirov@328: return children; tikhomirov@328: } tikhomirov@328: if (historyNode.children == null) { tikhomirov@328: children = Collections.emptyList(); tikhomirov@328: } else { tikhomirov@328: int[] childrentChangesetNumbers = new int[historyNode.children.size()]; tikhomirov@328: int j = 0; tikhomirov@328: for (HistoryNode hn : historyNode.children) { tikhomirov@328: childrentChangesetNumbers[j++] = hn.changeset; tikhomirov@328: } tikhomirov@328: children = Arrays.asList(get(childrentChangesetNumbers)); tikhomirov@328: } tikhomirov@328: return children; tikhomirov@328: } tikhomirov@328: tikhomirov@328: void populate(HgChangeset cs) { tikhomirov@403: cachedChangesets.put(cs.getRevisionIndex(), cs); tikhomirov@328: } tikhomirov@328: tikhomirov@423: private HgChangeset[] get(int... 
changelogRevisionIndex) { tikhomirov@403: HgChangeset[] rv = new HgChangeset[changelogRevisionIndex.length]; tikhomirov@403: IntVector misses = new IntVector(changelogRevisionIndex.length, -1); tikhomirov@403: for (int i = 0; i < changelogRevisionIndex.length; i++) { tikhomirov@403: if (changelogRevisionIndex[i] == -1) { tikhomirov@328: rv[i] = null; tikhomirov@328: continue; tikhomirov@328: } tikhomirov@403: HgChangeset cached = cachedChangesets.get(changelogRevisionIndex[i]); tikhomirov@328: if (cached != null) { tikhomirov@328: rv[i] = cached; tikhomirov@328: } else { tikhomirov@403: misses.add(changelogRevisionIndex[i]); tikhomirov@328: } tikhomirov@328: } tikhomirov@328: if (misses.size() > 0) { tikhomirov@328: final int[] changesets2read = misses.toArray(); tikhomirov@328: initTransform(); tikhomirov@328: repo.getChangelog().range(this, changesets2read); tikhomirov@328: for (int changeset2read : changesets2read) { tikhomirov@328: HgChangeset cs = cachedChangesets.get(changeset2read); tikhomirov@403: if (cs == null) { tikhomirov@423: HgInvalidStateException t = new HgInvalidStateException(String.format("Can't get changeset for revision %d", changeset2read)); tikhomirov@423: throw t.setRevisionIndex(changeset2read); tikhomirov@403: } tikhomirov@403: // HgChangelog.range may reorder changesets according to their order in the changelog tikhomirov@403: // thus need to find original index tikhomirov@403: boolean sanity = false; tikhomirov@403: for (int i = 0; i < changelogRevisionIndex.length; i++) { tikhomirov@403: if (changelogRevisionIndex[i] == cs.getRevisionIndex()) { tikhomirov@403: rv[i] = cs; tikhomirov@403: sanity = true; tikhomirov@403: break; tikhomirov@328: } tikhomirov@403: } tikhomirov@403: if (!sanity) { tikhomirov@490: repo.getSessionContext().getLog().dump(getClass(), Error, "Index of revision %d:%s doesn't match any of requested", cs.getRevisionIndex(), cs.getNodeid().shortNotation()); tikhomirov@403: } tikhomirov@403: assert sanity; tikhomirov@328: } tikhomirov@328: } tikhomirov@328: return rv; tikhomirov@328: } tikhomirov@328: tikhomirov@328: // init only when needed tikhomirov@423: void initTransform() throws HgRuntimeException { tikhomirov@328: if (transform == null) { tikhomirov@328: transform = new ChangesetTransformer.Transformation(new HgStatusCollector(repo)/*XXX try to reuse from context?*/, getParentHelper(false)); tikhomirov@328: } tikhomirov@328: } tikhomirov@328: tikhomirov@328: public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) { tikhomirov@328: HgChangeset cs = transform.handle(revisionNumber, nodeid, cset); tikhomirov@328: populate(cs.clone()); tikhomirov@328: } tikhomirov@328: tikhomirov@423: public Nodeid changesetRevision() { tikhomirov@328: if (changesetRevision == null) { tikhomirov@328: changesetRevision = getRevision(historyNode.changeset); tikhomirov@328: } tikhomirov@328: return changesetRevision; tikhomirov@328: } tikhomirov@328: tikhomirov@423: public Pair parentRevisions() { tikhomirov@328: if (parentRevisions == null) { tikhomirov@328: HistoryNode p; tikhomirov@328: final Nodeid p1, p2; tikhomirov@328: if ((p = historyNode.parent1) != null) { tikhomirov@328: p1 = getRevision(p.changeset); tikhomirov@328: } else { tikhomirov@328: p1 = Nodeid.NULL;; tikhomirov@328: } tikhomirov@328: if ((p = historyNode.parent2) != null) { tikhomirov@328: p2 = getRevision(p.changeset); tikhomirov@328: } else { tikhomirov@328: p2 = Nodeid.NULL; tikhomirov@328: } tikhomirov@328: parentRevisions = new Pair(p1, p2); tikhomirov@328: } 
tikhomirov@328: return parentRevisions; tikhomirov@328: } tikhomirov@328: tikhomirov@423: public Collection childRevisions() { tikhomirov@328: if (childRevisions != null) { tikhomirov@328: return childRevisions; tikhomirov@328: } tikhomirov@328: if (historyNode.children == null) { tikhomirov@328: childRevisions = Collections.emptyList(); tikhomirov@328: } else { tikhomirov@328: ArrayList rv = new ArrayList(historyNode.children.size()); tikhomirov@328: for (HistoryNode hn : historyNode.children) { tikhomirov@328: rv.add(getRevision(hn.changeset)); tikhomirov@328: } tikhomirov@328: childRevisions = Collections.unmodifiableList(rv); tikhomirov@328: } tikhomirov@328: return childRevisions; tikhomirov@328: } tikhomirov@328: tikhomirov@328: // reading nodeid involves reading index only, guess, can afford not to optimize multiple reads tikhomirov@423: private Nodeid getRevision(int changelogRevisionNumber) { tikhomirov@423: // TODO post-1.0 pipe through pool tikhomirov@328: HgChangeset cs = cachedChangesets.get(changelogRevisionNumber); tikhomirov@328: if (cs != null) { tikhomirov@328: return cs.getNodeid(); tikhomirov@328: } else { tikhomirov@403: return repo.getChangelog().getRevision(changelogRevisionNumber); tikhomirov@328: } tikhomirov@328: } tikhomirov@328: } tikhomirov@510: tikhomirov@510: private enum IterateDirection { tikhomirov@510: FromOldToNew, FromNewToOld tikhomirov@510: } tikhomirov@64: }
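/*
 * Usage sketch for the tree-wise iteration, execute(HgChangesetTreeHandler), defined above.
 * Illustrative only: hgRepo and the file path are placeholders, the handler is assumed to
 * implement the single #treeElement callback this command invokes, and checked exceptions
 * declared by execute(...) are left unhandled for brevity. The TreeElement accessors shown
 * (fileRevision, changesetRevision, children) are the ones ElementImpl provides.
 *
 *   HgLogCommand cmd = new HgLogCommand(hgRepo);
 *   cmd.file("src/Main.java", true);
 *   cmd.execute(new HgChangesetTreeHandler() {
 *       public void treeElement(HgChangesetTreeHandler.TreeElement entry) {
 *           System.out.printf("%s at cset %s, %d child changeset(s)%n",
 *                   entry.fileRevision(), entry.changesetRevision(), entry.children().size());
 *       }
 *   });
 */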