/*
 * Copyright (c) 2010-2012 TMate Software Ltd
 *  
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.repo;

import static org.tmatesoft.hg.repo.HgInternals.wrongRevisionIndex;
import static org.tmatesoft.hg.repo.HgRepository.*;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.tmatesoft.hg.core.HgException;
import org.tmatesoft.hg.core.HgInvalidControlFileException;
import org.tmatesoft.hg.core.HgInvalidFileException;
import org.tmatesoft.hg.core.HgInvalidRevisionException;
import org.tmatesoft.hg.core.HgLogCommand;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.internal.DataAccess;
import org.tmatesoft.hg.internal.FilterByteChannel;
import org.tmatesoft.hg.internal.FilterDataAccess;
import org.tmatesoft.hg.internal.IntMap;
import org.tmatesoft.hg.internal.RevlogStream;
import org.tmatesoft.hg.util.ByteChannel;
import org.tmatesoft.hg.util.CancelSupport;
import org.tmatesoft.hg.util.CancelledException;
import org.tmatesoft.hg.util.LogFacility;
import org.tmatesoft.hg.util.Pair;
import org.tmatesoft.hg.util.Path;
import org.tmatesoft.hg.util.ProgressSupport;


/**
 * Regular user data file as stored in the repository.
 * (XXX rename to HgFileNode?)
 *
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class HgDataFile extends Revlog {

	// absolute from repo root?
	// slashes, unix-style?
	// repo location agnostic, just to give info to user, not to access real storage
	private final Path path;
	private Metadata metadata; // gets initialized on first access to file content.
	
	/*package-local*/HgDataFile(HgRepository hgRepo, Path filePath, RevlogStream content) {
		super(hgRepo, content);
		path = filePath;
	}
	
	/*package-local*/HgDataFile(HgRepository hgRepo, Path filePath) {
		super(hgRepo);
		path = filePath;
	}
	
	// exists() is not the best name possible. Presently it tells whether a file with this name was ever known to the repo,
	// which might be confused with files that existed once but were removed later.
	public boolean exists() {
		return content != null; // XXX need better impl
	}
	
	// human-readable (i.e. "COPYING", not "store/data/_c_o_p_y_i_n_g.i")
	public Path getPath() {
		return path; // hgRepo.backresolve(this) -> name? In this case, what about hashed long names?
	}
	
	/**
	 * Handy shorthand for {@link #length(int) length(getRevisionIndex(nodeid))}
	 *
	 * @param nodeid revision of the file
	 * 
	 * @return size of the file content at the given revision
	 * @throws HgInvalidRevisionException if supplied nodeid doesn't identify any revision from this revlog (runtime exception)
	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
	 */
	public int length(Nodeid nodeid) throws HgInvalidControlFileException, HgInvalidRevisionException {
		try {
			return length(getRevisionIndex(nodeid));
		} catch (HgInvalidControlFileException ex) {
			throw ex.isRevisionSet() ? ex : ex.setRevision(nodeid);
		} catch (HgInvalidRevisionException ex) {
			throw ex.isRevisionSet() ? ex : ex.setRevision(nodeid);
		}
	}
	
	/**
	 * @param fileRevisionIndex - revision local index, non-negative. From predefined constants, {@link HgRepository#TIP} and {@link HgRepository#WORKING_COPY} make sense.
	 * @return size of the file content at the revision identified by local revision number.
	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog (runtime exception)
	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
	 */
	public int length(int fileRevisionIndex) throws HgInvalidControlFileException, HgInvalidRevisionException {
		if (wrongRevisionIndex(fileRevisionIndex) || fileRevisionIndex == BAD_REVISION) {
			throw new HgInvalidRevisionException(fileRevisionIndex);
		}
		if (fileRevisionIndex == TIP) {
			fileRevisionIndex = getLastRevision();
		} else if (fileRevisionIndex == WORKING_COPY) {
			File f = getRepo().getFile(this);
			if (f.exists()) {
				return (int) /*FIXME long!*/ f.length();
			}
			Nodeid fileRev = getWorkingCopyRevision();
			if (fileRev == null) {
				throw new HgInvalidRevisionException(String.format("File %s is not part of working copy", getPath()), null, fileRevisionIndex);
			}
			fileRevisionIndex = getRevisionIndex(fileRev);
		}
		if (metadata == null || !metadata.checked(fileRevisionIndex)) {
			checkAndRecordMetadata(fileRevisionIndex);
		}
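		// metadata (copy/rename record), if any, is stored at the head of the revision data;
		// report the length of the actual file content only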
		final int dataLen = content.dataLength(fileRevisionIndex);
		if (metadata.known(fileRevisionIndex)) {
			return dataLen - metadata.dataOffset(fileRevisionIndex);
		}
		return dataLen;
	}
	
	/**
	 * Reads content of the file from the working directory. If the file is present in the working directory, its actual content,
	 * without any filters, is supplied through the sink. If the file does not exist in the working dir, this method provides content
	 * as if the file were to be refreshed in the working copy, i.e. its corresponding revision (according to dirstate) is read from
	 * the repository, and filters repo -> working copy get applied.
	 * 
	 * NOTE: if the file is missing from the working directory and is not part of the dirstate (but is otherwise a legal repository
	 * file, e.g. from another branch), no content is supplied.
	 * 
	 * @param sink content consumer
	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
	 * @throws HgInvalidFileException if access to file in working directory failed
	 * @throws CancelledException if execution of the operation was cancelled
	 */
	public void workingCopy(ByteChannel sink) throws HgException, CancelledException {
		File f = getRepo().getFile(this);
		if (f.exists()) {
			final CancelSupport cs = CancelSupport.Factory.get(sink);
			final ProgressSupport progress = ProgressSupport.Factory.get(sink);
			final long flength = f.length();
			final int bsize = (int) Math.min(flength, 32*1024);
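			// for files larger than Integer.MAX_VALUE bytes, report progress in 32KB units so the total fits into an int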
			progress.start((int) (flength > Integer.MAX_VALUE ? flength >>> 15 /*32 kb buf size*/ : flength));
			ByteBuffer buf = ByteBuffer.allocate(bsize);
			FileChannel fc = null;
			try {
				fc = new FileInputStream(f).getChannel();
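				// regular NIO copy loop: fill the buffer, flip it for the sink to consume, compact to keep unconsumed bytes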
				while (fc.read(buf) != -1) {
					cs.checkCancelled();
					buf.flip();
					int consumed = sink.write(buf);
					progress.worked(flength > Integer.MAX_VALUE ? 1 : consumed);
					buf.compact();
				}
			} catch (IOException ex) {
				throw new HgInvalidFileException("Working copy read failed", ex, f);
			} finally {
				progress.done();
				if (fc != null) {
					try {
						fc.close();
					} catch (IOException ex) {
						getRepo().getContext().getLog().info(getClass(), ex, null);
					}
				}
			}
		} else {
			Nodeid fileRev = getWorkingCopyRevision();
			if (fileRev == null) {
				// no content for this data file in the working copy - it is not part of the actual working state.
				// XXX perhaps, shall report this to caller somehow, not silently pass no data?
				return;
			}
			final int fileRevIndex = getRevisionIndex(fileRev);
			contentWithFilters(fileRevIndex, sink);
		}
	}
	
	/**
	 * @return file revision as recorded in repository manifest for dirstate parent, or null if no file revision can be found
	 */
	private Nodeid getWorkingCopyRevision() throws HgInvalidControlFileException {
		final Pair<Nodeid, Nodeid> wcParents = getRepo().getWorkingCopyParents();
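		// prefer the first dirstate parent; fall back to the second only when the first is null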
		Nodeid p = wcParents.first().isNull() ? wcParents.second() : wcParents.first();
		final HgChangelog clog = getRepo().getChangelog();
		final int csetRevIndex;
		if (p.isNull()) {
			// no dirstate parents
			getRepo().getContext().getLog().info(getClass(), "No dirstate parents, resort to TIP", getPath());
			// if it's a repository with no dirstate, use TIP then
			csetRevIndex = clog.getLastRevision();
			if (csetRevIndex == -1) {
				// shall not happen provided there's .i for this data file (hence at least one cset)
				// and perhaps exception is better here. However, null as "can't find" indication seems reasonable.
				return null;
			}
		} else {
			// common case to avoid searching complete changelog for nodeid match
			final Nodeid tipRev = clog.getRevision(TIP);
			if (tipRev.equals(p)) {
				csetRevIndex = clog.getLastRevision();
			} else {
				// bad luck, need to search honestly
				csetRevIndex = getRepo().getChangelog().getRevisionIndex(p);
			}
		}
		Nodeid fileRev = getRepo().getManifest().getFileRevision(csetRevIndex, getPath());
		// it's possible for a file to be in working dir and have store/.i but to belong e.g. to a different
		// branch than the one from dirstate. Thus it's possible to get null fileRev
		// which would serve as an indication this data file is not part of working copy
		return fileRev;
	}
	
	/**
	 * Access content of a file revision
	 * XXX not sure a distinct method contentWithFilters() is the best approach; perhaps callers shall add filters themselves?
	 * 
	 * @param fileRevisionIndex - revision local index, non-negative. From predefined constants, {@link HgRepository#TIP} and {@link HgRepository#WORKING_COPY} make sense.
	 * @param sink content consumer
	 * 
	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
	 * @throws HgInvalidFileException if access to file in working directory failed
	 * @throws CancelledException if execution of the operation was cancelled
	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog (runtime exception)
	 */
	public void contentWithFilters(int fileRevisionIndex, ByteChannel sink) throws HgException, CancelledException, HgInvalidRevisionException {
		if (fileRevisionIndex == WORKING_COPY) {
			workingCopy(sink); // pass un-mangled sink
		} else {
			content(fileRevisionIndex, new FilterByteChannel(sink, getRepo().getFiltersFromRepoToWorkingDir(getPath())));
		}
	}

	/**
	 * Retrieve content of specific revision. Content is provided as is, without any filters (e.g. keywords, eol, etc.) applied.
	 * For filtered content, use {@link #contentWithFilters(int, ByteChannel)}.
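	 * <p>
	 * A minimal usage sketch (assumes a {@link ByteChannel} implementation that simply collects the bytes it receives,
	 * e.g. a ByteArrayChannel-like helper; the file name is for illustration only):
	 * <pre>
	 *   HgDataFile df = hgRepo.getFileNode("COPYING");
	 *   ByteArrayChannel sink = new ByteArrayChannel();
	 *   df.content(HgRepository.TIP, sink); // raw content of the tip revision, no keyword/eol filters
	 * </pre>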
tikhomirov@367: *
tikhomirov@368: * @param fileRevisionIndex - revision local index, non-negative. From predefined constants, {@link HgRepository#TIP} and {@link HgRepository#WORKING_COPY} make sense.
tikhomirov@396: * @param sink content consumer
tikhomirov@396: *
tikhomirov@380: * @throws HgInvalidControlFileException if access to revlog index/data entry failed
tikhomirov@396: * @throws HgInvalidFileException if access to file in working directory failed
tikhomirov@380: * @throws CancelledException if execution of the operation was cancelled
tikhomirov@396: * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog (runtime exception)
tikhomirov@367: */
tikhomirov@396: public void content(int fileRevisionIndex, ByteChannel sink) throws HgException, CancelledException, HgInvalidRevisionException {
tikhomirov@367: // for data files need to check heading of the file content for possible metadata
tikhomirov@367: // @see http://mercurial.selenic.com/wiki/FileFormats#data.2BAC8-
tikhomirov@367: if (fileRevisionIndex == TIP) {
tikhomirov@367: fileRevisionIndex = getLastRevision();
tikhomirov@78: }
tikhomirov@367: if (fileRevisionIndex == WORKING_COPY) {
tikhomirov@237: // sink is supposed to come into workingCopy without filters
tikhomirov@237: // thus we shall not get here (into #content) from #contentWithFilters(WC)
tikhomirov@157: workingCopy(sink);
tikhomirov@157: return;
tikhomirov@157: }
tikhomirov@367: if (wrongRevisionIndex(fileRevisionIndex) || fileRevisionIndex == BAD_REVISION) {
tikhomirov@367: throw new HgInvalidRevisionException(fileRevisionIndex);
tikhomirov@148: }
tikhomirov@157: if (sink == null) {
tikhomirov@157: throw new IllegalArgumentException();
tikhomirov@157: }
tikhomirov@134: if (metadata == null) {
tikhomirov@134: metadata = new Metadata();
tikhomirov@134: }
tikhomirov@277: ErrorHandlingInspector insp;
tikhomirov@388: final LogFacility lf = getRepo().getContext().getLog();
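		// pick the way to stream the revision: no metadata -> pipe as is; metadata known -> skip its bytes;
		// not checked yet -> let MetadataInspector detect and record it first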
		if (metadata.none(fileRevisionIndex)) {
			insp = new ContentPipe(sink, 0, lf);
		} else if (metadata.known(fileRevisionIndex)) {
			insp = new ContentPipe(sink, metadata.dataOffset(fileRevisionIndex), lf);
		} else {
			// do not know if there's metadata
			insp = new MetadataInspector(metadata, lf, new ContentPipe(sink, 0, lf));
		}
		insp.checkCancelled();
		super.content.iterate(fileRevisionIndex, fileRevisionIndex, true, insp);
		try {
			insp.checkFailed(); // XXX is there real need to throw IOException from ContentPipe?
		} catch (HgInvalidControlFileException ex) {
			ex = ex.setFileName(getPath());
			throw ex.isRevisionIndexSet() ? ex : ex.setRevisionIndex(fileRevisionIndex);
		} catch (IOException ex) {
			HgInvalidControlFileException e = new HgInvalidControlFileException("Revision content access failed", ex, null);
			throw content.initWithIndexFile(e).setFileName(getPath()).setRevisionIndex(fileRevisionIndex);
		} catch (HgException ex) {
			// shall not happen, unless we changed ContentPipe or its subclass
			HgInvalidControlFileException e = new HgInvalidControlFileException("Revision content access failed", ex, null);
			throw content.initWithIndexFile(e).setFileName(getPath()).setRevisionIndex(fileRevisionIndex);
		}
	}
	
	private static class HistoryNode {
		int changeset;
		Nodeid cset;
		HistoryNode parent1, parent2;
		List<HistoryNode> children;

		HistoryNode(int cs, HistoryNode p1, HistoryNode p2) {
			changeset = cs;
			parent1 = p1;
			parent2 = p2;
			if (p1 != null) {
				p1.addChild(this);
			}
			if (p2 != null) {
				p2.addChild(this);
			}
		}
		
		Nodeid changesetRevision() {
			assert cset != null : "we initialize all csets prior to use";
			return cset;
		}

		void addChild(HistoryNode child) {
			if (children == null) {
				children = new ArrayList<HistoryNode>(2);
			}
			children.add(child);
		}
	}

	/**
	 * @deprecated use {@link HgLogCommand#execute(org.tmatesoft.hg.core.HgChangesetTreeHandler)} instead
	 */
	@Deprecated
	public void history(HgChangelog.TreeInspector inspector) throws HgInvalidControlFileException {
		final CancelSupport cancelSupport = CancelSupport.Factory.get(inspector);
		try {
			final boolean[] needsSorting = { false };
			final HistoryNode[] completeHistory = new HistoryNode[getRevisionCount()];
			final int[] commitRevisions = new int[completeHistory.length];
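			// commitRevisions[fileRevIndex] records the changelog (changeset) index each file revision is linked to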
			RevlogStream.Inspector insp = new RevlogStream.Inspector() {
				public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
					if (revisionNumber > 0) {
						if (commitRevisions[revisionNumber-1] > linkRevision) {
							needsSorting[0] = true;
						}
					}
					commitRevisions[revisionNumber] = linkRevision;
					HistoryNode p1 = null, p2 = null;
					if (parent1Revision != -1) {
						p1 = completeHistory[parent1Revision];
					}
					if (parent2Revision != -1) {
						p2 = completeHistory[parent2Revision];
					}
					completeHistory[revisionNumber] = new HistoryNode(linkRevision, p1, p2);
				}
			};
			content.iterate(0, getLastRevision(), false, insp);
			cancelSupport.checkCancelled();
			if (needsSorting[0]) {
				Arrays.sort(commitRevisions);
			}
			// read changeset revisions at once (to avoid numerous changelog.getRevision reads)
			// but just nodeids, not RawChangeset (changelog.iterate(data=false))
			ArrayList<Nodeid> changesetRevisions = new ArrayList<Nodeid>(commitRevisions.length);
			getRepo().getChangelog().getRevisionsInternal(changesetRevisions, commitRevisions);
			cancelSupport.checkCancelled();
			// assign them to corresponding HistoryNodes
			for (int i = 0; i < completeHistory.length; i++ ) {
				final HistoryNode n = completeHistory[i];
				if (needsSorting[0]) {
					int x = Arrays.binarySearch(commitRevisions, n.changeset);
					assert x >= 0;
					n.cset = changesetRevisions.get(x);
				} else {
					// commit revisions were not sorted, may use original index directly
					n.cset = changesetRevisions.get(i);
				}
			}
			cancelSupport.checkCancelled();
			// XXX shall sort completeHistory according to changeset numbers?
			for (int i = 0; i < completeHistory.length; i++ ) {
				final HistoryNode n = completeHistory[i];
				HistoryNode p;
				Nodeid p1, p2;
				if ((p = n.parent1) != null) {
					p1 = p.changesetRevision();
				} else {
					p1 = Nodeid.NULL;
				}
				if ((p = n.parent2) != null) {
					p2 = p.changesetRevision();
				} else {
					p2 = Nodeid.NULL;
				}
				final Pair<Nodeid, Nodeid> parentChangesets = new Pair<Nodeid, Nodeid>(p1, p2);
				final List<Nodeid> childChangesets;
				if (n.children == null) {
					childChangesets = Collections.emptyList();
				} else {
					Nodeid[] revisions = new Nodeid[n.children.size()];
					int j = 0;
					for (HistoryNode hn : n.children) {
						revisions[j++] = hn.changesetRevision();
					}
					childChangesets = Arrays.asList(revisions);
				}
				inspector.next(n.changesetRevision(), parentChangesets, childChangesets);
				cancelSupport.checkCancelled();
			}
		} catch (CancelledException ex) {
			return;
		}
	}
	
	public void history(HgChangelog.Inspector inspector) throws HgInvalidControlFileException {
		history(0, getLastRevision(), inspector);
	}

	public void history(int start, int end, HgChangelog.Inspector inspector) throws HgInvalidRevisionException, HgInvalidControlFileException {
		if (!exists()) {
			throw new IllegalStateException("Can't get history of invalid repository file node");
		}
		final int last = getLastRevision();
		if (end == TIP) {
			end = last;
		}
		if (start == TIP) {
			start = last;
		}
		HgInternals.checkRevlogRange(start, end, last);

		final int[] commitRevisions = new int[end - start + 1];
		final boolean[] needsSorting = { false };
		RevlogStream.Inspector insp = new RevlogStream.Inspector() {
			int count = 0;
			public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
				if (count > 0) {
					if (commitRevisions[count - 1] > linkRevision) {
						needsSorting[0] = true;
					}
				}
				commitRevisions[count++] = linkRevision;
			}
		};
		content.iterate(start, end, false, insp);
		final HgChangelog changelog = getRepo().getChangelog();
		if (needsSorting[0]) {
			// automatic tools (svnmerge?) produce unnatural file history
			// (e.g. cpython/Lib/doctest.py, revision 164 points to cset 63509, 165 - to 38453)
			Arrays.sort(commitRevisions);
		}
		changelog.rangeInternal(inspector, commitRevisions);
	}
	
	/**
	 * For a given revision of the file (identified with revision index), find out index of the corresponding changeset.
	 *
	 * @param revision file revision index
	 * @return changeset revision index
	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog
	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
	 */
	public int getChangesetRevisionIndex(int revision) throws HgInvalidControlFileException, HgInvalidRevisionException {
		return content.linkRevision(revision);
	}

	/**
	 * @deprecated use {@link #getChangesetRevisionIndex(int)} instead
	 */
	@Deprecated
	public int getChangesetLocalRevision(int revision) throws HgInvalidControlFileException, HgInvalidRevisionException {
		return getChangesetRevisionIndex(revision);
	}

	/**
	 * Complements {@link #getChangesetRevisionIndex(int)} to get changeset revision that corresponds to supplied file revision
	 * 
	 * @param nid revision of the file
	 * @return changeset revision
	 * @throws HgInvalidRevisionException if supplied nodeid doesn't identify any revision from this revlog
	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
	 */
	public Nodeid getChangesetRevision(Nodeid nid) throws HgInvalidControlFileException, HgInvalidRevisionException {
		int changelogRevision = getChangesetRevisionIndex(getRevisionIndex(nid));
		return getRepo().getChangelog().getRevision(changelogRevision);
	}
	
	/**
	 * Tells whether this file originates from another repository file
	 * @return true if this file is a copy of another file from the repository
	 * @throws HgInvalidControlFileException if access to revlog or file metadata failed
	 */
	public boolean isCopy() throws HgInvalidControlFileException {
		if (metadata == null || !metadata.checked(0)) {
			checkAndRecordMetadata(0);
		}
		if (!metadata.known(0)) {
			return false;
		}
		return metadata.find(0, "copy") != null;
	}

	/**
	 * Get name of the file this one was copied from.
	 * 
	 * @return name of the file origin
	 * @throws HgInvalidControlFileException if access to revlog or file metadata failed
	 * @throws UnsupportedOperationException if this file doesn't represent a copy ({@link #isCopy()} was false)
	 */
	public Path getCopySourceName() throws HgInvalidControlFileException {
		if (isCopy()) {
			return Path.create(metadata.find(0, "copy"));
		}
		throw new UnsupportedOperationException(); // XXX REVISIT, think over if Exception is good (clients would check isCopy() anyway, perhaps null is sufficient?)
	}
	
	/**
	 * Get revision of the file this one was copied from.
	 * 
	 * @return revision this file was copied from
	 * @throws HgInvalidControlFileException if access to revlog or file metadata failed
	 * @throws UnsupportedOperationException if this file doesn't represent a copy ({@link #isCopy()} was false)
	 */
	public Nodeid getCopySourceRevision() throws HgInvalidControlFileException {
		if (isCopy()) {
			return Nodeid.fromAscii(metadata.find(0, "copyrev")); // XXX reuse/cache Nodeid
		}
		throw new UnsupportedOperationException();
	}
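
	// Copy-tracing sketch (the file name is hypothetical, for illustration only):
	//   HgDataFile df = hgRepo.getFileNode("dir/renamed.txt");
	//   if (df.isCopy()) {
	//       Path origin = df.getCopySourceName();          // the file the content originates from
	//       Nodeid originRev = df.getCopySourceRevision(); // and the file revision it was copied at
	//   }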
	/* FIXME
	public Nodeid getRevisionAtChangeset(int changesetRevision) {
	}
	
	public HgManifest.Flags getFlagsAtChangeset(int changesetRevisionIndex) {
	}
	*/
	
	/**
	 * Get file flags recorded in the manifest of the changeset the given file revision was introduced with.
	 * FIXME EXCEPTIONS
	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog
	 */
	public HgManifest.Flags getFlags(int fileRevisionIndex) throws HgInvalidControlFileException, HgInvalidRevisionException {
		int changesetRevIndex = getChangesetRevisionIndex(fileRevisionIndex);
		return getRepo().getManifest().extractFlags(changesetRevIndex, getPath());
	}
	
	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder(getClass().getSimpleName());
		sb.append('(');
		sb.append(getPath());
		sb.append(')');
		return sb.toString();
	}
	
	private void checkAndRecordMetadata(int localRev) throws HgInvalidControlFileException {
		// content() always initializes metadata.
		// TODO [post-1.0] this is expensive way to find out metadata, distinct RevlogStream.Iterator would be better.
		// Alternatively, may parameterize MetadataContentPipe to do prepare only.
		// For reference, when throwing CancelledException, hg status -A --rev 3:80 takes 70 ms
		// however, if we just consume buffer instead (buffer.position(buffer.limit())), same command takes ~320ms
		// (compared to command-line counterpart of 190ms)
		try {
			content(localRev, new ByteChannel() { // No-op channel
				public int write(ByteBuffer buffer) throws IOException, CancelledException {
					throw new CancelledException();
				}
			});
		} catch (CancelledException ex) {
			// it's ok, we did that
		} catch (HgInvalidControlFileException ex) {
			throw ex.isRevisionIndexSet() ? ex : ex.setRevisionIndex(localRev);
		} catch (HgException ex) {
			// metadata comes from the content, hence initWithDataFile
			throw content.initWithDataFile(new HgInvalidControlFileException(null, ex, null));
		}
	}

	private static final class MetadataEntry {
		private final String entry;
		private final int valueStart;
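		// key and value are kept concatenated in a single string; valueStart marks where the value begins (-1 when there is no key)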
		
		// key may be null
		/*package-local*/MetadataEntry(String key, String value) {
			if (key == null) {
				entry = value;
				valueStart = -1; // not 0 to tell between key == null and key == ""
			} else {
				entry = key + value;
				valueStart = key.length();
			}
		}
		/*package-local*/boolean matchKey(String key) {
			return key == null ? valueStart == -1 : key.length() == valueStart && entry.startsWith(key);
		}
//		uncomment once/if needed
//		public String key() {
//			return entry.substring(0, valueStart);
//		}
		public String value() {
			return valueStart == -1 ? entry : entry.substring(valueStart);
		}
	}

	private static class Metadata {
		private static class Record {
			public final int offset;
			public final MetadataEntry[] entries;
			
			public Record(int off, MetadataEntry[] entr) {
				offset = off;
				entries = entr;
			}
		}
		// XXX sparse array needed
		private final IntMap<Record> entries = new IntMap<Record>(5);
		
		private final Record NONE = new Record(-1, null); // don't want statics

		// true when there's metadata for given revision
		boolean known(int revision) {
			Record i = entries.get(revision);
			return i != null && NONE != i;
		}

		// true when revision has been checked for metadata presence.
		public boolean checked(int revision) {
			return entries.containsKey(revision);
		}

		// true when revision has been checked and found not having any metadata
		boolean none(int revision) {
			Record i = entries.get(revision);
			return i == NONE;
		}

		// mark revision as having no metadata.
		void recordNone(int revision) {
			Record i = entries.get(revision);
			if (i == NONE) {
				return; // already there
			}
			if (i != null) {
				throw new IllegalStateException(String.format("Trying to override Metadata state for revision %d (known offset: %d)", revision, i.offset));
			}
			entries.put(revision, NONE);
		}

		// since this is internal class, callers are supposed to ensure arg correctness (i.e. ask known() before)
		int dataOffset(int revision) {
			return entries.get(revision).offset;
		}
		void add(int revision, int dataOffset, Collection<MetadataEntry> e) {
			assert !entries.containsKey(revision);
			entries.put(revision, new Record(dataOffset, e.toArray(new MetadataEntry[e.size()])));
		}

		String find(int revision, String key) {
			for (MetadataEntry me : entries.get(revision).entries) {
				if (me.matchKey(key)) {
					return me.value();
				}
			}
			return null;
		}
	}
	
	private static class MetadataInspector extends ErrorHandlingInspector implements RevlogStream.Inspector {
		private final Metadata metadata;
		private final RevlogStream.Inspector delegate;
		private final LogFacility log;

		public MetadataInspector(Metadata _metadata, LogFacility logFacility, RevlogStream.Inspector chain) {
			metadata = _metadata;
			log = logFacility;
			delegate = chain;
			setCancelSupport(CancelSupport.Factory.get(chain));
		}

		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgException {
			try {
				final int daLength = data.length();
				if (daLength < 4 || data.readByte() != 1 || data.readByte() != 10) {
					metadata.recordNone(revisionNumber);
					data.reset();
				} else {
					ArrayList<MetadataEntry> _metadata = new ArrayList<MetadataEntry>();
					int offset = parseMetadata(data, daLength, _metadata);
					metadata.add(revisionNumber, offset, _metadata);
					// da is in prepared state (i.e. we consumed all bytes up to metadata end).
					// However, it's not safe to assume delegate won't call da.reset() for some reason,
					// and we need to ensure predictable result.
					data.reset();
					data = new FilterDataAccess(data, offset, daLength - offset);
				}
				if (delegate != null) {
					delegate.next(revisionNumber, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, nodeid, data);
				}
			} catch (IOException ex) {
				recordFailure(ex);
			} catch (HgInvalidControlFileException ex) {
				recordFailure(ex.isRevisionIndexSet() ? ex : ex.setRevisionIndex(revisionNumber));
			}
		}

		private int parseMetadata(DataAccess data, final int daLength, ArrayList<MetadataEntry> _metadata) throws IOException, HgInvalidControlFileException {
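			// Metadata layout (see the FileFormats wiki page referenced above): revision data opens with the two bytes 0x01 0x0A,
			// followed by "key: value\n" pairs, and is terminated with another 0x01 0x0A pair; the real file content follows.
			// The two opening bytes were already consumed by the caller, hence parsing starts at offset 2.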
			int lastEntryStart = 2;
			int lastColon = -1;
			// XXX in fact, need smth like ByteArrayBuilder, similar to StringBuilder,
			// which can't be used here because we can't convert bytes to chars as we read them
			// (there might be multi-byte encoding), and we need to collect all bytes before converting to string
			ByteArrayOutputStream bos = new ByteArrayOutputStream();
			String key = null, value = null;
			boolean byteOne = false;
			boolean metadataIsComplete = false;
			for (int i = 2; i < daLength; i++) {
				byte b = data.readByte();
				if (b == '\n') {
					if (byteOne) { // i.e. \n follows 1
						lastEntryStart = i+1;
						metadataIsComplete = true;
						// XXX is it possible to have here incomplete key/value (i.e. if last pair didn't end with \n)
						// if yes, need to set metadataIsComplete to true in that case as well
						break;
					}
					if (key == null || lastColon == -1 || i <= lastColon) {
						log.error(getClass(), "Missing key in file revision metadata at index %d", i);
					}
					value = new String(bos.toByteArray()).trim();
					bos.reset();
					_metadata.add(new MetadataEntry(key, value));
					key = value = null;
					lastColon = -1;
					lastEntryStart = i+1;
					continue;
				}
				// byteOne has to be consumed up to this line; if not yet, consume it
				if (byteOne) {
					// insert 1 we've read on previous step into the byte builder
					bos.write(1);
					byteOne = false;
					// fall-through to consume current byte
				}
				if (b == (int) ':') {
					assert value == null;
					key = new String(bos.toByteArray());
					bos.reset();
					lastColon = i;
				} else if (b == 1) {
					byteOne = true;
				} else {
					bos.write(b);
				}
			}
			// data.isEmpty is not reliable, renamed files of size==0 keep only metadata
			if (!metadataIsComplete) {
				// XXX perhaps, worth a testcase (empty file, renamed, then read or ask isCopy())
				throw new HgInvalidControlFileException("Metadata is not closed properly", null, null);
			}
			return lastEntryStart;
		}

		@Override
		public void checkFailed() throws HgException, IOException, CancelledException {
			super.checkFailed();
			if (delegate instanceof ErrorHandlingInspector) {
				// XXX need to add ErrorDestination and pass it around (much like CancelSupport gets passed)
				// so that delegate would be able to report its failures directly to caller without this hack
				((ErrorHandlingInspector) delegate).checkFailed();
			}
		}
	}
}