tikhomirov@17: /*
tikhomirov@74:  * Copyright (c) 2010-2011 TMate Software Ltd
tikhomirov@74:  *  
tikhomirov@74:  * This program is free software; you can redistribute it and/or modify
tikhomirov@74:  * it under the terms of the GNU General Public License as published by
tikhomirov@74:  * the Free Software Foundation; version 2 of the License.
tikhomirov@74:  *
tikhomirov@74:  * This program is distributed in the hope that it will be useful,
tikhomirov@74:  * but WITHOUT ANY WARRANTY; without even the implied warranty of
tikhomirov@74:  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
tikhomirov@74:  * GNU General Public License for more details.
tikhomirov@74:  *
tikhomirov@74:  * For information on how to redistribute this software under
tikhomirov@74:  * the terms of a license other than GNU General Public License
tikhomirov@102:  * contact TMate Software at support@hg4j.com
tikhomirov@2:  */
tikhomirov@74: package org.tmatesoft.hg.repo;
tikhomirov@2: 
tikhomirov@367: import static org.tmatesoft.hg.repo.HgInternals.wrongRevisionIndex;
tikhomirov@148: import static org.tmatesoft.hg.repo.HgRepository.*;
tikhomirov@74: 
tikhomirov@157: import java.io.ByteArrayOutputStream;
tikhomirov@237: import java.io.File;
tikhomirov@237: import java.io.FileInputStream;
tikhomirov@148: import java.io.IOException;
tikhomirov@115: import java.nio.ByteBuffer;
tikhomirov@237: import java.nio.channels.FileChannel;
tikhomirov@78: import java.util.ArrayList;
tikhomirov@240: import java.util.Arrays;
tikhomirov@78: import java.util.Collection;
tikhomirov@305: import java.util.Collections;
tikhomirov@305: import java.util.List;
tikhomirov@78: 
tikhomirov@148: import org.tmatesoft.hg.core.HgDataStreamException;
tikhomirov@157: import org.tmatesoft.hg.core.HgException;
tikhomirov@354: import org.tmatesoft.hg.core.HgInvalidControlFileException;
tikhomirov@347: import org.tmatesoft.hg.core.HgInvalidRevisionException;
tikhomirov@328: import org.tmatesoft.hg.core.HgLogCommand;
tikhomirov@74: import org.tmatesoft.hg.core.Nodeid;
tikhomirov@157: import org.tmatesoft.hg.internal.DataAccess;
tikhomirov@121: import org.tmatesoft.hg.internal.FilterByteChannel;
tikhomirov@277: import org.tmatesoft.hg.internal.FilterDataAccess;
tikhomirov@276: import org.tmatesoft.hg.internal.IntMap;
tikhomirov@77: import org.tmatesoft.hg.internal.RevlogStream;
tikhomirov@115: import org.tmatesoft.hg.util.ByteChannel;
tikhomirov@237: import org.tmatesoft.hg.util.CancelSupport;
tikhomirov@148: import org.tmatesoft.hg.util.CancelledException;
tikhomirov@305: import org.tmatesoft.hg.util.Pair;
tikhomirov@133: import org.tmatesoft.hg.util.Path;
tikhomirov@237: import org.tmatesoft.hg.util.ProgressSupport;
tikhomirov@74: 
tikhomirov@5: 
tikhomirov@17: 
tikhomirov@2: /**
tikhomirov@2:  * Regular user file as known to the repository; gives access to its content and revision history. (XXX better name, e.g. HgFileNode?)
tikhomirov@74:  *
tikhomirov@74:  * @author Artem Tikhomirov
tikhomirov@74:  * @author TMate Software Ltd.
tikhomirov@2:  */
tikhomirov@2: public class HgDataFile extends Revlog {
tikhomirov@2: 
tikhomirov@3: 	// absolute from repo root?
tikhomirov@3: 	// slashes, unix-style?
tikhomirov@3: 	// repo location agnostic, just to give info to user, not to access real storage
tikhomirov@74: 	private final Path path;
tikhomirov@134: 	private Metadata metadata; // gets initialized on first access to file content.
tikhomirov@2: 	
tikhomirov@115: 	/*package-local*/HgDataFile(HgRepository hgRepo, Path filePath, RevlogStream content) {
tikhomirov@21: 		super(hgRepo, content);
tikhomirov@115: 		path = filePath;
tikhomirov@3: 	}
tikhomirov@115: 
tikhomirov@115: 	/*package-local*/HgDataFile(HgRepository hgRepo, Path filePath) {
tikhomirov@115: 		super(hgRepo);
tikhomirov@115: 		path = filePath;
tikhomirov@115: 	}
tikhomirov@115: 
tikhomirov@115: 	// exists() is not the best name possible. Right now, a false result means no file with such a name was ever known to the repo,
tikhomirov@115: 	// which might be confused with files that existed before but have been removed since.
tikhomirov@3: 	public boolean exists() {
tikhomirov@3: 		return content != null; // XXX need better impl
tikhomirov@2: 	}
tikhomirov@2: 
tikhomirov@77: 	// human-readable (e.g. "COPYING", not "store/data/_c_o_p_y_i_n_g.i")
tikhomirov@74: 	public Path getPath() {
tikhomirov@157: 		return path; // hgRepo.backresolve(this) -> name? In this case, what about hashed long names?
tikhomirov@2: 	}
tikhomirov@2: 
tikhomirov@275: 	/**
tikhomirov@367: 	 * Handy shorthand for {@link #length(int) length(getRevisionIndex(nodeid))}
tikhomirov@354: 	 *
tikhomirov@354: 	 * @param nodeid revision of the file
tikhomirov@354: 	 * 
tikhomirov@275: 	 * @return size of the file content at the given revision
tikhomirov@380: 	 * @throws HgInvalidRevisionException if supplied nodeid doesn't identify any revision from this revlog  
tikhomirov@354: 	 * @throws HgDataStreamException if attempt to access file metadata failed
tikhomirov@354: 	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
tikhomirov@275: 	 */
tikhomirov@354: 	public int length(Nodeid nodeid) throws HgDataStreamException, HgInvalidControlFileException, HgInvalidRevisionException {
tikhomirov@367: 		return length(getRevisionIndex(nodeid));
tikhomirov@275: 	}
tikhomirov@275: 	
tikhomirov@275: 	/**
tikhomirov@368: 	 * @param fileRevisionIndex local revision index, non-negative. Of the predefined constants, only {@link HgRepository#TIP} makes sense here.
tikhomirov@275: 	 * @return size of the file content at the revision identified by this local revision index.
tikhomirov@354: 	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog
tikhomirov@354: 	 * @throws HgDataStreamException if attempt to access file metadata failed
tikhomirov@354: 	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
tikhomirov@275: 	 */
tikhomirov@367: 	public int length(int fileRevisionIndex) throws HgDataStreamException, HgInvalidControlFileException, HgInvalidRevisionException {
tikhomirov@367: 		// TODO support WORKING_COPY constant
tikhomirov@367: 		if (metadata == null || !metadata.checked(fileRevisionIndex)) {
tikhomirov@367: 			checkAndRecordMetadata(fileRevisionIndex);
tikhomirov@275: 		}
tikhomirov@367: 		final int dataLen = content.dataLength(fileRevisionIndex);
tikhomirov@367: 		if (metadata.known(fileRevisionIndex)) {
tikhomirov@367: 			return dataLen - metadata.dataOffset(fileRevisionIndex);
tikhomirov@275: 		}
tikhomirov@275: 		return dataLen;
tikhomirov@22: 	}
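	/* Editorial sketch (not part of the original source): querying the size of the newest file revision,
	 * assuming an HgDataFile instance "df" already obtained from its repository; declared exceptions are
	 * omitted for brevity. Only methods used elsewhere in this class appear below.
	 *
	 *   int lastRev = df.getLastRevision();   // index of the newest file revision
	 *   int sizeAtTip = df.length(lastRev);   // content size, metadata header (if any) excluded
	 */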
tikhomirov@22: 
tikhomirov@237: 	/**
tikhomirov@237: 	 * Reads content of the file from the working directory. If the file is present in the working directory, its actual content,
tikhomirov@237: 	 * without any filters, is supplied through the sink. If the file does not exist in the working dir, this method provides the
tikhomirov@237: 	 * content the file would get if it were refreshed in the working copy, i.e. its corresponding revision
tikhomirov@237: 	 * (XXX according to dirstate? file tip?) is read from the repository, and the repo -> working copy filters get applied.
tikhomirov@237: 	 *     
tikhomirov@237: 	 * @param sink where to pipe content to
tikhomirov@237: 	 * @throws HgDataStreamException to indicate troubles reading repository file
tikhomirov@237: 	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
tikhomirov@380: 	 * @throws CancelledException if execution of the operation was cancelled
tikhomirov@237: 	 */
tikhomirov@366: 	public void workingCopy(ByteChannel sink) throws HgDataStreamException, HgInvalidControlFileException, CancelledException {
tikhomirov@237: 		File f = getRepo().getFile(this);
tikhomirov@237: 		if (f.exists()) {
tikhomirov@237: 			final CancelSupport cs = CancelSupport.Factory.get(sink);
tikhomirov@237: 			final ProgressSupport progress = ProgressSupport.Factory.get(sink);
tikhomirov@237: 			final long flength = f.length();
tikhomirov@237: 			final int bsize = (int) Math.min(flength, 32*1024);
tikhomirov@237: 			progress.start((int) (flength > Integer.MAX_VALUE ? flength >>> 15 /*32 kb buf size*/ : flength));
tikhomirov@237: 			ByteBuffer buf = ByteBuffer.allocate(bsize);
tikhomirov@237: 			FileChannel fc = null;
tikhomirov@237: 			try {
tikhomirov@237: 				fc = new FileInputStream(f).getChannel();
tikhomirov@237: 				while (fc.read(buf) != -1) {
tikhomirov@237: 					cs.checkCancelled();
tikhomirov@237: 					buf.flip();
tikhomirov@237: 					int consumed = sink.write(buf);
tikhomirov@237: 					progress.worked(flength > Integer.MAX_VALUE ? 1 : consumed);
tikhomirov@237: 					buf.compact();
tikhomirov@237: 				}
tikhomirov@237: 			} catch (IOException ex) {
tikhomirov@237: 				throw new HgDataStreamException(getPath(), ex);
tikhomirov@237: 			} finally {
tikhomirov@237: 				progress.done();
tikhomirov@237: 				if (fc != null) {
tikhomirov@237: 					try {
tikhomirov@237: 						fc.close();
tikhomirov@237: 					} catch (IOException ex) {
tikhomirov@295: 						getRepo().getContext().getLog().info(getClass(), ex, null);
tikhomirov@237: 					}
tikhomirov@237: 				}
tikhomirov@237: 			}
tikhomirov@237: 		} else {
tikhomirov@355: 			// FIXME not TIP, but revision according to dirstate!!!
tikhomirov@355: 			// add tests for this case
tikhomirov@237: 			contentWithFilters(TIP, sink);
tikhomirov@237: 		}
tikhomirov@2: 	}
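	/* Editorial sketch: a minimal collecting sink for workingCopy()/content(), built on the ByteChannel
	 * interface whose write(ByteBuffer) contract is used throughout this class; handling of the declared
	 * checked exceptions is omitted for brevity.
	 *
	 *   final ByteArrayOutputStream out = new ByteArrayOutputStream();
	 *   df.workingCopy(new ByteChannel() {
	 *       public int write(ByteBuffer buffer) {
	 *           int consumed = buffer.remaining();
	 *           while (buffer.hasRemaining()) {
	 *               out.write(buffer.get());
	 *           }
	 *           return consumed; // bytes taken from the buffer, as the progress reporting above expects
	 *       }
	 *   });
	 *   byte[] workingCopyBytes = out.toByteArray();
	 */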
tikhomirov@115: 	
tikhomirov@157: //	public void content(int revision, ByteChannel sink, boolean applyFilters) throws HgDataStreamException, IOException, CancelledException {
tikhomirov@157: //		byte[] content = content(revision);
tikhomirov@157: //		final CancelSupport cancelSupport = CancelSupport.Factory.get(sink);
tikhomirov@157: //		final ProgressSupport progressSupport = ProgressSupport.Factory.get(sink);
tikhomirov@157: //		ByteBuffer buf = ByteBuffer.allocate(512);
tikhomirov@157: //		int left = content.length;
tikhomirov@157: //		progressSupport.start(left);
tikhomirov@157: //		int offset = 0;
tikhomirov@157: //		cancelSupport.checkCancelled();
tikhomirov@157: //		ByteChannel _sink = applyFilters ? new FilterByteChannel(sink, getRepo().getFiltersFromRepoToWorkingDir(getPath())) : sink;
tikhomirov@157: //		do {
tikhomirov@157: //			buf.put(content, offset, Math.min(left, buf.remaining()));
tikhomirov@157: //			buf.flip();
tikhomirov@157: //			cancelSupport.checkCancelled();
tikhomirov@157: //			// XXX I may not rely on returned number of bytes but track change in buf position instead.
tikhomirov@157: //			int consumed = _sink.write(buf);
tikhomirov@157: //			buf.compact();
tikhomirov@157: //			offset += consumed;
tikhomirov@157: //			left -= consumed;
tikhomirov@157: //			progressSupport.worked(consumed);
tikhomirov@157: //		} while (left > 0);
tikhomirov@157: //		progressSupport.done(); // XXX shall specify whether #done() is invoked always or only if completed successfully.
tikhomirov@157: //	}
tikhomirov@157: 	
tikhomirov@157: 	/* XXX not sure a distinct contentWithFilters() method is the best approach; perhaps callers shall add the filters themselves? */
tikhomirov@366: 	public void contentWithFilters(int revision, ByteChannel sink) throws HgDataStreamException, HgInvalidControlFileException, CancelledException, HgInvalidRevisionException {
tikhomirov@237: 		if (revision == WORKING_COPY) {
tikhomirov@237: 			workingCopy(sink); // pass un-mangled sink
tikhomirov@237: 		} else {
tikhomirov@237: 			content(revision, new FilterByteChannel(sink, getRepo().getFiltersFromRepoToWorkingDir(getPath())));
tikhomirov@237: 		}
tikhomirov@115: 	}
tikhomirov@22: 
tikhomirov@367: 	/**
tikhomirov@367: 	 * Pipes content of the given file revision into the sink; for data files, the leading metadata block (if any) is detected and stripped.
tikhomirov@368: 	 * @param fileRevisionIndex local revision index, non-negative. Of the predefined constants, {@link HgRepository#TIP} and {@link HgRepository#WORKING_COPY} make sense here.
tikhomirov@367: 	 * @param sink where to pipe content to
tikhomirov@367: 	 * @throws HgDataStreamException to indicate troubles reading the repository file
tikhomirov@380: 	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
tikhomirov@380: 	 * @throws CancelledException if execution of the operation was cancelled
tikhomirov@380: 	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog
tikhomirov@367: 	 */
tikhomirov@367: 	public void content(int fileRevisionIndex, ByteChannel sink) throws HgDataStreamException, HgInvalidControlFileException, CancelledException, HgInvalidRevisionException {
tikhomirov@367: 		// for data files need to check heading of the file content for possible metadata
tikhomirov@367: 		// @see http://mercurial.selenic.com/wiki/FileFormats#data.2BAC8-
tikhomirov@367: 		if (fileRevisionIndex == TIP) {
tikhomirov@367: 			fileRevisionIndex = getLastRevision();
tikhomirov@78: 		}
tikhomirov@367: 		if (fileRevisionIndex == WORKING_COPY) {
tikhomirov@237: 			// sink is supposed to come into workingCopy without filters
tikhomirov@237: 			// thus we shall not get here (into #content) from #contentWithFilters(WC)
tikhomirov@157: 			workingCopy(sink);
tikhomirov@157: 			return;
tikhomirov@157: 		}
tikhomirov@367: 		if (wrongRevisionIndex(fileRevisionIndex) || fileRevisionIndex == BAD_REVISION) {
tikhomirov@367: 			throw new HgInvalidRevisionException(fileRevisionIndex);
tikhomirov@148: 		}
tikhomirov@157: 		if (sink == null) {
tikhomirov@157: 			throw new IllegalArgumentException();
tikhomirov@157: 		}
tikhomirov@134: 		if (metadata == null) {
tikhomirov@134: 			metadata = new Metadata();
tikhomirov@134: 		}
tikhomirov@277: 		ErrorHandlingInspector insp;
tikhomirov@367: 		if (metadata.none(fileRevisionIndex)) {
tikhomirov@355: 			insp = new ContentPipe(sink, 0, getRepo().getContext().getLog());
tikhomirov@367: 		} else if (metadata.known(fileRevisionIndex)) {
tikhomirov@367: 			insp = new ContentPipe(sink, metadata.dataOffset(fileRevisionIndex), getRepo().getContext().getLog());
tikhomirov@157: 		} else {
tikhomirov@157: 			// do not know if there's metadata
tikhomirov@355: 			insp = new MetadataInspector(metadata, getPath(), new ContentPipe(sink, 0, getRepo().getContext().getLog()));
tikhomirov@78: 		}
tikhomirov@157: 		insp.checkCancelled();
tikhomirov@367: 		super.content.iterate(fileRevisionIndex, fileRevisionIndex, true, insp);
tikhomirov@157: 		try {
tikhomirov@237: 			insp.checkFailed(); // XXX is there real need to throw IOException from ContentPipe?
tikhomirov@157: 		} catch (HgDataStreamException ex) {
tikhomirov@157: 			throw ex;
tikhomirov@237: 		} catch (IOException ex) {
tikhomirov@367: 			throw new HgDataStreamException(getPath(), ex).setRevisionIndex(fileRevisionIndex);
tikhomirov@157: 		} catch (HgException ex) {
tikhomirov@157: 			// shall not happen, unless we changed ContentPipe or its subclass
tikhomirov@215: 			throw new HgDataStreamException(getPath(), ex.getClass().getName(), ex);
tikhomirov@78: 		}
tikhomirov@78: 	}
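	/* Editorial sketch: reading a specific revision of the file. "collectingSink" stands for any ByteChannel
	 * implementation, e.g. the one sketched after workingCopy() above; TIP and WORKING_COPY are accepted per
	 * the javadoc of these methods.
	 *
	 *   df.content(HgRepository.TIP, collectingSink);   // newest committed revision, metadata stripped
	 *   df.contentWithFilters(0, collectingSink);       // first revision, with repo -> working copy filters applied
	 */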
tikhomirov@157: 	
tikhomirov@317: 	private static class HistoryNode {
tikhomirov@317: 		int changeset;
tikhomirov@317: 		Nodeid cset;
tikhomirov@317: 		HistoryNode parent1, parent2;
tikhomirov@317: 		List<HistoryNode> children;
tikhomirov@317: 
tikhomirov@317: 		HistoryNode(int cs, HistoryNode p1, HistoryNode p2) {
tikhomirov@317: 			changeset = cs;
tikhomirov@317: 			parent1 = p1;
tikhomirov@317: 			parent2 = p2;
tikhomirov@317: 			if (p1 != null) {
tikhomirov@317: 				p1.addChild(this);
tikhomirov@317: 			}
tikhomirov@317: 			if (p2 != null) {
tikhomirov@317: 				p2.addChild(this);
tikhomirov@317: 			}
tikhomirov@317: 		}
tikhomirov@317: 		
tikhomirov@317: 		Nodeid changesetRevision() {
tikhomirov@317: 			assert cset != null : "we initialize all csets prior to use";
tikhomirov@317: 			return cset;
tikhomirov@317: 		}
tikhomirov@317: 
tikhomirov@317: 		void addChild(HistoryNode child) {
tikhomirov@317: 			if (children == null) {
tikhomirov@317: 				children = new ArrayList<HistoryNode>(2);
tikhomirov@317: 			}
tikhomirov@317: 			children.add(child);
tikhomirov@317: 		}
tikhomirov@305: 	}
tikhomirov@305: 	
tikhomirov@328: 	/**
tikhomirov@328: 	 * @deprecated use {@link HgLogCommand#execute(org.tmatesoft.hg.core.HgChangesetTreeHandler)} instead
tikhomirov@328: 	 */
tikhomirov@328: 	@Deprecated
tikhomirov@366: 	public void history(HgChangelog.TreeInspector inspector) throws HgInvalidControlFileException{
tikhomirov@317: 		final CancelSupport cancelSupport = CancelSupport.Factory.get(inspector);
tikhomirov@317: 		try {
tikhomirov@317: 			final boolean[] needsSorting = { false };
tikhomirov@317: 			final HistoryNode[] completeHistory = new HistoryNode[getRevisionCount()];
tikhomirov@317: 			final int[] commitRevisions = new int[completeHistory.length];
tikhomirov@317: 			RevlogStream.Inspector insp = new RevlogStream.Inspector() {
tikhomirov@317: 				public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
tikhomirov@317: 					if (revisionNumber > 0) {
tikhomirov@317: 						if (commitRevisions[revisionNumber-1] > linkRevision) {
tikhomirov@317: 							needsSorting[0] = true;
tikhomirov@317: 						}
tikhomirov@317: 					}
tikhomirov@317: 					commitRevisions[revisionNumber] = linkRevision;
tikhomirov@317: 					HistoryNode p1 = null, p2 = null;
tikhomirov@317: 					if (parent1Revision != -1) {
tikhomirov@317: 						p1 = completeHistory[parent1Revision];
tikhomirov@317: 					}
tikhomirov@317: 					if (parent2Revision != -1) {
tikhomirov@317: 						p2 = completeHistory[parent2Revision];
tikhomirov@317: 					}
tikhomirov@317: 					completeHistory[revisionNumber] = new HistoryNode(linkRevision, p1, p2);
tikhomirov@305: 				}
tikhomirov@317: 			};
tikhomirov@317: 			content.iterate(0, getLastRevision(), false, insp);
tikhomirov@317: 			cancelSupport.checkCancelled();
tikhomirov@317: 			if (needsSorting[0]) {
tikhomirov@317: 				Arrays.sort(commitRevisions);
tikhomirov@317: 			}
tikhomirov@317: 			// read changeset revisions at once (to avoid numerous changelog.getRevision reads),
tikhomirov@317: 			// but just nodeids, not RawChangeset (changelog.iterate(data=false))
tikhomirov@317: 			ArrayList<Nodeid> changesetRevisions = new ArrayList<Nodeid>(commitRevisions.length);
tikhomirov@317: 			getRepo().getChangelog().getRevisionsInternal(changesetRevisions, commitRevisions);
tikhomirov@317: 			cancelSupport.checkCancelled();
tikhomirov@317: 			// assign them to corresponding HistoryNodes
tikhomirov@317: 			for (int i = 0; i < completeHistory.length; i++ ) {
tikhomirov@317: 				final HistoryNode n = completeHistory[i];
tikhomirov@317: 				if (needsSorting[0]) {
tikhomirov@317: 					int x = Arrays.binarySearch(commitRevisions, n.changeset);
tikhomirov@317: 					assert x >= 0;
tikhomirov@317: 					n.cset = changesetRevisions.get(x);
tikhomirov@317: 				} else {
tikhomirov@317: 					// commit revisions were not sorted, may use original index directly
tikhomirov@317: 					n.cset = changesetRevisions.get(i);
tikhomirov@305: 				}
tikhomirov@305: 			}
tikhomirov@317: 			cancelSupport.checkCancelled();
tikhomirov@317: 			// XXX shall sort completeHistory according to changeset numbers?
tikhomirov@317: 			for (int i = 0; i < completeHistory.length; i++ ) {
tikhomirov@317: 				final HistoryNode n = completeHistory[i];
tikhomirov@305: 				HistoryNode p;
tikhomirov@305: 				Nodeid p1, p2;
tikhomirov@317: 				if ((p = n.parent1) != null) {
tikhomirov@305: 					p1 = p.changesetRevision();
tikhomirov@305: 				} else {
tikhomirov@305: 					p1 = Nodeid.NULL;
tikhomirov@305: 				}
tikhomirov@317: 				if ((p = n.parent2) != null) {
tikhomirov@305: 					p2 = p.changesetRevision();
tikhomirov@305: 				} else {
tikhomirov@305: 					p2 = Nodeid.NULL;
tikhomirov@305: 				}
tikhomirov@317: 				final Pair<Nodeid, Nodeid> parentChangesets = new Pair<Nodeid, Nodeid>(p1, p2);
tikhomirov@317: 				final List<Nodeid> childChangesets;
tikhomirov@305: 				if (n.children == null) {
tikhomirov@317: 					childChangesets = Collections.emptyList();
tikhomirov@317: 				} else {
tikhomirov@317: 					Nodeid[] revisions = new Nodeid[n.children.size()];
tikhomirov@317: 					int j = 0;
tikhomirov@317: 					for (HistoryNode hn : n.children) {
tikhomirov@317: 						revisions[j++] = hn.changesetRevision();
tikhomirov@317: 					}
tikhomirov@317: 					childChangesets = Arrays.asList(revisions);
tikhomirov@305: 				}
tikhomirov@317: 				inspector.next(n.changesetRevision(), parentChangesets, childChangesets);
tikhomirov@317: 				cancelSupport.checkCancelled();
tikhomirov@305: 			}
tikhomirov@317: 		} catch (CancelledException ex) {
tikhomirov@317: 			return;
tikhomirov@317: 		}
tikhomirov@305: 	}
tikhomirov@305: 	
tikhomirov@366: 	public void history(HgChangelog.Inspector inspector) throws HgInvalidControlFileException {
tikhomirov@135: 		history(0, getLastRevision(), inspector);
tikhomirov@48: 	}
tikhomirov@48: 
tikhomirov@366: 	public void history(int start, int end, HgChangelog.Inspector inspector) throws HgInvalidRevisionException, HgInvalidControlFileException {
tikhomirov@3: 		if (!exists()) {
tikhomirov@3: 			throw new IllegalStateException("Can't get history of invalid repository file node"); 
tikhomirov@3: 		}
tikhomirov@135: 		final int last = getLastRevision();
tikhomirov@77: 		if (end == TIP) {
tikhomirov@77: 			end = last;
tikhomirov@77: 		}
tikhomirov@300: 		if (start == TIP) {
tikhomirov@300: 			start = last;
tikhomirov@300: 		}
tikhomirov@300: 		HgInternals.checkRevlogRange(start, end, last);
tikhomirov@300: 
tikhomirov@48: 		final int[] commitRevisions = new int[end - start + 1];
tikhomirov@242: 		final boolean[] needsSorting = { false };
tikhomirov@77: 		RevlogStream.Inspector insp = new RevlogStream.Inspector() {
tikhomirov@3: 			int count = 0;
tikhomirov@51: 			public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) {
tikhomirov@242: 				if (count > 0) {
tikhomirov@242: 					if (commitRevisions[count -1] > linkRevision) {
tikhomirov@242: 						needsSorting[0] = true;
tikhomirov@242: 					}
tikhomirov@242: 				}
tikhomirov@3: 				commitRevisions[count++] = linkRevision;
tikhomirov@3: 			}
tikhomirov@3: 		};
tikhomirov@48: 		content.iterate(start, end, false, insp);
tikhomirov@233: 		final HgChangelog changelog = getRepo().getChangelog();
tikhomirov@242: 		if (needsSorting[0]) {
tikhomirov@242: 			// automatic tools (svnmerge?) produce unnatural file history
tikhomirov@242: 			// (e.g. cpython/Lib/doctest.py, revision 164 points to cset 63509, 165 - to 38453) 
tikhomirov@242: 			Arrays.sort(commitRevisions);
tikhomirov@233: 		}
tikhomirov@245: 		changelog.rangeInternal(inspector, commitRevisions);
tikhomirov@3: 	}
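	/* Editorial sketch: walking the changesets that modified this file. The HgChangelog.Inspector callback
	 * signature shown here (revision index, changeset nodeid, parsed changeset) is an assumption based on
	 * other parts of the library; RawChangeset is referenced but not defined in this file.
	 *
	 *   df.history(new HgChangelog.Inspector() {
	 *       public void next(int revisionNumber, Nodeid nodeid, HgChangelog.RawChangeset cset) {
	 *           // one call per changeset that touched the file
	 *       }
	 *   });
	 *   // or restrict to a sub-range of the file's revisions:
	 *   // df.history(0, df.getLastRevision(), inspector);
	 */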
tikhomirov@88: 	
tikhomirov@354: 	/**
tikhomirov@367: 	 * For a given revision of the file (identified by its revision index), find out the index of the corresponding changeset.
tikhomirov@354: 	 *
tikhomirov@354: 	 * @param revision file revision index
tikhomirov@354: 	 * @return changeset revision index
tikhomirov@354: 	 * @throws HgInvalidRevisionException if supplied argument doesn't represent revision index in this revlog
tikhomirov@354: 	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
tikhomirov@354: 	 */
tikhomirov@367: 	public int getChangesetRevisionIndex(int revision) throws HgInvalidControlFileException, HgInvalidRevisionException {
tikhomirov@367: 		return content.linkRevision(revision);
tikhomirov@367: 	}
tikhomirov@367: 	/**
tikhomirov@367: 	 * @deprecated use {@link #getChangesetRevisionIndex(int)} instead
tikhomirov@367: 	 */
tikhomirov@367: 	@Deprecated
tikhomirov@354: 	public int getChangesetLocalRevision(int revision) throws HgInvalidControlFileException, HgInvalidRevisionException {
tikhomirov@367: 		return getChangesetRevisionIndex(revision);
tikhomirov@88: 	}
tikhomirov@88: 
tikhomirov@354: 	/**
tikhomirov@367: 	 * Complements {@link #getChangesetRevisionIndex(int)} to get the changeset revision that corresponds to the supplied file revision.
tikhomirov@354: 	 * 
tikhomirov@354: 	 * @param nid revision of the file
tikhomirov@354: 	 * @return changeset revision
tikhomirov@354: 	 * @throws HgInvalidRevisionException if supplied nodeid doesn't identify any revision from this revlog
tikhomirov@354: 	 * @throws HgInvalidControlFileException if access to revlog index/data entry failed
tikhomirov@354: 	 */
tikhomirov@354: 	public Nodeid getChangesetRevision(Nodeid nid) throws HgInvalidControlFileException, HgInvalidRevisionException {
tikhomirov@367: 		int changelogRevision = getChangesetRevisionIndex(getRevisionIndex(nid));
tikhomirov@88: 		return getRepo().getChangelog().getRevision(changelogRevision);
tikhomirov@88: 	}
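	/* Editorial sketch: mapping a file revision to the changeset that introduced it. getRevision(int) is
	 * assumed to be available from the Revlog superclass, as it is used for the changelog just above.
	 *
	 *   int fileRevIdx = df.getLastRevision();
	 *   int csetIdx = df.getChangesetRevisionIndex(fileRevIdx);              // changeset revision index
	 *   Nodeid csetId = df.getChangesetRevision(df.getRevision(fileRevIdx)); // changeset nodeid
	 */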
tikhomirov@78: 
tikhomirov@354: 	/**
tikhomirov@354: 	 * Tells whether the initial revision of this file records a copy (rename) from another file.
tikhomirov@354: 	 * 
tikhomirov@354: 	 * @return <code>true</code> if this file originates as a copy of another file
tikhomirov@354: 	 * @throws HgDataStreamException if attempt to access file metadata failed
tikhomirov@354: 	 */
tikhomirov@157: 	public boolean isCopy() throws HgDataStreamException {
tikhomirov@134: 		if (metadata == null || !metadata.checked(0)) {
tikhomirov@275: 			checkAndRecordMetadata(0);
tikhomirov@78: 		}
tikhomirov@134: 		if (!metadata.known(0)) {
tikhomirov@78: 			return false;
tikhomirov@78: 		}
tikhomirov@78: 		return metadata.find(0, "copy") != null;
tikhomirov@78: 	}
tikhomirov@78: 
tikhomirov@354: 	/**
tikhomirov@354: 	 * Get name of the file this one was copied from.
tikhomirov@354: 	 * 
tikhomirov@354: 	 * @return name of the file origin
tikhomirov@354: 	 * @throws HgDataStreamException if attempt to access file metadata failed
tikhomirov@354: 	 * @throws UnsupportedOperationException if this file doesn't represent a copy ({@link #isCopy()} was false)
tikhomirov@354: 	 */
tikhomirov@157: 	public Path getCopySourceName() throws HgDataStreamException {
tikhomirov@78: 		if (isCopy()) {
tikhomirov@78: 			return Path.create(metadata.find(0, "copy"));
tikhomirov@78: 		}
tikhomirov@78: 		throw new UnsupportedOperationException(); // XXX REVISIT, think over if Exception is good (clients would check isCopy() anyway, perhaps null is sufficient?)
tikhomirov@78: 	}
tikhomirov@78: 	
tikhomirov@157: 	public Nodeid getCopySourceRevision() throws HgDataStreamException {
tikhomirov@78: 		if (isCopy()) {
tikhomirov@78: 			return Nodeid.fromAscii(metadata.find(0, "copyrev")); // XXX reuse/cache Nodeid
tikhomirov@78: 		}
tikhomirov@78: 		throw new UnsupportedOperationException();
tikhomirov@78: 	}
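	/* Editorial sketch: detecting a copy/rename origin. Callers are expected to check isCopy() first, since
	 * the getCopySource* methods throw UnsupportedOperationException otherwise; all calls below are methods
	 * of this class.
	 *
	 *   if (df.isCopy()) {
	 *       Path origin = df.getCopySourceName();          // path this file was copied/renamed from
	 *       Nodeid originRev = df.getCopySourceRevision(); // revision of the origin file
	 *   }
	 */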
tikhomirov@88: 	
tikhomirov@88: 	@Override
tikhomirov@88: 	public String toString() {
tikhomirov@88: 		StringBuilder sb = new StringBuilder(getClass().getSimpleName());
tikhomirov@88: 		sb.append('(');
tikhomirov@88: 		sb.append(getPath());
tikhomirov@88: 		sb.append(')');
tikhomirov@88: 		return sb.toString();
tikhomirov@88: 	}
tikhomirov@275: 	
tikhomirov@275: 	private void checkAndRecordMetadata(int localRev) throws HgDataStreamException {
tikhomirov@275: 		// content() always initializes metadata.
tikhomirov@275: 		// FIXME this is an expensive way to find out metadata; a distinct RevlogStream.Iterator would be better.
tikhomirov@275: 		// Alternatively, may parameterize MetadataContentPipe to do prepare only.
tikhomirov@275: 		// For reference, when throwing CancelledException, hg status -A --rev 3:80 takes 70 ms;
tikhomirov@275: 		// however, if we just consume the buffer instead (buffer.position(buffer.limit())), the same command takes ~320 ms
tikhomirov@275: 		// (compared to command-line counterpart of 190ms)
tikhomirov@275: 		try {
tikhomirov@275: 			content(localRev, new ByteChannel() { // No-op channel
tikhomirov@275: 				public int write(ByteBuffer buffer) throws IOException, CancelledException {
tikhomirov@275: 					throw new CancelledException();
tikhomirov@275: 				}
tikhomirov@275: 			});
tikhomirov@275: 		} catch (CancelledException ex) {
tikhomirov@275: 			// it's ok, we did that
tikhomirov@366: 		} catch (HgInvalidControlFileException ex) {
tikhomirov@366: 			throw new HgDataStreamException(getPath(), ex);
tikhomirov@275: 		}
tikhomirov@275: 	}
tikhomirov@78: 
tikhomirov@88: 	private static final class MetadataEntry {
tikhomirov@78: 		private final String entry;
tikhomirov@78: 		private final int valueStart;
tikhomirov@78: 		/*package-local*/MetadataEntry(String key, String value) {
tikhomirov@78: 			entry = key + value;
tikhomirov@78: 			valueStart = key.length();
tikhomirov@78: 		}
tikhomirov@78: 		/*package-local*/boolean matchKey(String key) {
tikhomirov@78: 			return key.length() == valueStart && entry.startsWith(key);
tikhomirov@78: 		}
tikhomirov@134: //		uncomment once/if needed
tikhomirov@134: //		public String key() {
tikhomirov@134: //			return entry.substring(0, valueStart);
tikhomirov@134: //		}
tikhomirov@78: 		public String value() {
tikhomirov@78: 			return entry.substring(valueStart);
tikhomirov@78: 		}
tikhomirov@78: 	}
tikhomirov@78: 
tikhomirov@78: 	private static class Metadata {
tikhomirov@276: 		private static class Record {
tikhomirov@276: 			public final int offset;
tikhomirov@276: 			public final MetadataEntry[] entries;
tikhomirov@276: 			
tikhomirov@276: 			public Record(int off, MetadataEntry[] entr) {
tikhomirov@276: 				offset = off;
tikhomirov@276: 				entries = entr;
tikhomirov@276: 			}
tikhomirov@276: 		}
tikhomirov@78: 		// XXX sparse array needed
tikhomirov@276: 		private final IntMap<Record> entries = new IntMap<Record>(5);
tikhomirov@134: 		
tikhomirov@276: 		private final Record NONE = new Record(-1, null); // don't want statics
tikhomirov@134: 
tikhomirov@134: 		// true when there's metadata for given revision
tikhomirov@78: 		boolean known(int revision) {
tikhomirov@276: 			Record i = entries.get(revision);
tikhomirov@134: 			return i != null && NONE != i;
tikhomirov@134: 		}
tikhomirov@134: 
tikhomirov@134: 		// true when revision has been checked for metadata presence.
tikhomirov@134: 		public boolean checked(int revision) {
tikhomirov@276: 			return entries.containsKey(revision);
tikhomirov@78: 		}
tikhomirov@134: 
tikhomirov@134: 		// true when revision has been checked and found not having any metadata
tikhomirov@134: 		boolean none(int revision) {
tikhomirov@276: 			Record i = entries.get(revision);
tikhomirov@134: 			return i == NONE;
tikhomirov@134: 		}
tikhomirov@134: 
tikhomirov@134: 		// mark revision as having no metadata.
tikhomirov@134: 		void recordNone(int revision) {
tikhomirov@276: 			Record i = entries.get(revision);
tikhomirov@134: 			if (i == NONE) {
tikhomirov@134: 				return; // already there
tikhomirov@134: 			} 
tikhomirov@134: 			if (i != null) {
tikhomirov@134: 				throw new IllegalStateException(String.format("Trying to override Metadata state for revision %d (known offset: %d)", revision, i.offset));
tikhomirov@134: 			}
tikhomirov@276: 			entries.put(revision, NONE);
tikhomirov@134: 		}
tikhomirov@134: 
tikhomirov@78: 		// since this is an internal class, callers are supposed to ensure arg correctness (i.e. ask known() before)
tikhomirov@78: 		int dataOffset(int revision) {
tikhomirov@276: 			return entries.get(revision).offset;
tikhomirov@78: 		}
tikhomirov@78: 		void add(int revision, int dataOffset, Collection<MetadataEntry> e) {
tikhomirov@276: 			assert !entries.containsKey(revision);
tikhomirov@276: 			entries.put(revision, new Record(dataOffset, e.toArray(new MetadataEntry[e.size()])));
tikhomirov@78: 		}
tikhomirov@276: 
tikhomirov@78: 		String find(int revision, String key) {
tikhomirov@276: 			for (MetadataEntry me : entries.get(revision).entries) {
tikhomirov@78: 				if (me.matchKey(key)) {
tikhomirov@78: 					return me.value();
tikhomirov@78: 				}
tikhomirov@78: 			}
tikhomirov@78: 			return null;
tikhomirov@78: 		}
tikhomirov@78: 	}
tikhomirov@157: 
tikhomirov@277: 	private static class MetadataInspector extends ErrorHandlingInspector implements RevlogStream.Inspector {
tikhomirov@157: 		private final Metadata metadata;
tikhomirov@215: 		private final Path fname; // need this only for error reporting
tikhomirov@277: 		private final RevlogStream.Inspector delegate;
tikhomirov@157: 
tikhomirov@277: 		public MetadataInspector(Metadata _metadata, Path file, RevlogStream.Inspector chain) {
tikhomirov@157: 			metadata = _metadata;
tikhomirov@215: 			fname = file;
tikhomirov@277: 			delegate = chain;
tikhomirov@277: 			setCancelSupport(CancelSupport.Factory.get(chain));
tikhomirov@157: 		}
tikhomirov@157: 
tikhomirov@366: 		public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgException {
tikhomirov@277: 			try {
tikhomirov@277: 				final int daLength = data.length();
tikhomirov@277: 				if (daLength < 4 || data.readByte() != 1 || data.readByte() != 10) {
tikhomirov@277: 					metadata.recordNone(revisionNumber);
tikhomirov@277: 					data.reset();
tikhomirov@277: 				} else {
tikhomirov@277: 					ArrayList<MetadataEntry> _metadata = new ArrayList<MetadataEntry>();
tikhomirov@277: 					int offset = parseMetadata(data, daLength, _metadata);
tikhomirov@277: 					metadata.add(revisionNumber, offset, _metadata);
tikhomirov@277: 					// da is in prepared state (i.e. we consumed all bytes up to metadata end).
tikhomirov@277: 					// However, it's not safe to assume delegate won't call da.reset() for some reason,
tikhomirov@277: 					// and we need to ensure predictable result.
tikhomirov@277: 					data.reset();
tikhomirov@277: 					data = new FilterDataAccess(data, offset, daLength - offset);
tikhomirov@277: 				}
tikhomirov@277: 				if (delegate != null) {
tikhomirov@277: 					delegate.next(revisionNumber, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, nodeid, data);
tikhomirov@277: 				}
tikhomirov@277: 			} catch (IOException ex) {
tikhomirov@277: 				recordFailure(ex);
tikhomirov@277: 			} catch (HgDataStreamException ex) {
tikhomirov@367: 				recordFailure(ex.setRevisionIndex(revisionNumber));
tikhomirov@157: 			}
tikhomirov@277: 		}
tikhomirov@277: 
tikhomirov@277: 		private int parseMetadata(DataAccess data, final int daLength, ArrayList<MetadataEntry> _metadata) throws IOException, HgDataStreamException {
tikhomirov@157: 			int lastEntryStart = 2;
tikhomirov@157: 			int lastColon = -1;
tikhomirov@157: 			// XXX in fact, need something like ByteArrayBuilder, similar to StringBuilder,
tikhomirov@157: 			// which can't be used here because we can't convert bytes to chars as we read them
tikhomirov@157: 			// (there might be multi-byte encoding), and we need to collect all bytes before converting to string 
tikhomirov@157: 			ByteArrayOutputStream bos = new ByteArrayOutputStream();
tikhomirov@157: 			String key = null, value = null;
tikhomirov@157: 			boolean byteOne = false;
tikhomirov@323: 			boolean metadataIsComplete = false;
tikhomirov@157: 			for (int i = 2; i < daLength; i++) {
tikhomirov@277: 				byte b = data.readByte();
tikhomirov@157: 				if (b == '\n') {
tikhomirov@157: 					if (byteOne) { // i.e. \n follows 1
tikhomirov@157: 						lastEntryStart = i+1;
tikhomirov@323: 						metadataIsComplete = true;
tikhomirov@157: 						// XXX is it possible to have here incomplete key/value (i.e. if last pair didn't end with \n)
tikhomirov@323: 						// if yes, need to set metadataIsComplete to true in that case as well
tikhomirov@157: 						break;
tikhomirov@157: 					}
tikhomirov@157: 					if (key == null || lastColon == -1 || i <= lastColon) {
tikhomirov@157: 						throw new IllegalStateException(); // FIXME log instead and record null key in the metadata. Ex just to fail fast during dev
tikhomirov@157: 					}
tikhomirov@157: 					value = new String(bos.toByteArray()).trim();
tikhomirov@157: 					bos.reset();
tikhomirov@157: 					_metadata.add(new MetadataEntry(key, value));
tikhomirov@157: 					key = value = null;
tikhomirov@157: 					lastColon = -1;
tikhomirov@157: 					lastEntryStart = i+1;
tikhomirov@157: 					continue;
tikhomirov@157: 				} 
tikhomirov@277: 				// a pending 0x01 byte (byteOne) has to be consumed by this point; if not yet, consume it now
tikhomirov@157: 				if (byteOne) {
tikhomirov@157: 					// insert 1 we've read on previous step into the byte builder
tikhomirov@157: 					bos.write(1);
tikhomirov@277: 					byteOne = false;
tikhomirov@157: 					// fall-through to consume current byte
tikhomirov@157: 				}
tikhomirov@157: 				if (b == (int) ':') {
tikhomirov@157: 					assert value == null;
tikhomirov@157: 					key = new String(bos.toByteArray());
tikhomirov@157: 					bos.reset();
tikhomirov@157: 					lastColon = i;
tikhomirov@157: 				} else if (b == 1) {
tikhomirov@157: 					byteOne = true;
tikhomirov@157: 				} else {
tikhomirov@157: 					bos.write(b);
tikhomirov@157: 				}
tikhomirov@157: 			}
tikhomirov@323: 			// data.isEmpty is not reliable, renamed files of size==0 keep only metadata
tikhomirov@323: 			if (!metadataIsComplete) {
tikhomirov@323: 				// XXX perhaps worth a test case (empty file, renamed, then read it or ask isCopy())
tikhomirov@277: 				throw new HgDataStreamException(fname, "Metadata is not closed properly", null);
tikhomirov@157: 			}
tikhomirov@277: 			return lastEntryStart;
tikhomirov@17: 		}
tikhomirov@322: 
tikhomirov@322: 		@Override
tikhomirov@322: 		public void checkFailed() throws HgException, IOException, CancelledException {
tikhomirov@322: 			super.checkFailed();
tikhomirov@322: 			if (delegate instanceof ErrorHandlingInspector) {
tikhomirov@322: 				// XXX need to add ErrorDestination and pass it around (much like CancelSupport gets passed)
tikhomirov@322: 				// so that delegate would be able report its failures directly to caller without this hack
tikhomirov@322: 				((ErrorHandlingInspector) delegate).checkFailed();
tikhomirov@322: 			}
tikhomirov@322: 		}
tikhomirov@17: 	}
tikhomirov@2: }