tikhomirov@58: /*
tikhomirov@74: * Copyright (c) 2011 TMate Software Ltd
tikhomirov@74: *
tikhomirov@74: * This program is free software; you can redistribute it and/or modify
tikhomirov@74: * it under the terms of the GNU General Public License as published by
tikhomirov@74: * the Free Software Foundation; version 2 of the License.
tikhomirov@74: *
tikhomirov@74: * This program is distributed in the hope that it will be useful,
tikhomirov@74: * but WITHOUT ANY WARRANTY; without even the implied warranty of
tikhomirov@74: * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
tikhomirov@74: * GNU General Public License for more details.
tikhomirov@74: *
tikhomirov@74: * For information on how to redistribute this software under
tikhomirov@74: * the terms of a license other than GNU General Public License
tikhomirov@102: * contact TMate Software at support@hg4j.com
tikhomirov@58: */
tikhomirov@74: package org.tmatesoft.hg.repo;
tikhomirov@58:
tikhomirov@120: import static java.lang.Math.max;
tikhomirov@117: import static java.lang.Math.min;
tikhomirov@218: import static org.tmatesoft.hg.repo.HgRepository.*;
tikhomirov@58:
tikhomirov@58: import java.io.File;
tikhomirov@58: import java.io.IOException;
tikhomirov@117: import java.nio.ByteBuffer;
tikhomirov@287: import java.nio.channels.ReadableByteChannel;
tikhomirov@229: import java.util.ArrayList;
tikhomirov@58: import java.util.Collections;
tikhomirov@226: import java.util.NoSuchElementException;
tikhomirov@58: import java.util.Set;
tikhomirov@58: import java.util.TreeSet;
tikhomirov@58:
tikhomirov@337: import org.tmatesoft.hg.core.HgBadStateException;
tikhomirov@157: import org.tmatesoft.hg.core.HgException;
tikhomirov@348: import org.tmatesoft.hg.core.HgInvalidControlFileException;
tikhomirov@74: import org.tmatesoft.hg.core.Nodeid;
tikhomirov@157: import org.tmatesoft.hg.internal.ByteArrayChannel;
tikhomirov@226: import org.tmatesoft.hg.internal.Experimental;
tikhomirov@117: import org.tmatesoft.hg.internal.FilterByteChannel;
tikhomirov@248: import org.tmatesoft.hg.internal.ManifestRevision;
tikhomirov@229: import org.tmatesoft.hg.internal.PathScope;
tikhomirov@117: import org.tmatesoft.hg.util.ByteChannel;
tikhomirov@157: import org.tmatesoft.hg.util.CancelledException;
tikhomirov@287: import org.tmatesoft.hg.util.FileInfo;
tikhomirov@141: import org.tmatesoft.hg.util.FileIterator;
tikhomirov@226: import org.tmatesoft.hg.util.FileWalker;
tikhomirov@133: import org.tmatesoft.hg.util.Path;
tikhomirov@93: import org.tmatesoft.hg.util.PathPool;
tikhomirov@93: import org.tmatesoft.hg.util.PathRewrite;
tikhomirov@287: import org.tmatesoft.hg.util.RegularFileInfo;
tikhomirov@58:
tikhomirov@58: /**
tikhomirov@58:  * Collects status information (clean, modified, added, removed, missing, unknown, ignored, copied)
tikhomirov@58:  * for files in the working copy, comparing them against the dirstate and, optionally, a base revision.
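tikhomirov@58:  * <p>
tikhomirov@58:  * A minimal usage sketch (the {@code HgLookup}-based repository detection and the path are
tikhomirov@58:  * illustrative assumptions, not prescribed by this class):
tikhomirov@58:  * <pre>
tikhomirov@58:  *   HgRepository hgRepo = new HgLookup().detect(new File("/path/to/repo"));
tikhomirov@58:  *   HgWorkingCopyStatusCollector wcsc = new HgWorkingCopyStatusCollector(hgRepo);
tikhomirov@58:  *   // compare the working copy against its parent revision
tikhomirov@58:  *   HgStatusCollector.Record record = wcsc.status(HgRepository.WORKING_COPY);
tikhomirov@58:  * </pre>
tikhomirov@58:  *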
tikhomirov@74: * @author Artem Tikhomirov
tikhomirov@74: * @author TMate Software Ltd.
tikhomirov@58: */
tikhomirov@94: public class HgWorkingCopyStatusCollector {
tikhomirov@58:
tikhomirov@58: private final HgRepository repo;
tikhomirov@141: private final FileIterator repoWalker;
tikhomirov@59: private HgDirstate dirstate;
tikhomirov@94: private HgStatusCollector baseRevisionCollector;
tikhomirov@93: private PathPool pathPool;
tikhomirov@282: private ManifestRevision dirstateParentManifest;
tikhomirov@58:
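tikhomirov@94: /**
tikhomirov@94: * Construct a collector that walks the complete working copy of the given repository.
tikhomirov@94: *
tikhomirov@94: * @param hgRepo repository of interest
tikhomirov@94: */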
tikhomirov@94: public HgWorkingCopyStatusCollector(HgRepository hgRepo) {
tikhomirov@229: this(hgRepo, new HgInternals(hgRepo).createWorkingDirWalker(null));
tikhomirov@74: }
tikhomirov@74:
tikhomirov@229: /**
tikhomirov@229: * Construct a collector that walks only the files reported by the supplied iterator,
tikhomirov@229: * e.g. to limit the status operation to a subset of the working copy.
tikhomirov@229: *
tikhomirov@229: * @param hgRepo repository of interest
tikhomirov@229: * @param hgRepoWalker iterator over the working copy files to check
tikhomirov@229: */
tikhomirov@229: public HgWorkingCopyStatusCollector(HgRepository hgRepo, FileIterator hgRepoWalker) {
tikhomirov@218: repo = hgRepo;
tikhomirov@218: repoWalker = hgRepoWalker;
tikhomirov@58: }
tikhomirov@59:
tikhomirov@59: /**
tikhomirov@59: * Optionally, supply a collector instance that may cache (or has already cached) the base revision.
tikhomirov@59: * @param sc may be null
tikhomirov@59: */
tikhomirov@94: public void setBaseRevisionCollector(HgStatusCollector sc) {
tikhomirov@59: baseRevisionCollector = sc;
tikhomirov@59: }
tikhomirov@93:
tikhomirov@93: /*package-local*/ PathPool getPathPool() {
tikhomirov@93: if (pathPool == null) {
tikhomirov@93: if (baseRevisionCollector == null) {
tikhomirov@93: pathPool = new PathPool(new PathRewrite.Empty());
tikhomirov@93: } else {
tikhomirov@93: return baseRevisionCollector.getPathPool();
tikhomirov@93: }
tikhomirov@93: }
tikhomirov@93: return pathPool;
tikhomirov@93: }
tikhomirov@93:
tikhomirov@93: public void setPathPool(PathPool pathPool) {
tikhomirov@93: this.pathPool = pathPool;
tikhomirov@93: }
tikhomirov@93:
tikhomirov@290: /**
tikhomirov@290: * Access to directory state information this collector uses.
tikhomirov@290: * @return directory state holder, never null
tikhomirov@348: * @throws HgInvalidControlFileException if the dirstate file could not be read
tikhomirov@290: */
tikhomirov@348: public HgDirstate getDirstate() throws HgInvalidControlFileException {
tikhomirov@59: if (dirstate == null) {
tikhomirov@284: dirstate = repo.loadDirstate(getPathPool());
tikhomirov@59: }
tikhomirov@59: return dirstate;
tikhomirov@59: }
tikhomirov@275:
tikhomirov@348: private HgDirstate getDirstateImpl() {
tikhomirov@348: return dirstate;
tikhomirov@348: }
tikhomirov@348:
tikhomirov@282: private ManifestRevision getManifest(int changelogLocalRev) {
tikhomirov@284: assert changelogLocalRev >= 0;
tikhomirov@282: ManifestRevision mr;
tikhomirov@282: if (baseRevisionCollector != null) {
tikhomirov@282: mr = baseRevisionCollector.raw(changelogLocalRev);
tikhomirov@282: } else {
tikhomirov@282: mr = new ManifestRevision(null, null);
tikhomirov@282: repo.getManifest().walk(changelogLocalRev, changelogLocalRev, mr);
tikhomirov@282: }
tikhomirov@282: return mr;
tikhomirov@282: }
tikhomirov@354:
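tikhomirov@354: // Resolve the manifest of the dirstate's first parent; an empty manifest is used when the working copy has no parent (e.g. a fresh repository).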
tikhomirov@354: private void initDirstateParentManifest() throws HgInvalidControlFileException {
tikhomirov@354: Nodeid dirstateParent = getDirstateImpl().parents().first();
tikhomirov@354: if (dirstateParent.isNull()) {
tikhomirov@354: dirstateParentManifest = baseRevisionCollector != null ? baseRevisionCollector.raw(-1) : HgStatusCollector.createEmptyManifestRevision();
tikhomirov@354: } else {
tikhomirov@354: int changelogLocalRev = repo.getChangelog().getLocalRevision(dirstateParent);
tikhomirov@354: dirstateParentManifest = getManifest(changelogLocalRev);
tikhomirov@354: }
tikhomirov@354: }
tikhomirov@354:
tikhomirov@354: // The WC doesn't necessarily point to TIP; it may be the result of an update to any previous revision.
tikhomirov@354: // In such a case, we need to compare local files not with their TIP content, but with the specific version as of the selected revision
tikhomirov@282: private ManifestRevision getDirstateParentManifest() {
tikhomirov@282: return dirstateParentManifest;
tikhomirov@282: }
tikhomirov@282:
tikhomirov@282: // may be invoked multiple times; TIP or WORKING_COPY indicate the comparison shall be run against the working copy parent
tikhomirov@275: // NOTE, use of the TIP constant requires certain care. TIP here doesn't mean the latest changeset, but the actual working copy parent.
tikhomirov@350: public void walk(int baseRevision, HgStatusInspector inspector) throws IOException {
tikhomirov@282: if (HgInternals.wrongLocalRevision(baseRevision) || baseRevision == BAD_REVISION) {
tikhomirov@218: throw new IllegalArgumentException(String.valueOf(baseRevision));
tikhomirov@218: }
tikhomirov@354: try {
tikhomirov@354: if (getDirstateImpl() == null) {
tikhomirov@354: // XXX this is a hack to avoid declaring throws for #walk() at the moment.
tikhomirov@354: // Once I decide whether to have a mediator that collects errors or to use exceptions here,
tikhomirov@354: // this hack shall be removed in favor of either a severe error in the mediator or a re-thrown exception.
tikhomirov@354: getDirstate();
tikhomirov@348: }
tikhomirov@354: if (getDirstateParentManifest() == null) {
tikhomirov@354: initDirstateParentManifest();
tikhomirov@354: }
tikhomirov@354: } catch (HgInvalidControlFileException ex) {
tikhomirov@354: repo.getContext().getLog().error(getClass(), ex, "Failed to initialize with dirstate information");
tikhomirov@354: return;
tikhomirov@348: }
tikhomirov@282: ManifestRevision collect = null; // non null indicates we compare against base revision
tikhomirov@285: Set<Path> baseRevFiles = Collections.emptySet(); // files from the base revision not affected by status calculation
tikhomirov@282: if (baseRevision != TIP && baseRevision != WORKING_COPY) {
tikhomirov@282: collect = getManifest(baseRevision);
tikhomirov@285: baseRevFiles = new TreeSet<Path>(collect.files());
tikhomirov@58: }
tikhomirov@94: if (inspector instanceof HgStatusCollector.Record) {
tikhomirov@94: HgStatusCollector sc = baseRevisionCollector == null ? new HgStatusCollector(repo) : baseRevisionCollector;
tikhomirov@282: // nodeidAfterChange(dirstate's parent) doesn't make too much sense,
tikhomirov@282: // because the change might actually be in the working copy. Nevertheless,
tikhomirov@282: // as long as no nodeids can be provided for the WC, it seems reasonable to report
tikhomirov@282: // the latest known nodeid change (although at the moment this is not used and
tikhomirov@282: // is done mostly not to leave stale initialization in the Record)
tikhomirov@282: int rev1, rev2 = getDirstateParentManifest().changesetLocalRev();
tikhomirov@282: if (baseRevision == TIP || baseRevision == WORKING_COPY) {
tikhomirov@282: rev1 = rev2 - 1; // just use revision prior to dirstate's parent
tikhomirov@282: } else {
tikhomirov@282: rev1 = baseRevision;
tikhomirov@282: }
tikhomirov@282: ((HgStatusCollector.Record) inspector).init(rev1, rev2, sc);
tikhomirov@68: }
tikhomirov@282: final HgIgnore hgIgnore = repo.getIgnore();
tikhomirov@58: repoWalker.reset();
tikhomirov@293: TreeSet<Path> processed = new TreeSet<Path>(); // names of files we handled as they are known to the dirstate (not FileIterator)
tikhomirov@348: final HgDirstate ds = getDirstateImpl();
tikhomirov@293: TreeSet<Path> knownEntries = ds.all(); // here just to get the dirstate initialized
tikhomirov@58: while (repoWalker.hasNext()) {
tikhomirov@58: repoWalker.next();
tikhomirov@284: final Path fname = getPathPool().path(repoWalker.name());
tikhomirov@287: FileInfo f = repoWalker.file();
tikhomirov@293: Path knownInDirstate;
tikhomirov@226: if (!f.exists()) {
tikhomirov@226: // file coming from iterator doesn't exist.
tikhomirov@293: if ((knownInDirstate = ds.known(fname)) != null) {
tikhomirov@293: // found in dirstate
tikhomirov@293: processed.add(knownInDirstate);
tikhomirov@294: if (ds.checkRemoved(knownInDirstate) == null) {
tikhomirov@294: inspector.missing(knownInDirstate);
tikhomirov@226: } else {
tikhomirov@294: inspector.removed(knownInDirstate);
tikhomirov@226: }
tikhomirov@226: // do not report it as removed later
tikhomirov@226: if (collect != null) {
tikhomirov@294: baseRevFiles.remove(knownInDirstate);
tikhomirov@226: }
tikhomirov@226: } else {
tikhomirov@226: // chances are it was known in baseRevision. We may rely on the fact
tikhomirov@226: // that later iteration over baseRevFiles leftovers would yield the correct Removed,
tikhomirov@226: // but it doesn't hurt to be explicit (provided we know fname *is* in scope of the FileIterator)
tikhomirov@285: if (collect != null && baseRevFiles.remove(fname)) {
tikhomirov@226: inspector.removed(fname);
tikhomirov@226: } else {
tikhomirov@226: // Not sure I shall report such files (i.e. an arbitrary name coming from FileIterator)
tikhomirov@226: // as unknown. Command-line HG aborts with "system can't find the file specified"
tikhomirov@226: // in a similar case (against wc), or just gives nothing if --change is specified.
tikhomirov@226: // However, as it's unlikely to get nonexistent files from FileIterator, and
tikhomirov@226: // it's better to see an erroneous file status than not to see any (which is too easy
tikhomirov@226: // to overlook), I think unknown() is a reasonable approach here
tikhomirov@226: inspector.unknown(fname);
tikhomirov@226: }
tikhomirov@226: }
tikhomirov@226: continue;
tikhomirov@226: }
tikhomirov@293: if ((knownInDirstate = ds.known(fname)) != null) {
tikhomirov@226: // tracked file.
tikhomirov@58: // modified, added, removed, clean
tikhomirov@293: processed.add(knownInDirstate);
tikhomirov@58: if (collect != null) { // need to check against base revision, not FS file
tikhomirov@294: checkLocalStatusAgainstBaseRevision(baseRevFiles, collect, baseRevision, knownInDirstate, f, inspector);
tikhomirov@58: } else {
tikhomirov@294: checkLocalStatusAgainstFile(knownInDirstate, f, inspector);
tikhomirov@58: }
tikhomirov@58: } else {
tikhomirov@226: if (hgIgnore.isIgnored(fname)) { // hgignore shall be consulted only for non-tracked files
tikhomirov@226: inspector.ignored(fname);
tikhomirov@226: } else {
tikhomirov@226: inspector.unknown(fname);
tikhomirov@226: }
tikhomirov@226: // The file is not tracked. Even if it's known at baseRevision, we don't need to remove it
tikhomirov@226: // from baseRevFiles; it might need to be reported as removed as well (the cmdline client does
tikhomirov@226: // yield two statuses for the same file)
tikhomirov@58: }
tikhomirov@58: }
tikhomirov@58: if (collect != null) {
tikhomirov@285: for (Path fromBase : baseRevFiles) {
tikhomirov@226: if (repoWalker.inScope(fromBase)) {
tikhomirov@226: inspector.removed(fromBase);
tikhomirov@226: }
tikhomirov@58: }
tikhomirov@58: }
tikhomirov@293: knownEntries.removeAll(processed);
tikhomirov@284: for (Path m : knownEntries) {
tikhomirov@284: if (!repoWalker.inScope(m)) {
tikhomirov@226: // do not report as missing/removed those the FileIterator doesn't care about.
tikhomirov@226: continue;
tikhomirov@226: }
tikhomirov@74: // missing known file from a working dir
tikhomirov@293: if (ds.checkRemoved(m) == null) {
tikhomirov@74: // not removed from the repository = 'deleted'
tikhomirov@284: inspector.missing(m);
tikhomirov@74: } else {
tikhomirov@74: // removed from the repo
tikhomirov@76: // if we check against non-tip revision, do not report files that were added past that revision and now removed.
tikhomirov@285: if (collect == null || baseRevFiles.contains(m)) {
tikhomirov@284: inspector.removed(m);
tikhomirov@76: }
tikhomirov@58: }
tikhomirov@58: }
tikhomirov@58: }
tikhomirov@58:
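tikhomirov@350: /**
tikhomirov@350: * Convenience method: {@link #walk(int, HgStatusInspector) walk} the working copy against the given base revision
tikhomirov@350: * and collect the outcome into a {@link HgStatusCollector.Record}.
tikhomirov@350: */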
tikhomirov@350: public HgStatusCollector.Record status(int baseRevision) throws IOException {
tikhomirov@94: HgStatusCollector.Record rv = new HgStatusCollector.Record();
tikhomirov@58: walk(baseRevision, rv);
tikhomirov@58: return rv;
tikhomirov@58: }
tikhomirov@58:
tikhomirov@58: //********************************************
tikhomirov@58:
tikhomirov@58:
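tikhomirov@287: // Compare a dirstate-tracked file against its working copy counterpart only (no base revision involved):
tikhomirov@287: // cheap timestamp/size checks come first, actual content comparison is the last resort.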
tikhomirov@287: private void checkLocalStatusAgainstFile(Path fname, FileInfo f, HgStatusInspector inspector) {
tikhomirov@58: HgDirstate.Record r;
tikhomirov@348: if ((r = getDirstateImpl().checkNormal(fname)) != null) {
tikhomirov@58: // either clean or modified
tikhomirov@290: final boolean timestampEqual = f.lastModified() == r.modificationTime(), sizeEqual = r.size() == f.length();
tikhomirov@280: if (timestampEqual && sizeEqual) {
tikhomirov@280: inspector.clean(fname);
tikhomirov@290: } else if (!sizeEqual && r.size() >= 0) {
tikhomirov@280: inspector.modified(fname);
tikhomirov@58: } else {
tikhomirov@280: // size is the same or unknown, and, perhaps, different timestamp
tikhomirov@120: // check actual content to avoid false modified files
tikhomirov@120: HgDataFile df = repo.getFileNode(fname);
tikhomirov@337: if (!df.exists()) {
tikhomirov@337: String msg = String.format("File %s known as normal in dirstate (%d, %d), doesn't exist at %s", fname, r.modificationTime(), r.size(), repo.getStoragePath(df));
tikhomirov@337: throw new HgBadStateException(msg);
tikhomirov@337: }
tikhomirov@285: Nodeid rev = getDirstateParentManifest().nodeid(fname);
tikhomirov@314: // rev might be null here if fname comes to the dirstate as a result of a merge operation
tikhomirov@314: // where one of the parents (the first parent) had no fname file, but the second parent did.
tikhomirov@314: // E.g. fork at revision 3, revision 4 gets .hgtags, a few modifications and then merge(3,12);
tikhomirov@314: // see Issue 14 for details
tikhomirov@314: if (rev == null || !areTheSame(f, df, rev)) {
tikhomirov@120: inspector.modified(df.getPath());
tikhomirov@180: } else {
tikhomirov@180: inspector.clean(df.getPath());
tikhomirov@120: }
tikhomirov@58: }
tikhomirov@348: } else if ((r = getDirstateImpl().checkAdded(fname)) != null) {
tikhomirov@290: if (r.copySource() == null) {
tikhomirov@280: inspector.added(fname);
tikhomirov@58: } else {
tikhomirov@290: inspector.copied(r.copySource(), fname);
tikhomirov@58: }
tikhomirov@348: } else if ((r = getDirstateImpl().checkRemoved(fname)) != null) {
tikhomirov@280: inspector.removed(fname);
tikhomirov@348: } else if ((r = getDirstateImpl().checkMerged(fname)) != null) {
tikhomirov@280: inspector.modified(fname);
tikhomirov@58: }
tikhomirov@58: }
tikhomirov@58:
tikhomirov@58: // XXX refactor checkLocalStatus methods in more OO way
tikhomirov@287: private void checkLocalStatusAgainstBaseRevision(Set<Path> baseRevNames, ManifestRevision collect, int baseRevision, Path fname, FileInfo f, HgStatusInspector inspector) {
tikhomirov@58: // fname is in the dirstate, either Normal, Added, Removed or Merged
tikhomirov@285: Nodeid nid1 = collect.nodeid(fname);
tikhomirov@285: HgManifest.Flags flags = collect.flags(fname);
tikhomirov@58: HgDirstate.Record r;
tikhomirov@58: if (nid1 == null) {
tikhomirov@58: // normal: added?
tikhomirov@58: // added: not known at the time of baseRevision, shall report
tikhomirov@58: // merged: was not known, report as added?
tikhomirov@348: if ((r = getDirstateImpl().checkNormal(fname)) != null) {
tikhomirov@157: try {
tikhomirov@157: Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision);
tikhomirov@157: if (origin != null) {
tikhomirov@226: inspector.copied(getPathPool().path(origin), fname);
tikhomirov@157: return;
tikhomirov@157: }
tikhomirov@354: } catch (HgException ex) {
tikhomirov@157: repo.getContext().getLog().warn(getClass(), ex, null);
tikhomirov@157: // FIXME report to a mediator, continue status collection
tikhomirov@90: }
tikhomirov@348: } else if ((r = getDirstateImpl().checkAdded(fname)) != null) {
tikhomirov@290: if (r.copySource() != null && baseRevNames.contains(r.copySource())) {
tikhomirov@290: baseRevNames.remove(r.copySource()); // XXX surely I shall not report rename source as Removed?
tikhomirov@290: inspector.copied(r.copySource(), fname);
tikhomirov@58: return;
tikhomirov@58: }
tikhomirov@58: // fall-through, report as added
tikhomirov@348: } else if (getDirstateImpl().checkRemoved(fname) != null) {
tikhomirov@58: // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
tikhomirov@58: return;
tikhomirov@58: }
tikhomirov@226: inspector.added(fname);
tikhomirov@58: } else {
tikhomirov@58: // was known; check whether clean or modified
tikhomirov@285: Nodeid nidFromDirstate = getDirstateParentManifest().nodeid(fname);
tikhomirov@348: if ((r = getDirstateImpl().checkNormal(fname)) != null && nid1.equals(nidFromDirstate)) {
tikhomirov@282: // regular file, was the same up to WC initialization. Check if it was modified since and, if not, report right away
tikhomirov@282: // same code as in #checkLocalStatusAgainstFile
tikhomirov@290: final boolean timestampEqual = f.lastModified() == r.modificationTime(), sizeEqual = r.size() == f.length();
tikhomirov@282: boolean handled = false;
tikhomirov@280: if (timestampEqual && sizeEqual) {
tikhomirov@280: inspector.clean(fname);
tikhomirov@282: handled = true;
tikhomirov@290: } else if (!sizeEqual && r.size() >= 0) {
tikhomirov@280: inspector.modified(fname);
tikhomirov@282: handled = true;
tikhomirov@282: } else if (!todoCheckFlagsEqual(f, flags)) {
tikhomirov@282: // seems like flags have changed, no reason to check content further
tikhomirov@282: inspector.modified(fname);
tikhomirov@282: handled = true;
tikhomirov@282: }
tikhomirov@282: if (handled) {
tikhomirov@285: baseRevNames.remove(fname); // consumed, processed, handled.
tikhomirov@280: return;
tikhomirov@280: }
tikhomirov@282: // otherwise, shall check actual content (size not the same, or unknown (-1 or -2), or timestamp is different,
tikhomirov@282: // or nodeid in dirstate is different, but local change might have brought it back to baseRevision state)
tikhomirov@280: // FALL THROUGH
tikhomirov@280: }
tikhomirov@348: if (r != null || (r = getDirstateImpl().checkMerged(fname)) != null || (r = getDirstateImpl().checkAdded(fname)) != null) {
tikhomirov@282: // check actual content to see actual changes
tikhomirov@280: // when added - seems to be the case of a file added once again, hence need to check if content is different
tikhomirov@58: // either clean or modified
tikhomirov@282: HgDataFile fileNode = repo.getFileNode(fname);
tikhomirov@282: if (areTheSame(f, fileNode, nid1)) {
tikhomirov@282: inspector.clean(fname);
tikhomirov@58: } else {
tikhomirov@282: inspector.modified(fname);
tikhomirov@58: }
tikhomirov@285: baseRevNames.remove(fname); // consumed, processed, handled.
tikhomirov@348: } else if (getDirstateImpl().checkRemoved(fname) != null) {
tikhomirov@226: // was known, and now marked as removed, report it right away, do not rely on baseRevNames processing later
tikhomirov@226: inspector.removed(fname);
tikhomirov@285: baseRevNames.remove(fname); // consumed, processed, handled.
tikhomirov@58: }
tikhomirov@226: // only those left in baseRevNames after processing are reported as removed
tikhomirov@58: }
tikhomirov@58:
tikhomirov@58: // TODO think over whether content comparison may be done more efficiently by e.g. calculating a nodeid for the local file and comparing it with the nodeid from the manifest.
tikhomirov@58: // We don't need to tell the exact difference, a hash should be enough to detect a difference; it doesn't involve reading historical file content, and it's relatively
tikhomirov@58: // cheap to calc a hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the following approach is used for nodeids:
tikhomirov@58: // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest,
tikhomirov@58: // then it's sufficient to check the parents from the dirstate: if they do not match the parents from the file's baseRevision, the nodeids differ (non-matching parents mean different nodeids).
tikhomirov@58: // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
tikhomirov@58: }
tikhomirov@58:
tikhomirov@287: private boolean areTheSame(FileInfo f, HgDataFile dataFile, Nodeid revision) {
tikhomirov@157: // XXX consider adding an HgDataFile.compare(File/byte[]/whatever) operation to optimize comparison
tikhomirov@157: ByteArrayChannel bac = new ByteArrayChannel();
tikhomirov@157: boolean ioFailed = false;
tikhomirov@157: try {
tikhomirov@282: int localRevision = dataFile.getLocalRevision(revision);
tikhomirov@157: // need content with metadata stripped off - although theoretically chances are metadata may be different,
tikhomirov@157: // the WC doesn't have it anyway
tikhomirov@157: dataFile.content(localRevision, bac);
tikhomirov@157: } catch (CancelledException ex) {
tikhomirov@157: // silently ignore - can't happen, ByteArrayChannel is not cancellable
tikhomirov@157: } catch (HgException ex) {
tikhomirov@323: repo.getContext().getLog().warn(getClass(), ex, null);
tikhomirov@157: ioFailed = true;
tikhomirov@157: }
tikhomirov@157: return !ioFailed && areTheSame(f, bac.toArray(), dataFile.getPath());
tikhomirov@157: }
tikhomirov@157:
tikhomirov@287: private boolean areTheSame(FileInfo f, final byte[] data, Path p) {
tikhomirov@287: ReadableByteChannel is = null;
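tikhomirov@295: // Check consumes working copy bytes (after they pass the working-dir-to-repository filters below) and matches them,
tikhomirov@295: // position by position, against the repository content in data[]; a mismatch or length difference means 'modified'.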
tikhomirov@295: class Check implements ByteChannel {
tikhomirov@295: final boolean debug = repo.getContext().getLog().isDebug();
tikhomirov@295: boolean sameSoFar = true;
tikhomirov@295: int x = 0;
tikhomirov@219:
tikhomirov@295: public int write(ByteBuffer buffer) {
tikhomirov@295: for (int i = buffer.remaining(); i > 0; i--, x++) {
tikhomirov@295: if (x >= data.length /*file has been appended*/ || data[x] != buffer.get()) {
tikhomirov@295: if (debug) {
tikhomirov@295: byte[] xx = new byte[15];
tikhomirov@295: if (buffer.position() > 5) {
tikhomirov@295: buffer.position(buffer.position() - 5);
tikhomirov@117: }
tikhomirov@334: buffer.get(xx, 0, min(xx.length, i-1 /*-1 for the one potentially read at buffer.get in if() */));
tikhomirov@334: String exp;
tikhomirov@334: if (x < data.length) {
tikhomirov@334: exp = new String(data, max(0, x - 4), min(data.length - x, 20));
tikhomirov@334: } else {
tikhomirov@334: int offset = max(0, x - 4);
tikhomirov@334: exp = new String(data, offset, min(data.length - offset, 20));
tikhomirov@334: }
tikhomirov@334: repo.getContext().getLog().debug(getClass(), "expected >>%s<< but got >>%s<<", exp, new String(xx));
tikhomirov@117: }
tikhomirov@295: sameSoFar = false;
tikhomirov@295: break;
tikhomirov@219: }
tikhomirov@117: }
tikhomirov@295: buffer.position(buffer.limit()); // mark as read
tikhomirov@295: return buffer.limit();
tikhomirov@295: }
tikhomirov@295:
tikhomirov@295: public boolean sameSoFar() {
tikhomirov@295: return sameSoFar;
tikhomirov@295: }
tikhomirov@295: public boolean ultimatelyTheSame() {
tikhomirov@295: return sameSoFar && x == data.length;
tikhomirov@295: }
tikhomirov@295: };
tikhomirov@295: Check check = new Check();
tikhomirov@295: try {
tikhomirov@295: is = f.newInputChannel();
tikhomirov@295: ByteBuffer fb = ByteBuffer.allocate(min(1 + data.length * 2 /*to fit couple of lines appended; never zero*/, 8192));
tikhomirov@295: FilterByteChannel filters = new FilterByteChannel(check, repo.getFiltersFromWorkingDirToRepo(p));
tikhomirov@295: while (is.read(fb) != -1 && check.sameSoFar()) {
tikhomirov@295: fb.flip();
tikhomirov@295: filters.write(fb);
tikhomirov@295: fb.compact();
tikhomirov@295: }
tikhomirov@295: return check.ultimatelyTheSame();
tikhomirov@295: } catch (CancelledException ex) {
tikhomirov@295: repo.getContext().getLog().warn(getClass(), ex, "Unexpected cancellation");
tikhomirov@295: return check.ultimatelyTheSame();
tikhomirov@295: } catch (IOException ex) {
tikhomirov@295: repo.getContext().getLog().warn(getClass(), ex, null);
tikhomirov@295: } finally {
tikhomirov@295: if (is != null) {
tikhomirov@295: try {
tikhomirov@287: is.close();
tikhomirov@295: } catch (IOException ex) {
tikhomirov@295: repo.getContext().getLog().info(getClass(), ex, null);
tikhomirov@117: }
tikhomirov@117: }
tikhomirov@117: }
tikhomirov@117: return false;
tikhomirov@117: }
tikhomirov@117:
tikhomirov@287: private static boolean todoCheckFlagsEqual(FileInfo f, HgManifest.Flags originalManifestFlags) {
tikhomirov@58: // FIXME implement
tikhomirov@280: return true;
tikhomirov@58: }
tikhomirov@58:
tikhomirov@229: /**
tikhomirov@229: * Configure the status collector to consider only a subset of the working copy tree. Tries to be as efficient as possible, and to
tikhomirov@229: * traverse only the relevant part of the working copy on the filesystem.
tikhomirov@229: *
tikhomirov@229: * @param hgRepo repository
tikhomirov@229: * @param paths repository-relative files and/or directories. Directories are processed recursively.
tikhomirov@229: *
tikhomirov@229: * @return new instance of {@link HgWorkingCopyStatusCollector}, ready to {@link #walk(int, HgStatusInspector) walk} associated working copy
tikhomirov@229: */
tikhomirov@229: @Experimental(reason="Provisional API")
tikhomirov@229: public static HgWorkingCopyStatusCollector create(HgRepository hgRepo, Path... paths) {
tikhomirov@229: ArrayList<Path> f = new ArrayList<Path>(5);
tikhomirov@229: ArrayList<Path> d = new ArrayList<Path>(5);
tikhomirov@229: for (Path p : paths) {
tikhomirov@229: if (p.isDirectory()) {
tikhomirov@229: d.add(p);
tikhomirov@229: } else {
tikhomirov@229: f.add(p);
tikhomirov@229: }
tikhomirov@229: }
tikhomirov@229: // final Path[] dirs = f.toArray(new Path[d.size()]);
tikhomirov@229: if (d.isEmpty()) {
tikhomirov@229: final Path[] files = f.toArray(new Path[f.size()]);
tikhomirov@237: FileIterator fi = new FileListIterator(hgRepo.getWorkingDir(), files);
tikhomirov@229: return new HgWorkingCopyStatusCollector(hgRepo, fi);
tikhomirov@229: }
tikhomirov@229: //
tikhomirov@229:
tikhomirov@229: //FileIterator fi = file.isDirectory() ? new DirFileIterator(hgRepo, file) : new FileListIterator(, file);
tikhomirov@229: FileIterator fi = new HgInternals(hgRepo).createWorkingDirWalker(new PathScope(true, paths));
tikhomirov@226: return new HgWorkingCopyStatusCollector(hgRepo, fi);
tikhomirov@226: }
tikhomirov@229:
tikhomirov@229: /**
tikhomirov@229: * Configure the collector object to calculate status for matching files only.
tikhomirov@229: * This method may be less efficient than an explicit list of files, as it iterates over the whole repository
tikhomirov@229: * (thus the supplied matcher doesn't need to care whether directories leading to the files in question are also in scope,
tikhomirov@229: * see {@link FileWalker#FileWalker(File, Path.Source, Path.Matcher)})
tikhomirov@229: *
tikhomirov@229: * @return new instance of {@link HgWorkingCopyStatusCollector}, ready to {@link #walk(int, HgStatusInspector) walk} associated working copy
tikhomirov@229: */
tikhomirov@229: @Experimental(reason="Provisional API. May add boolean strict argument for those who write smart matchers that can be used in FileWalker")
tikhomirov@229: public static HgWorkingCopyStatusCollector create(HgRepository hgRepo, Path.Matcher scope) {
tikhomirov@229: FileIterator w = new HgInternals(hgRepo).createWorkingDirWalker(null);
tikhomirov@229: FileIterator wf = (scope == null || scope instanceof Path.Matcher.Any) ? w : new FileIteratorFilter(w, scope);
tikhomirov@229: // The reason I need to iterate over the full repo and apply the filter is that I have no idea whatsoever about
tikhomirov@229: // patterns in the scope. I.e. if the scope lists a file (PathGlobMatcher("a/b/c.txt")), FileWalker won't get deep
tikhomirov@229: // to the file unless the matcher would also explicitly include "a/", "a/b/" in scope. Since I can't rely on
tikhomirov@229: // users writing robust matchers, and I don't see a decent way to enforce that (i.e. a factory to produce a
tikhomirov@229: // correct matcher from a Path is much like what PathScope does, and can be accessed directly with the #create(repo, Path...)
tikhomirov@229: // method above)
tikhomirov@229: return new HgWorkingCopyStatusCollector(hgRepo, wf);
tikhomirov@229: }
tikhomirov@226:
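tikhomirov@226: // FileIterator over an explicit list of files (resolved against a start directory); no filesystem traversal involved.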
tikhomirov@226: private static class FileListIterator implements FileIterator {
tikhomirov@226: private final File dir;
tikhomirov@226: private final Path[] paths;
tikhomirov@226: private int index;
tikhomirov@287: private RegularFileInfo nextFile;
tikhomirov@226:
tikhomirov@226: public FileListIterator(File startDir, Path... files) {
tikhomirov@226: dir = startDir;
tikhomirov@226: paths = files;
tikhomirov@226: reset();
tikhomirov@226: }
tikhomirov@226:
tikhomirov@226: public void reset() {
tikhomirov@226: index = -1;
tikhomirov@287: nextFile = new RegularFileInfo();
tikhomirov@226: }
tikhomirov@226:
tikhomirov@226: public boolean hasNext() {
tikhomirov@226: return paths.length > 0 && index < paths.length-1;
tikhomirov@226: }
tikhomirov@226:
tikhomirov@226: public void next() {
tikhomirov@226: index++;
tikhomirov@226: if (index == paths.length) {
tikhomirov@226: throw new NoSuchElementException();
tikhomirov@226: }
tikhomirov@287: nextFile.init(new File(dir, paths[index].toString()));
tikhomirov@226: }
tikhomirov@226:
tikhomirov@226: public Path name() {
tikhomirov@226: return paths[index];
tikhomirov@226: }
tikhomirov@226:
tikhomirov@287: public FileInfo file() {
tikhomirov@226: return nextFile;
tikhomirov@226: }
tikhomirov@226:
tikhomirov@226: public boolean inScope(Path file) {
tikhomirov@226: for (int i = 0; i < paths.length; i++) {
tikhomirov@226: if (paths[i].equals(file)) {
tikhomirov@226: return true;
tikhomirov@226: }
tikhomirov@226: }
tikhomirov@226: return false;
tikhomirov@226: }
tikhomirov@226: }
tikhomirov@226:
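tikhomirov@229: // Decorates another FileIterator, passing through only the files accepted by the supplied matcher.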
tikhomirov@229: private static class FileIteratorFilter implements FileIterator {
tikhomirov@229: private final Path.Matcher filter;
tikhomirov@229: private final FileIterator walker;
tikhomirov@229: private boolean didNext = false;
tikhomirov@226:
tikhomirov@229: public FileIteratorFilter(FileIterator fileWalker, Path.Matcher filterMatcher) {
tikhomirov@229: assert fileWalker != null;
tikhomirov@229: assert filterMatcher != null;
tikhomirov@229: filter = filterMatcher;
tikhomirov@229: walker = fileWalker;
tikhomirov@226: }
tikhomirov@226:
tikhomirov@350: public void reset() throws IOException {
tikhomirov@226: walker.reset();
tikhomirov@226: }
tikhomirov@226:
tikhomirov@350: public boolean hasNext() throws IOException {
tikhomirov@229: while (walker.hasNext()) {
tikhomirov@229: walker.next();
tikhomirov@229: if (filter.accept(walker.name())) {
tikhomirov@229: didNext = true;
tikhomirov@229: return true;
tikhomirov@229: }
tikhomirov@229: }
tikhomirov@229: return false;
tikhomirov@226: }
tikhomirov@226:
tikhomirov@350: public void next() throws IOException {
tikhomirov@229: if (didNext) {
tikhomirov@229: didNext = false;
tikhomirov@229: } else {
tikhomirov@229: if (!hasNext()) {
tikhomirov@229: throw new NoSuchElementException();
tikhomirov@229: }
tikhomirov@229: }
tikhomirov@226: }
tikhomirov@226:
tikhomirov@226: public Path name() {
tikhomirov@226: return walker.name();
tikhomirov@226: }
tikhomirov@226:
tikhomirov@287: public FileInfo file() {
tikhomirov@226: return walker.file();
tikhomirov@226: }
tikhomirov@226:
tikhomirov@226: public boolean inScope(Path file) {
tikhomirov@229: return filter.accept(file);
tikhomirov@226: }
tikhomirov@226: }
tikhomirov@58: }