/*
 * Copyright (c) 2011 TMate Software Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * For information on how to redistribute this software under
 * the terms of a license other than GNU General Public License
 * contact TMate Software at support@hg4j.com
 */
package org.tmatesoft.hg.repo;

import static java.lang.Math.max;
import static java.lang.Math.min;
import static org.tmatesoft.hg.repo.HgRepository.*;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.TreeSet;

import org.tmatesoft.hg.core.HgDataStreamException;
import org.tmatesoft.hg.core.HgException;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.internal.ByteArrayChannel;
import org.tmatesoft.hg.internal.Experimental;
import org.tmatesoft.hg.internal.FilterByteChannel;
import org.tmatesoft.hg.internal.ManifestRevision;
import org.tmatesoft.hg.internal.PathScope;
import org.tmatesoft.hg.util.ByteChannel;
import org.tmatesoft.hg.util.CancelledException;
import org.tmatesoft.hg.util.FileIterator;
import org.tmatesoft.hg.util.FileWalker;
import org.tmatesoft.hg.util.Path;
import org.tmatesoft.hg.util.PathPool;
import org.tmatesoft.hg.util.PathRewrite;

/**
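 * Collects status (modified, added, removed, clean, missing, unknown, ignored, copied) of files in the
 * working directory against the dirstate and, optionally, a base revision.
 * <p>
 * A minimal usage sketch (illustrative only; the inspector is any {@link HgStatusInspector} implementation
 * supplied by the caller, and error handling is omitted):
 * <pre>
 *   HgWorkingCopyStatusCollector wcsc = new HgWorkingCopyStatusCollector(hgRepo);
 *   // collect everything into a Record, comparing against the dirstate (working copy) parent
 *   HgStatusCollector.Record record = wcsc.status(HgRepository.TIP);
 *   // or stream results to an inspector as they are discovered
 *   wcsc.walk(HgRepository.TIP, inspector);
 * </pre>
 *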
 * @author Artem Tikhomirov
 * @author TMate Software Ltd.
 */
public class HgWorkingCopyStatusCollector {

	private final HgRepository repo;
	private final FileIterator repoWalker;
	private HgDirstate dirstate;
	private HgStatusCollector baseRevisionCollector;
	private PathPool pathPool;
	private ManifestRevision dirstateParentManifest;

	public HgWorkingCopyStatusCollector(HgRepository hgRepo) {
		this(hgRepo, new HgInternals(hgRepo).createWorkingDirWalker(null));
	}

	// FIXME document cons
	public HgWorkingCopyStatusCollector(HgRepository hgRepo, FileIterator hgRepoWalker) {
		repo = hgRepo;
		repoWalker = hgRepoWalker;
	}

	/**
	 * Optionally, supply a collector instance that may cache (or have already cached) the base revision
	 * @param sc may be null
	 */
	public void setBaseRevisionCollector(HgStatusCollector sc) {
		baseRevisionCollector = sc;
	}

	/*package-local*/ PathPool getPathPool() {
		if (pathPool == null) {
			if (baseRevisionCollector == null) {
				pathPool = new PathPool(new PathRewrite.Empty());
			} else {
				return baseRevisionCollector.getPathPool();
			}
		}
		return pathPool;
	}

	public void setPathPool(PathPool pathPool) {
		this.pathPool = pathPool;
	}

	private HgDirstate getDirstate() {
		if (dirstate == null) {
			dirstate = repo.loadDirstate();
		}
		return dirstate;
	}

	// may be invoked a few times
	// NOTE: use of the TIP constant requires certain care. TIP here doesn't mean the latest cset, but the actual working copy parent.
	// XXX this shall be changed, though, and use of TIP throughout the code shall be revised -
	// consider the case when the repository is updated to one of its previous revisions. TIP points to the last change, but a lot of
	// commands need to work with the revision that is in the dirstate now.
	public void walk(int baseRevision, HgStatusInspector inspector) {
		if (HgInternals.wrongLocalRevision(baseRevision) || baseRevision == BAD_REVISION || baseRevision == WORKING_COPY) {
			throw new IllegalArgumentException(String.valueOf(baseRevision));
		}
		final HgIgnore hgIgnore = repo.getIgnore();
		TreeSet<String> knownEntries = getDirstate().all();
		if (baseRevision == TIP) {
			// WC doesn't necessarily point to TIP; it may be the result of an update to any previous revision.
			// In such a case, we need to compare local files not to their TIP content, but to the specific version at the selected revision.
			Nodeid dirstateParentRev = getDirstate().parents()[0];
			Nodeid lastCsetRev = repo.getChangelog().getRevision(HgRepository.TIP);
			if (lastCsetRev.equals(dirstateParentRev)) {
				baseRevision = repo.getChangelog().getLastRevision();
			} else {
				// could do it right away, but the explicit check above might save a few cycles (unless getLocalRevision(Nodeid) is efficient)
				baseRevision = repo.getChangelog().getLocalRevision(dirstateParentRev);
			}
		}
		final boolean isTipBase = baseRevision == repo.getChangelog().getLastRevision();
		ManifestRevision collect = null;
		Set<String> baseRevFiles = Collections.emptySet(); // files from base revision not affected by status calculation
		if (!isTipBase) {
			if (baseRevisionCollector != null) {
				collect = baseRevisionCollector.raw(baseRevision);
			} else {
				collect = new ManifestRevision(null, null);
				repo.getManifest().walk(baseRevision, baseRevision, collect);
			}
			baseRevFiles = new TreeSet<String>(collect.files());
		}
		if (inspector instanceof HgStatusCollector.Record) {
			HgStatusCollector sc = baseRevisionCollector == null ? new HgStatusCollector(repo) : baseRevisionCollector;
			((HgStatusCollector.Record) inspector).init(baseRevision, BAD_REVISION, sc);
		}
		repoWalker.reset();
		final PathPool pp = getPathPool();
		while (repoWalker.hasNext()) {
			repoWalker.next();
			Path fname = pp.path(repoWalker.name());
			File f = repoWalker.file();
			if (!f.exists()) {
				// file coming from the iterator doesn't exist.
				if (knownEntries.remove(fname.toString())) {
					if (getDirstate().checkRemoved(fname) == null) {
						inspector.missing(fname);
					} else {
						inspector.removed(fname);
					}
					// do not report it as removed later
					if (collect != null) {
						baseRevFiles.remove(fname.toString());
					}
				} else {
					// Chances are it was known in baseRevision. We may rely on the later iteration over
					// baseRevFiles leftovers to yield the correct Removed,
					// but it doesn't hurt to be explicit (provided we know fname *is* in scope of the FileIterator).
					if (collect != null && baseRevFiles.remove(fname.toString())) {
						inspector.removed(fname);
					} else {
						// Not sure I shall report such files (i.e. an arbitrary name coming from a FileIterator)
						// as unknown. Command-line hg aborts with "system can't find the file specified"
						// in a similar case (against wc), or just gives nothing if --change is specified.
						// However, as it's unlikely to get nonexistent files from a FileIterator, and
						// it's better to see an erroneous file status than no status at all (which is too easy
						// to overlook), I think unknown() is a reasonable approach here.
						inspector.unknown(fname);
					}
				}
				continue;
			}
			assert f.isFile();
			if (knownEntries.remove(fname.toString())) {
				// tracked file.
				// modified, added, removed, clean
				if (collect != null) { // need to check against base revision, not FS file
					checkLocalStatusAgainstBaseRevision(baseRevFiles, collect, baseRevision, fname, f, inspector);
				} else {
					checkLocalStatusAgainstFile(fname, f, inspector);
				}
			} else {
				if (hgIgnore.isIgnored(fname)) { // hgignore shall be consulted only for non-tracked files
					inspector.ignored(fname);
				} else {
					inspector.unknown(fname);
				}
				// The file is not tracked. Even if it's known at baseRevision, we don't need to remove it
				// from baseRevFiles, it might need to be reported as removed as well (the cmdline client does
				// yield two statuses for the same file).
			}
		}
		if (collect != null) {
			for (String r : baseRevFiles) {
				final Path fromBase = pp.path(r);
				if (repoWalker.inScope(fromBase)) {
					inspector.removed(fromBase);
				}
			}
		}
		for (String m : knownEntries) {
			if (!repoWalker.inScope(pp.path(m))) {
				// do not report as missing/removed those entries the FileIterator doesn't care about.
				continue;
			}
			// known file missing from the working dir
			if (getDirstate().checkRemoved(m) == null) {
				// not removed from the repository = 'deleted'
				inspector.missing(pp.path(m));
			} else {
				// removed from the repo
				// if we check against a non-tip revision, do not report files that were added past that revision and are now removed.
				if (collect == null || baseRevFiles.contains(m)) {
					inspector.removed(pp.path(m));
				}
			}
		}
	}

	public HgStatusCollector.Record status(int baseRevision) {
		HgStatusCollector.Record rv = new HgStatusCollector.Record();
		walk(baseRevision, rv);
		return rv;
	}

	//********************************************

	private void checkLocalStatusAgainstFile(Path fname, File f, HgStatusInspector inspector) {
		HgDirstate.Record r;
		if ((r = getDirstate().checkNormal(fname)) != null) {
			// either clean or modified
			if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
				inspector.clean(getPathPool().path(fname));
			} else {
				// check actual content to avoid false modified files
				HgDataFile df = repo.getFileNode(fname);
				if (!areTheSame(f, df, HgRepository.TIP)) {
					inspector.modified(df.getPath());
				} else {
					inspector.clean(df.getPath());
				}
			}
		} else if ((r = getDirstate().checkAdded(fname)) != null) {
			if (r.name2 == null) {
				inspector.added(getPathPool().path(fname));
			} else {
				inspector.copied(getPathPool().path(r.name2), getPathPool().path(fname));
			}
		} else if ((r = getDirstate().checkRemoved(fname)) != null) {
			inspector.removed(getPathPool().path(fname));
		} else if ((r = getDirstate().checkMerged(fname)) != null) {
			inspector.modified(getPathPool().path(fname));
		}
	}

	// XXX refactor checkLocalStatus methods in a more OO way
	private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, ManifestRevision collect, int baseRevision, Path fname, File f, HgStatusInspector inspector) {
		// fname is in the dirstate, either Normal, Added, Removed or Merged
		Nodeid nid1 = collect.nodeid(fname.toString());
		String flags = collect.flags(fname.toString());
		HgDirstate.Record r;
		if (nid1 == null) {
			// normal: added?
			// added: not known at the time of baseRevision, shall report
			// merged: was not known, report as added?
			if ((r = getDirstate().checkNormal(fname)) != null) {
				try {
					Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision);
					if (origin != null) {
						inspector.copied(getPathPool().path(origin), fname);
						return;
					}
				} catch (HgDataStreamException ex) {
					ex.printStackTrace();
					// FIXME report to a mediator, continue status collection
				}
			} else if ((r = getDirstate().checkAdded(fname)) != null) {
				if (r.name2 != null && baseRevNames.contains(r.name2)) {
					baseRevNames.remove(r.name2); // XXX surely I shall not report the rename source as Removed?
					inspector.copied(getPathPool().path(r.name2), fname);
					return;
				}
				// fall-through, report as added
			} else if (getDirstate().checkRemoved(fname) != null) {
				// removed: the removed file was not known at the time of baseRevision, and we should not report it as removed
				return;
			}
			inspector.added(fname);
		} else {
			// was known; check whether clean or modified
			// when added - seems to be the case of a file added once again, hence need to check if content is different
			if ((r = getDirstate().checkNormal(fname)) != null || (r = getDirstate().checkMerged(fname)) != null || (r = getDirstate().checkAdded(fname)) != null) {
				// either clean or modified
				HgDataFile fileNode = repo.getFileNode(fname);
				int lengthAtRevision;
				try {
					lengthAtRevision = fileNode.length(nid1);
				} catch (HgDataStreamException ex) {
					ex.printStackTrace(); // XXX log error
					lengthAtRevision = -1; // compare file content then
				}
				// XXX is it safe with respect to filters (keyword, eol) to compare lengthAtRevision (unprocessed) with the size
				// from the dirstate, which I assume is the size of processed data?
				if (r.size != -1 && r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
					inspector.modified(fname);
				} else {
					// check actual content to see actual changes
					if (areTheSame(f, fileNode, fileNode.getLocalRevision(nid1))) {
						inspector.clean(fname);
					} else {
						inspector.modified(fname);
					}
				}
				baseRevNames.remove(fname.toString()); // consumed, processed, handled.
			} else if (getDirstate().checkRemoved(fname) != null) {
				// was known, and is now marked as removed; report it right away, do not rely on baseRevNames processing later
				inspector.removed(fname);
				baseRevNames.remove(fname.toString()); // consumed, processed, handled.
			}
			// only those left in baseRevNames after processing are reported as removed
		}

		// TODO think over whether content comparison may be done more efficiently, e.g. by calculating a nodeid for the local file and comparing it with the nodeid from the manifest.
		// We don't need to tell the exact difference - a hash should be enough to detect one - it doesn't involve reading historical file content, and it's relatively
		// cheap to calculate a hash of a file (no need to keep it completely in memory). OTOH, if I'm right that the following approach is used for nodeids:
		// changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest,
		// then it's sufficient to check parents from the dirstate; if they do not match the parents from the file's baseRevision, the nodeids differ as well.
		// The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'.
	}

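	// Illustrative sketch for the TODO above (method name and signature are made up, not part of the collector's API):
	// Mercurial hashes a revlog entry as SHA-1 over the two parent nodeids (the lexicographically smaller 20-byte
	// binary nodeid first) followed by the revision content, so in principle a local file could be hashed and
	// compared against the manifest nodeid, provided the parents recorded for the base revision are known.
	@SuppressWarnings("unused")
	private static byte[] nodeidLikeHash(byte[] parent1, byte[] parent2, byte[] content) throws java.security.NoSuchAlgorithmException {
		byte[] first = parent1, second = parent2;
		// order parents as unsigned byte sequences, smaller one is hashed first
		for (int i = 0; i < first.length; i++) {
			int d = (parent1[i] & 0xFF) - (parent2[i] & 0xFF);
			if (d != 0) {
				if (d > 0) {
					first = parent2;
					second = parent1;
				}
				break;
			}
		}
		java.security.MessageDigest sha1 = java.security.MessageDigest.getInstance("SHA-1");
		sha1.update(first);
		sha1.update(second);
		return sha1.digest(content);
	}
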
	private boolean areTheSame(File f, HgDataFile dataFile, int localRevision) {
		// XXX consider adding an HgDataFile.compare(File/byte[]/whatever) operation to optimize comparison
		ByteArrayChannel bac = new ByteArrayChannel();
		boolean ioFailed = false;
		try {
			// need content with metadata stripped off - although theoretically chances are metadata may be different,
			// WC doesn't have it anyway
			dataFile.content(localRevision, bac);
		} catch (CancelledException ex) {
			// silently ignore - can't happen, ByteArrayChannel is not cancellable
		} catch (HgException ex) {
			ioFailed = true;
		}
		return !ioFailed && areTheSame(f, bac.toArray(), dataFile.getPath());
	}

	private boolean areTheSame(File f, final byte[] data, Path p) {
		FileInputStream fis = null;
		try {
			try {
				fis = new FileInputStream(f);
				FileChannel fc = fis.getChannel();
				ByteBuffer fb = ByteBuffer.allocate(min(1 + data.length * 2 /*to fit a couple of appended lines; never zero*/, 8192));
				class Check implements ByteChannel {
					final boolean debug = false; // XXX may want to add a global variable to allow clients to turn this on
					boolean sameSoFar = true;
					int x = 0;

					public int write(ByteBuffer buffer) {
						for (int i = buffer.remaining(); i > 0; i--, x++) {
							if (x >= data.length /*file has been appended*/ || data[x] != buffer.get()) {
								if (debug) {
									byte[] xx = new byte[15];
									if (buffer.position() > 5) {
										buffer.position(buffer.position() - 5);
									}
									buffer.get(xx);
									System.out.print("expected >>" + new String(data, max(0, x - 4), 20) + "<< but got >>");
									System.out.println(new String(xx) + "<<");
								}
								sameSoFar = false;
								break;
							}
						}
						buffer.position(buffer.limit()); // mark as read
						return buffer.limit();
					}

					public boolean sameSoFar() {
						return sameSoFar;
					}
					public boolean ultimatelyTheSame() {
						return sameSoFar && x == data.length;
					}
				};
				Check check = new Check();
				FilterByteChannel filters = new FilterByteChannel(check, repo.getFiltersFromWorkingDirToRepo(p));
				while (fc.read(fb) != -1 && check.sameSoFar()) {
					fb.flip();
					filters.write(fb);
					fb.compact();
				}
				fis.close();
				return check.ultimatelyTheSame();
			} catch (IOException ex) {
				if (fis != null) {
					fis.close();
				}
				ex.printStackTrace(); // log warn
			}
		} catch (/*TODO typed*/Exception ex) {
			ex.printStackTrace();
		}
		return false;
	}

	private static String todoGenerateFlags(Path fname) {
		// FIXME implement
		return null;
	}

	/**
	 * Configure the status collector to consider only a subset of the working copy tree. Tries to be as efficient as possible, and to
	 * traverse only the relevant part of the working copy on the filesystem.
	 * 
	 * @param hgRepo repository
	 * @param paths repository-relative files and/or directories. Directories are processed recursively.
	 * 
	 * @return new instance of {@link HgWorkingCopyStatusCollector}, ready to {@link #walk(int, HgStatusInspector) walk} the associated working copy
	 */
	@Experimental(reason="Provisional API")
	public static HgWorkingCopyStatusCollector create(HgRepository hgRepo, Path... paths) {
		ArrayList<Path> f = new ArrayList<Path>(5);
		ArrayList<Path> d = new ArrayList<Path>(5);
		for (Path p : paths) {
			if (p.isDirectory()) {
				d.add(p);
			} else {
				f.add(p);
			}
		}
//		final Path[] dirs = f.toArray(new Path[d.size()]);
		if (d.isEmpty()) {
			final Path[] files = f.toArray(new Path[f.size()]);
			FileIterator fi = new FileListIterator(hgRepo.getWorkingDir(), files);
			return new HgWorkingCopyStatusCollector(hgRepo, fi);
		}
		//
		
		//FileIterator fi = file.isDirectory() ? new DirFileIterator(hgRepo, file) : new FileListIterator(, file);
		FileIterator fi = new HgInternals(hgRepo).createWorkingDirWalker(new PathScope(true, paths));
		return new HgWorkingCopyStatusCollector(hgRepo, fi);
	}

	/**
	 * Configure the collector object to calculate status for matching files only.
	 * This method may be less efficient than an explicit list of files, as it iterates over the whole repository
	 * (thus the supplied matcher doesn't need to care whether directories leading to the files in question are also in scope,
	 * see {@link FileWalker#FileWalker(File, Path.Source, Path.Matcher)})
	 * 
	 * @return new instance of {@link HgWorkingCopyStatusCollector}, ready to {@link #walk(int, HgStatusInspector) walk} the associated working copy
	 */
	@Experimental(reason="Provisional API. May add boolean strict argument for those who write smart matchers that can be used in FileWalker")
	public static HgWorkingCopyStatusCollector create(HgRepository hgRepo, Path.Matcher scope) {
		FileIterator w = new HgInternals(hgRepo).createWorkingDirWalker(null);
		FileIterator wf = (scope == null || scope instanceof Path.Matcher.Any) ? w : new FileIteratorFilter(w, scope);
		// The reason I need to iterate over the full repo and apply the filter is that I have no idea whatsoever about
		// the patterns in the scope. I.e. if the scope lists a file (PathGlobMatcher("a/b/c.txt")), FileWalker won't get deep enough
		// to reach the file unless the matcher also explicitly includes "a/" and "a/b/" in the scope. Since I can't rely on
		// users writing robust matchers, and I don't see a decent way to enforce that (a factory that produces a correct
		// matcher from a Path would be much like what PathScope does, and that can be accessed directly with the
		// #create(repo, Path...) method above), iterating over the whole repository and filtering is the safer choice.
		return new HgWorkingCopyStatusCollector(hgRepo, wf);
	}

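	// Illustrative usage sketch for the two factory methods above (the paths, the Path.create(..) factory and the
	// inspector are placeholders/assumptions, shown only to indicate intent): limit the status calculation either
	// to an explicit set of files/directories, or to an arbitrary matcher.
	@SuppressWarnings("unused")
	private static void exampleScopedStatus(HgRepository hgRepo, HgStatusInspector inspector, Path.Matcher scope) {
		// explicit scope: directories (trailing slash) are traversed recursively, files are checked as-is
		HgWorkingCopyStatusCollector byPaths = create(hgRepo, Path.create("src/"), Path.create("build.xml"));
		byPaths.walk(TIP, inspector); // TIP is resolved to the dirstate (working copy) parent
		// matcher-based scope: iterates the whole working copy and filters names through the matcher
		HgWorkingCopyStatusCollector byMatcher = create(hgRepo, scope);
		byMatcher.walk(TIP, inspector);
	}
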
	private static class FileListIterator implements FileIterator {
		private final File dir;
		private final Path[] paths;
		private int index;
		private File nextFile; // cache file() in case it's called more than once

		public FileListIterator(File startDir, Path... files) {
			dir = startDir;
			paths = files;
			reset();
		}

		public void reset() {
			index = -1;
			nextFile = null;
		}

		public boolean hasNext() {
			return paths.length > 0 && index < paths.length-1;
		}

		public void next() {
			index++;
			if (index == paths.length) {
				throw new NoSuchElementException();
			}
			nextFile = new File(dir, paths[index].toString());
		}

		public Path name() {
			return paths[index];
		}

		public File file() {
			return nextFile;
		}

		public boolean inScope(Path file) {
			for (int i = 0; i < paths.length; i++) {
				if (paths[i].equals(file)) {
					return true;
				}
			}
			return false;
		}
	}

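	// Implementation note: hasNext() below advances the underlying walker until a name accepted by the filter
	// is found, so a successful hasNext() has already positioned the walker on the next element; the didNext
	// flag then makes the subsequent next() call a no-op.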
	private static class FileIteratorFilter implements FileIterator {
		private final Path.Matcher filter;
		private final FileIterator walker;
		private boolean didNext = false;

		public FileIteratorFilter(FileIterator fileWalker, Path.Matcher filterMatcher) {
			assert fileWalker != null;
			assert filterMatcher != null;
			filter = filterMatcher;
			walker = fileWalker;
		}

		public void reset() {
			walker.reset();
		}

		public boolean hasNext() {
			while (walker.hasNext()) {
				walker.next();
				if (filter.accept(walker.name())) {
					didNext = true;
					return true;
				}
			}
			return false;
		}

		public void next() {
			if (didNext) {
				didNext = false;
			} else {
				if (!hasNext()) {
					throw new NoSuchElementException();
				}
			}
		}

		public Path name() {
			return walker.name();
		}

		public File file() {
			return walker.file();
		}

		public boolean inScope(Path file) {
			return filter.accept(file);
		}
	}
}