changeset 348:a0864b2892cd
Expose errors reading mercurial control files with exception
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
| --- | --- |
| date | Thu, 24 Nov 2011 02:57:03 +0100 |
| parents | 8da7ade36c57 |
| children | bba9f52cacf3 |
| files | cmdline/org/tmatesoft/hg/console/Main.java, src/org/tmatesoft/hg/core/HgInvalidControlFileException.java, src/org/tmatesoft/hg/internal/SubrepoManager.java, src/org/tmatesoft/hg/repo/HgBranches.java, src/org/tmatesoft/hg/repo/HgDirstate.java, src/org/tmatesoft/hg/repo/HgInternals.java, src/org/tmatesoft/hg/repo/HgMergeState.java, src/org/tmatesoft/hg/repo/HgRepository.java, src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java, test/org/tmatesoft/hg/test/TestIgnore.java |
| diffstat | 10 files changed, 166 insertions(+), 130 deletions(-) |
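In short, failures to read repository control files (dirstate, the branch file, .hgsub/.hgsubstate, the merge state) are now reported to callers as a checked HgInvalidControlFileException instead of being logged and swallowed, HgMergeState.refresh() no longer throws a bare IOException, and HgDirstate is populated eagerly through an explicit read() call. A minimal sketch of the resulting client-side pattern follows; it is not part of the changeset, and the HgRepository instance is assumed to be obtained elsewhere (e.g. via the library's lookup facilities). The per-file hunks follow the sketch.

```java
import org.tmatesoft.hg.core.HgInvalidControlFileException;
import org.tmatesoft.hg.repo.HgRepository;

class WorkingCopyInfo {
    // hgRepo is assumed to be obtained elsewhere; only the two accessors below come from this changeset
    static void print(HgRepository hgRepo) {
        try {
            // both accessors declare HgInvalidControlFileException after this changeset
            System.out.println("branch:  " + hgRepo.getWorkingCopyBranchName());
            System.out.println("parents: " + hgRepo.getWorkingCopyParents());
        } catch (HgInvalidControlFileException ex) {
            // the exception identifies the unreadable control file, so it can be reported precisely
            System.err.println("Repository metadata could not be read: " + ex.getMessage());
        }
    }
}
```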
--- a/cmdline/org/tmatesoft/hg/console/Main.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/cmdline/org/tmatesoft/hg/console/Main.java	Thu Nov 24 02:57:03 2011 +0100
@@ -420,9 +420,22 @@
             System.out.println("Ignored " + toCheck[i] + ": " + ignore.isIgnored(Path.create(toCheck[i])));
         }
     }
+
+    static class DirstateDump implements HgDirstate.Inspector {
+        private final char[] x = new char[] {'n', 'a', 'r', 'm' };
+
+        public boolean next(EntryKind kind, Record entry) {
+            System.out.printf("%c %3o%6d %30tc\t\t%s", x[kind.ordinal()], entry.mode(), entry.size(), (long) entry.modificationTime() * 1000, entry.name());
+            if (entry.copySource() != null) {
+                System.out.printf(" --> %s", entry.copySource());
+            }
+            System.out.println();
+            return true;
+        }
+    }
 
-    private void dumpDirstate() {
-        new HgInternals(hgRepo).dumpDirstate();
+    private void dumpDirstate() throws Exception {
+        new HgInternals(hgRepo).getDirstate().walk(new DirstateDump());
         HgWorkingCopyStatusCollector wcc = HgWorkingCopyStatusCollector.create(hgRepo, new Path.Matcher.Any());
         wcc.getDirstate().walk(new HgDirstate.Inspector() {
@@ -508,8 +521,6 @@
     }
 
     private void bunchOfTests() throws Exception {
-        HgInternals debug = new HgInternals(hgRepo);
-        debug.dumpDirstate();
         final StatusDump dump = new StatusDump();
         dump.showIgnored = false;
         dump.showClean = false;
--- a/src/org/tmatesoft/hg/core/HgInvalidControlFileException.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/src/org/tmatesoft/hg/core/HgInvalidControlFileException.java	Thu Nov 24 02:57:03 2011 +0100
@@ -36,4 +36,9 @@
         super(message, th, file);
     }
 
+    @Override
+    public HgInvalidControlFileException setFile(File file) {
+        super.setFile(file);
+        return this;
+    }
 }
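The override narrows the return type of setFile(...) (declared in the parent exception class), so a call site can attach the offending file fluently and still throw the more specific type. A hypothetical helper illustrating the pattern; only the three-argument constructor and setFile(...) are taken from this changeset:

```java
import java.io.File;
import java.io.IOException;

import org.tmatesoft.hg.core.HgInvalidControlFileException;

class ControlFileErrors {
    // hypothetical helper, not part of the changeset: without the covariant override,
    // setFile(..) would yield the parent exception type and the narrower type would be lost
    static HgInvalidControlFileException dirstateReadFailure(IOException cause, File dirstateFile) {
        return new HgInvalidControlFileException("Dirstate read failed", cause, null).setFile(dirstateFile);
    }
}
```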
--- a/src/org/tmatesoft/hg/internal/SubrepoManager.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/src/org/tmatesoft/hg/internal/SubrepoManager.java	Thu Nov 24 02:57:03 2011 +0100
@@ -27,7 +27,7 @@
 import java.util.List;
 import java.util.Map;
 
-import org.tmatesoft.hg.repo.HgInternals;
+import org.tmatesoft.hg.core.HgInvalidControlFileException;
 import org.tmatesoft.hg.repo.HgRepository;
 import org.tmatesoft.hg.repo.HgSubrepoLocation;
 
@@ -46,25 +46,29 @@
         repo = hgRepo;
     }
 
-    private List<HgSubrepoLocation> readActualState() {
+    private List<HgSubrepoLocation> readActualState() throws HgInvalidControlFileException {
         File hgsubFile = new File(repo.getWorkingDir(), ".hgsub");
         if (!hgsubFile.canRead()) {
             return Collections.emptyList();
         }
+        Map<String, String> state; // path -> revision
+        File hgstateFile = null;
         try {
-            Map<String, String> state; // path -> revision
-            File hgstateFile = new File(repo.getWorkingDir(), ".hgsubstate");
+            hgstateFile = new File(repo.getWorkingDir(), ".hgsubstate");
             if (hgstateFile.canRead()) {
                 state = readState(new BufferedReader(new FileReader(hgstateFile)));
             } else {
                 state = Collections.emptyMap();
             }
+        } catch (IOException ex) {
+            throw new HgInvalidControlFileException("Subrepo state read failed", ex, hgstateFile);
+        }
+        try {
             BufferedReader br = new BufferedReader(new FileReader(hgsubFile));
             return readConfig(br, state);
         } catch (IOException ex) {
-            HgInternals.getContext(repo).getLog().error(getClass(), ex, "Subrepo state read failed");
+            throw new HgInvalidControlFileException("Subrepo state read failed", ex, hgsubFile);
        }
-        return Collections.emptyList();
     }
 
     private List<HgSubrepoLocation> readConfig(BufferedReader br, Map<String, String> substate) throws IOException {
@@ -121,10 +125,13 @@
         return rv;
     }
 
+    /*public to allow access from HgRepository, otherwise package-local*/
+    public void read() throws HgInvalidControlFileException {
+        subRepos = readActualState();
+    }
+
     public List<HgSubrepoLocation> all(/*int revision, or TIP|WC*/) {
-        if (subRepos == null) {
-            subRepos = readActualState();
-        }
+        assert subRepos != null;
         return subRepos;
     }
 }
--- a/src/org/tmatesoft/hg/repo/HgBranches.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/HgBranches.java	Thu Nov 24 02:57:03 2011 +0100
@@ -54,7 +54,7 @@
         repo = hgRepo;
     }
 
-    private int readCache() /*XXX throws parse errors, e.g. may fail with NumberFormatException */{
+    private int readCache() {
         File branchheadsCache = getCacheFile();
         int lastInCache = -1;
         if (!branchheadsCache.canRead()) {
@@ -97,7 +97,12 @@
             }
             return lastInCache;
         } catch (IOException ex) {
-            repo.getContext().getLog().warn(getClass(), ex, null); // log error, but otherwise do nothing
+            // log error, but otherwise do nothing
+            repo.getContext().getLog().warn(getClass(), ex, null);
+            // FALL THROUGH to return -1 indicating no cache information
+        } catch (NumberFormatException ex) {
+            repo.getContext().getLog().warn(getClass(), ex, null);
+            // FALL THROUGH
         } finally {
             if (br != null) {
                 try {
--- a/src/org/tmatesoft/hg/repo/HgDirstate.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/HgDirstate.java	Thu Nov 24 02:57:03 2011 +0100
@@ -20,6 +20,7 @@
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.FileReader;
 import java.io.IOException;
 import java.util.Collections;
@@ -28,7 +29,7 @@
 import java.util.Map;
 import java.util.TreeSet;
 
-import org.tmatesoft.hg.core.HgBadStateException;
+import org.tmatesoft.hg.core.HgInvalidControlFileException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.DataAccess;
 import org.tmatesoft.hg.util.Pair;
@@ -73,7 +74,7 @@
         canonicalPathRewrite = canonicalPath;
     }
 
-    private void read() {
+    /*package-local*/ void read() throws HgInvalidControlFileException {
         normal = added = removed = merged = Collections.<Path, Record>emptyMap();
         if (canonicalPathRewrite != null) {
             canonical2dirstateName = new HashMap<Path,Path>();
@@ -144,8 +145,7 @@
                 }
             }
         } catch (IOException ex) {
-            repo.getContext().getLog().error(getClass(), ex, null);
-            // FIXME clean dirstate?
+            throw new HgInvalidControlFileException("Dirstate read failed", ex, dirstateFile);
         } finally {
             da.done();
         }
@@ -164,16 +164,14 @@
      * @return pair of working copy parents, with {@link Nodeid#NULL} for missing values.
      */
     public Pair<Nodeid,Nodeid> parents() {
-        if (parents == null) {
-            parents = readParents(repo, dirstateFile);
-        }
+        assert parents != null; // instance not initialized with #read()
         return parents;
     }
 
     /**
     * @return pair of parents, both {@link Nodeid#NULL} if dirstate is not available
     */
-    /*package-local*/ static Pair<Nodeid, Nodeid> readParents(HgRepository repo, File dirstateFile) {
+    /*package-local*/ static Pair<Nodeid, Nodeid> readParents(HgRepository repo, File dirstateFile) throws HgInvalidControlFileException {
         // do not read whole dirstate if all we need is WC parent information
         if (dirstateFile == null || !dirstateFile.exists()) {
             return new Pair<Nodeid,Nodeid>(NULL, NULL);
@@ -185,16 +183,17 @@
         try {
             return internalReadParents(da);
         } catch (IOException ex) {
-            throw new HgBadStateException(ex); // XXX in fact, our exception is not the best solution here.
+            throw new HgInvalidControlFileException("Error reading working copy parents from dirstate", ex, dirstateFile);
         } finally {
             da.done();
         }
     }
 
     /**
+     * FIXME move to a better place, e.g. WorkingCopy container that tracks both dirstate and branches (and, perhaps, undo, lastcommit and other similar information)
     * @return branch associated with the working directory
     */
-    public String branch() {
+    public String branch() throws HgInvalidControlFileException {
         // XXX is it really proper place for the method?
         if (currentBranch == null) {
             currentBranch = readBranch(repo);
@@ -206,7 +205,7 @@
     * XXX is it really proper place for the method?
     * @return branch associated with the working directory
     */
-    /*package-local*/ static String readBranch(HgRepository repo) {
+    /*package-local*/ static String readBranch(HgRepository repo) throws HgInvalidControlFileException {
         String branch = HgRepository.DEFAULT_BRANCH_NAME;
         File branchFile = new File(repo.getRepositoryRoot(), "branch");
         if (branchFile.exists()) {
@@ -218,9 +217,11 @@
                 }
                 branch = b == null || b.length() == 0 ? HgRepository.DEFAULT_BRANCH_NAME : b;
                 r.close();
+            } catch (FileNotFoundException ex) {
+                repo.getContext().getLog().debug(HgDirstate.class, ex, null); // log verbose debug, exception might be legal here
+                // IGNORE
             } catch (IOException ex) {
-                repo.getContext().getLog().debug(HgDirstate.class, ex, null); // log verbose debug, exception might be legal here (i.e. FileNotFound)
-                // IGNORE
+                throw new HgInvalidControlFileException("Error reading file with branch information", ex, branchFile);
             }
         }
         return branch;
@@ -228,9 +229,7 @@
     // new, modifiable collection
     /*package-local*/ TreeSet<Path> all() {
-        if (normal == null) {
-            read();
-        }
+        assert normal != null;
         TreeSet<Path> rv = new TreeSet<Path>();
         @SuppressWarnings("unchecked")
         Map<Path, Record>[] all = new Map[] { normal, added, removed, merged };
@@ -299,28 +298,8 @@
         return null;
     }
 
-    /*package-local*/ void dump() {
-        read();
-        @SuppressWarnings("unchecked")
-        Map<Path, Record>[] all = new Map[] { normal, added, removed, merged };
-        char[] x = new char[] {'n', 'a', 'r', 'm' };
-        for (int i = 0; i < all.length; i++) {
-            for (Record r : all[i].values()) {
-                System.out.printf("%c %3o%6d %30tc\t\t%s", x[i], r.mode, r.size, (long) r.time * 1000, r.name1);
-                if (r.name2 != null) {
-                    System.out.printf(" --> %s", r.name2);
-                }
-                System.out.println();
-            }
-            System.out.println();
-        }
-    }
-
     public void walk(Inspector inspector) {
-        if (normal == null) {
-            read();
-        }
+        assert normal != null;
         @SuppressWarnings("unchecked")
         Map<Path, Record>[] all = new Map[] { normal, added, removed, merged };
         for (int i = 0; i < all.length; i++) {
@@ -377,6 +356,10 @@
             return size;
         }
 
+        public int mode() {
+            return mode;
+        }
+
         @Override
         public Record clone() {
             try {
--- a/src/org/tmatesoft/hg/repo/HgInternals.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/HgInternals.java	Thu Nov 24 02:57:03 2011 +0100
@@ -25,6 +25,7 @@
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 
+import org.tmatesoft.hg.core.HgInvalidControlFileException;
 import org.tmatesoft.hg.core.HgInvalidRevisionException;
 import org.tmatesoft.hg.core.SessionContext;
 import org.tmatesoft.hg.internal.Experimental;
@@ -54,16 +55,12 @@
         repo = hgRepo;
     }
 
-    public void dumpDirstate() {
-        getDirstate().dump();
-    }
-
-    public HgDirstate getDirstate() {
+    public HgDirstate getDirstate() throws HgInvalidControlFileException {
         return repo.loadDirstate(new PathPool(new PathRewrite.Empty()));
     }
 
     // tests
-    public HgDirstate createDirstate(boolean caseSensitiveFileSystem) {
+    public HgDirstate createDirstate(boolean caseSensitiveFileSystem) throws HgInvalidControlFileException {
         PathRewrite canonicalPath = null;
         if (!caseSensitiveFileSystem) {
             canonicalPath = new PathRewrite() {
@@ -73,7 +70,9 @@
                 }
             };
         }
-        return new HgDirstate(repo, new File(repo.getRepositoryRoot(), "dirstate"), new PathPool(new PathRewrite.Empty()), canonicalPath);
+        HgDirstate ds = new HgDirstate(repo, new File(repo.getRepositoryRoot(), "dirstate"), new PathPool(new PathRewrite.Empty()), canonicalPath);
+        ds.read();
+        return ds;
     }
 
     public Path[] checkKnown(HgDirstate dirstate, Path[] toCheck) {
--- a/src/org/tmatesoft/hg/repo/HgMergeState.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/HgMergeState.java	Thu Nov 24 02:57:03 2011 +0100
@@ -29,6 +29,7 @@
 import org.tmatesoft.hg.core.HgBadStateException;
 import org.tmatesoft.hg.core.HgFileRevision;
+import org.tmatesoft.hg.core.HgInvalidControlFileException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.ManifestRevision;
 import org.tmatesoft.hg.internal.Pool;
@@ -91,7 +92,7 @@
         repo = hgRepo;
     }
 
-    public void refresh() throws IOException/*XXX it's unlikely caller can do anything reasonable about IOException */ {
+    public void refresh() throws HgInvalidControlFileException {
         entries = null;
         // it's possible there are two parents but no merge/state, we shall report this case as 'merging', with proper
         // first and second parent values
@@ -105,60 +106,64 @@
             // empty state
             return;
         }
-        ArrayList<Entry> result = new ArrayList<Entry>();
-        // FIXME need to settle use of Pool<Path> and PathPool
-        // latter is pool that can create objects on demand, former is just cache
-        PathPool pathPool = new PathPool(new PathRewrite.Empty());
-        final ManifestRevision m1 = new ManifestRevision(nodeidPool, fnamePool);
-        final ManifestRevision m2 = new ManifestRevision(nodeidPool, fnamePool);
-        if (!wcp2.isNull()) {
-            final int rp2 = repo.getChangelog().getLocalRevision(wcp2);
-            repo.getManifest().walk(rp2, rp2, m2);
-        }
-        BufferedReader br = new BufferedReader(new FileReader(f));
-        String s = br.readLine();
-        stateParent = nodeidPool.unify(Nodeid.fromAscii(s));
-        final int rp1 = repo.getChangelog().getLocalRevision(stateParent);
-        repo.getManifest().walk(rp1, rp1, m1);
-        while ((s = br.readLine()) != null) {
-            String[] r = s.split("\\00");
-            Path p1fname = pathPool.path(r[3]);
-            Nodeid nidP1 = m1.nodeid(p1fname);
-            Nodeid nidCA = nodeidPool.unify(Nodeid.fromAscii(r[5]));
-            HgFileRevision p1 = new HgFileRevision(repo, nidP1, p1fname);
-            HgFileRevision ca;
-            if (nidCA == nidP1 && r[3].equals(r[4])) {
-                ca = p1;
-            } else {
-                ca = new HgFileRevision(repo, nidCA, pathPool.path(r[4]));
+        try {
+            ArrayList<Entry> result = new ArrayList<Entry>();
+            // FIXME need to settle use of Pool<Path> and PathPool
+            // latter is pool that can create objects on demand, former is just cache
+            PathPool pathPool = new PathPool(new PathRewrite.Empty());
+            final ManifestRevision m1 = new ManifestRevision(nodeidPool, fnamePool);
+            final ManifestRevision m2 = new ManifestRevision(nodeidPool, fnamePool);
+            if (!wcp2.isNull()) {
+                final int rp2 = repo.getChangelog().getLocalRevision(wcp2);
+                repo.getManifest().walk(rp2, rp2, m2);
            }
-            HgFileRevision p2;
-            if (!wcp2.isNull() || !r[6].equals(r[4])) {
-                final Path p2fname = pathPool.path(r[6]);
-                Nodeid nidP2 = m2.nodeid(p2fname);
-                if (nidP2 == null) {
-                    assert false : "There's not enough information (or I don't know where to look) in merge/state to find out what's the second parent";
-                    nidP2 = NULL;
+            BufferedReader br = new BufferedReader(new FileReader(f));
+            String s = br.readLine();
+            stateParent = nodeidPool.unify(Nodeid.fromAscii(s));
+            final int rp1 = repo.getChangelog().getLocalRevision(stateParent);
+            repo.getManifest().walk(rp1, rp1, m1);
+            while ((s = br.readLine()) != null) {
+                String[] r = s.split("\\00");
+                Path p1fname = pathPool.path(r[3]);
+                Nodeid nidP1 = m1.nodeid(p1fname);
+                Nodeid nidCA = nodeidPool.unify(Nodeid.fromAscii(r[5]));
+                HgFileRevision p1 = new HgFileRevision(repo, nidP1, p1fname);
+                HgFileRevision ca;
+                if (nidCA == nidP1 && r[3].equals(r[4])) {
+                    ca = p1;
+                } else {
+                    ca = new HgFileRevision(repo, nidCA, pathPool.path(r[4]));
                }
-                p2 = new HgFileRevision(repo, nidP2, p2fname);
-            } else {
-                // no second parent known. no idea what to do here, assume linear merge, use common ancestor as parent
-                p2 = ca;
+                HgFileRevision p2;
+                if (!wcp2.isNull() || !r[6].equals(r[4])) {
+                    final Path p2fname = pathPool.path(r[6]);
+                    Nodeid nidP2 = m2.nodeid(p2fname);
+                    if (nidP2 == null) {
+                        assert false : "There's not enough information (or I don't know where to look) in merge/state to find out what's the second parent";
+                        nidP2 = NULL;
+                    }
+                    p2 = new HgFileRevision(repo, nidP2, p2fname);
+                } else {
+                    // no second parent known. no idea what to do here, assume linear merge, use common ancestor as parent
+                    p2 = ca;
+                }
+                final Kind k;
+                if ("u".equals(r[1])) {
+                    k = Kind.Unresolved;
+                } else if ("r".equals(r[1])) {
+                    k = Kind.Resolved;
+                } else {
+                    throw new HgBadStateException(r[1]);
+                }
+                Entry e = new Entry(k, pathPool.path(r[0]), p1, p2, ca);
+                result.add(e);
            }
-            final Kind k;
-            if ("u".equals(r[1])) {
-                k = Kind.Unresolved;
-            } else if ("r".equals(r[1])) {
-                k = Kind.Resolved;
-            } else {
-                throw new HgBadStateException(r[1]);
-            }
-            Entry e = new Entry(k, pathPool.path(r[0]), p1, p2, ca);
-            result.add(e);
+            entries = result.toArray(new Entry[result.size()]);
+            br.close();
+            pathPool.clear();
+        } catch (IOException ex) {
+            throw new HgInvalidControlFileException("Merge state read failed", ex, f);
        }
-        entries = result.toArray(new Entry[result.size()]);
-        br.close();
-        pathPool.clear();
     }
 
     /**
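Since refresh() now reports problems with the merge state file as HgInvalidControlFileException rather than IOException, callers no longer need a blanket I/O handler. A sketch under assumptions: the merge state object is presumed to come from the repository via an accessor such as getMergeState(), which is not part of this diff.

```java
import org.tmatesoft.hg.core.HgInvalidControlFileException;
import org.tmatesoft.hg.repo.HgMergeState;
import org.tmatesoft.hg.repo.HgRepository;

class MergeStateCheck {
    static void loadMergeState(HgRepository hgRepo) {
        HgMergeState ms = hgRepo.getMergeState(); // assumed accessor, not shown in this changeset
        try {
            ms.refresh(); // declared `throws IOException` before this changeset
            System.out.println("merge state loaded");
        } catch (HgInvalidControlFileException ex) {
            System.err.println("Can't read merge state: " + ex.getMessage());
        }
    }
}
```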
--- a/src/org/tmatesoft/hg/repo/HgRepository.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/HgRepository.java	Thu Nov 24 02:57:03 2011 +0100
@@ -251,16 +251,18 @@
     }
 
     /**
-     * @return pair of values, {@link Pair#first()} and {@link Pair#second()} are respective parents, never <code>null</code>.
+     * @return pair of values, {@link Pair#first()} and {@link Pair#second()} are respective parents, never <code>null</code>.
+     * @throws HgInvalidControlFileException if attempt to read information about working copy parents from dirstate failed
     */
-    public Pair<Nodeid,Nodeid> getWorkingCopyParents() {
+    public Pair<Nodeid,Nodeid> getWorkingCopyParents() throws HgInvalidControlFileException {
         return HgDirstate.readParents(this, new File(repoDir, "dirstate"));
     }
 
     /**
     * @return name of the branch associated with working directory, never <code>null</code>.
+     * @throws HgInvalidControlFileException if attempt to read branch name failed.
     */
-    public String getWorkingCopyBranchName() {
+    public String getWorkingCopyBranchName() throws HgInvalidControlFileException {
         return HgDirstate.readBranch(this);
     }
@@ -276,9 +278,10 @@
     * known, not recursive collection of all nested sub-repositories.
     * @return list of all known sub-repositories in this repository, or empty list if none found.
     */
-    public List<HgSubrepoLocation> getSubrepositories() {
+    public List<HgSubrepoLocation> getSubrepositories() throws HgInvalidControlFileException {
         if (subRepos == null) {
             subRepos = new SubrepoManager(this);
+            subRepos.read();
         }
         return subRepos.all();
     }
@@ -311,7 +314,7 @@
     // XXX package-local, unless there are cases when required from outside (guess, working dir/revision walkers may hide dirstate access and no public visibility needed)
     // XXX consider passing Path pool or factory to produce (shared) Path instead of Strings
-    /*package-local*/ final HgDirstate loadDirstate(PathPool pathPool) {
+    /*package-local*/ final HgDirstate loadDirstate(PathPool pathPool) throws HgInvalidControlFileException {
         PathRewrite canonicalPath = null;
         if (!isCaseSensitiveFileSystem) {
             canonicalPath = new PathRewrite() {
@@ -321,7 +324,9 @@
                 }
             };
         }
-        return new HgDirstate(this, new File(repoDir, "dirstate"), pathPool, canonicalPath);
+        HgDirstate ds = new HgDirstate(this, new File(repoDir, "dirstate"), pathPool, canonicalPath);
+        ds.read();
+        return ds;
     }
 
     /**
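With SubrepoManager.read() invoked eagerly from getSubrepositories(), a failure to read .hgsub/.hgsubstate now surfaces to the caller instead of being logged and swallowed. A sketch of the resulting call pattern, using only the API visible in this changeset (repository acquisition is assumed, as above):

```java
import java.util.List;

import org.tmatesoft.hg.core.HgInvalidControlFileException;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.repo.HgSubrepoLocation;

class SubrepoList {
    static void listSubrepos(HgRepository hgRepo) {
        try {
            List<HgSubrepoLocation> subs = hgRepo.getSubrepositories();
            System.out.println(subs.isEmpty() ? "no subrepositories" : subs.size() + " subrepositories");
        } catch (HgInvalidControlFileException ex) {
            // thrown when .hgsub or .hgsubstate exists but cannot be read
            System.err.println("Failed to read subrepository configuration: " + ex.getMessage());
        }
    }
}
```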
--- a/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java	Thu Nov 24 02:57:03 2011 +0100
@@ -33,6 +33,7 @@
 import org.tmatesoft.hg.core.HgBadStateException;
 import org.tmatesoft.hg.core.HgDataStreamException;
 import org.tmatesoft.hg.core.HgException;
+import org.tmatesoft.hg.core.HgInvalidControlFileException;
 import org.tmatesoft.hg.core.Nodeid;
 import org.tmatesoft.hg.internal.ByteArrayChannel;
 import org.tmatesoft.hg.internal.Experimental;
@@ -100,13 +101,17 @@
     * Access to directory state information this collector uses.
     * @return directory state holder, never <code>null</code>
     */
-    public HgDirstate getDirstate() {
+    public HgDirstate getDirstate() throws HgInvalidControlFileException {
         if (dirstate == null) {
             dirstate = repo.loadDirstate(getPathPool());
         }
         return dirstate;
     }
 
+    private HgDirstate getDirstateImpl() {
+        return dirstate;
+    }
+
     private ManifestRevision getManifest(int changelogLocalRev) {
         assert changelogLocalRev >= 0;
         ManifestRevision mr;
@@ -123,7 +128,7 @@
         // WC not necessarily points to TIP, but may be result of update to any previous revision.
         // In such case, we need to compare local files not to their TIP content, but to specific version at the time of selected revision
         if (dirstateParentManifest == null) {
-            Nodeid dirstateParent = getDirstate().parents().first();
+            Nodeid dirstateParent = getDirstateImpl().parents().first();
             if (dirstateParent.isNull()) {
                 dirstateParentManifest = baseRevisionCollector != null ? baseRevisionCollector.raw(-1) : HgStatusCollector.createEmptyManifestRevision();
             } else {
@@ -140,6 +145,17 @@
         if (HgInternals.wrongLocalRevision(baseRevision) || baseRevision == BAD_REVISION) {
             throw new IllegalArgumentException(String.valueOf(baseRevision));
         }
+        if (getDirstateImpl() == null) {
+            // XXX this is a hack to avoid declaring throws for the #walk() at the moment
+            // once I decide whether to have mediator that collects errors or to use exceptions here
+            // this hack shall be removed in favor of either severe error in mediator or a re-thrown exception.
+            try {
+                getDirstate();
+            } catch (HgInvalidControlFileException ex) {
+                repo.getContext().getLog().error(getClass(), ex, "Can't read dirstate");
+                return;
+            }
+        }
         ManifestRevision collect = null; // non null indicates we compare against base revision
         Set<Path> baseRevFiles = Collections.emptySet(); // files from base revision not affected by status calculation
         if (baseRevision != TIP && baseRevision != WORKING_COPY) {
@@ -164,7 +180,7 @@
         final HgIgnore hgIgnore = repo.getIgnore();
         repoWalker.reset();
         TreeSet<Path> processed = new TreeSet<Path>(); // names of files we handled as they known to Dirstate (not FileIterator)
-        final HgDirstate ds = getDirstate();
+        final HgDirstate ds = getDirstateImpl();
         TreeSet<Path> knownEntries = ds.all(); // here just to get dirstate initialized
         while (repoWalker.hasNext()) {
             repoWalker.next();
@@ -261,7 +277,7 @@
 
     private void checkLocalStatusAgainstFile(Path fname, FileInfo f, HgStatusInspector inspector) {
         HgDirstate.Record r;
-        if ((r = getDirstate().checkNormal(fname)) != null) {
+        if ((r = getDirstateImpl().checkNormal(fname)) != null) {
             // either clean or modified
             final boolean timestampEqual = f.lastModified() == r.modificationTime(), sizeEqual = r.size() == f.length();
             if (timestampEqual && sizeEqual) {
@@ -287,15 +303,15 @@
                 inspector.clean(df.getPath());
                 }
             }
-        } else if ((r = getDirstate().checkAdded(fname)) != null) {
+        } else if ((r = getDirstateImpl().checkAdded(fname)) != null) {
             if (r.copySource() == null) {
                 inspector.added(fname);
             } else {
                 inspector.copied(r.copySource(), fname);
             }
-        } else if ((r = getDirstate().checkRemoved(fname)) != null) {
+        } else if ((r = getDirstateImpl().checkRemoved(fname)) != null) {
             inspector.removed(fname);
-        } else if ((r = getDirstate().checkMerged(fname)) != null) {
+        } else if ((r = getDirstateImpl().checkMerged(fname)) != null) {
             inspector.modified(fname);
         }
     }
@@ -310,7 +326,7 @@
             // normal: added?
             // added: not known at the time of baseRevision, shall report
             // merged: was not known, report as added?
-            if ((r = getDirstate().checkNormal(fname)) != null) {
+            if ((r = getDirstateImpl().checkNormal(fname)) != null) {
                 try {
                     Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision);
                     if (origin != null) {
@@ -321,14 +337,14 @@
                     ex.printStackTrace(); // FIXME report to a mediator, continue status collection
                 }
-            } else if ((r = getDirstate().checkAdded(fname)) != null) {
+            } else if ((r = getDirstateImpl().checkAdded(fname)) != null) {
                 if (r.copySource() != null && baseRevNames.contains(r.copySource())) {
                     baseRevNames.remove(r.copySource()); // XXX surely I shall not report rename source as Removed?
                     inspector.copied(r.copySource(), fname);
                     return;
                 }
                 // fall-through, report as added
-            } else if (getDirstate().checkRemoved(fname) != null) {
+            } else if (getDirstateImpl().checkRemoved(fname) != null) {
                 // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
                 return;
             }
@@ -336,7 +352,7 @@
         } else {
             // was known; check whether clean or modified
             Nodeid nidFromDirstate = getDirstateParentManifest().nodeid(fname);
-            if ((r = getDirstate().checkNormal(fname)) != null && nid1.equals(nidFromDirstate)) {
+            if ((r = getDirstateImpl().checkNormal(fname)) != null && nid1.equals(nidFromDirstate)) {
                 // regular file, was the same up to WC initialization. Check if was modified since, and, if not, report right away
                 // same code as in #checkLocalStatusAgainstFile
                 final boolean timestampEqual = f.lastModified() == r.modificationTime(), sizeEqual = r.size() == f.length();
@@ -360,7 +376,7 @@
                 // or nodeid in dirstate is different, but local change might have brought it back to baseRevision state)
                 // FALL THROUGH
             }
-            if (r != null || (r = getDirstate().checkMerged(fname)) != null || (r = getDirstate().checkAdded(fname)) != null) {
+            if (r != null || (r = getDirstateImpl().checkMerged(fname)) != null || (r = getDirstateImpl().checkAdded(fname)) != null) {
                 // check actual content to see actual changes
                 // when added - seems to be the case of a file added once again, hence need to check if content is different
                 // either clean or modified
@@ -371,7 +387,7 @@
                     inspector.modified(fname);
                 }
                 baseRevNames.remove(fname); // consumed, processed, handled.
-            } else if (getDirstate().checkRemoved(fname) != null) {
+            } else if (getDirstateImpl().checkRemoved(fname) != null) {
                 // was known, and now marked as removed, report it right away, do not rely on baseRevNames processing later
                 inspector.removed(fname);
                 baseRevNames.remove(fname); // consumed, processed, handled.
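HgWorkingCopyStatusCollector#getDirstate() now declares the checked exception (it triggers loadDirstate(), which reads eagerly), while internal call sites switch to the non-loading getDirstateImpl(). A sketch of an external caller, reusing only calls that appear in this changeset; the repository instance is again assumed to come from elsewhere:

```java
import org.tmatesoft.hg.core.HgInvalidControlFileException;
import org.tmatesoft.hg.repo.HgDirstate;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.repo.HgWorkingCopyStatusCollector;
import org.tmatesoft.hg.util.Path;

class DirstateNames {
    static void printTracked(HgRepository hgRepo) {
        HgWorkingCopyStatusCollector wcc = HgWorkingCopyStatusCollector.create(hgRepo, new Path.Matcher.Any());
        try {
            // getDirstate() now declares HgInvalidControlFileException
            wcc.getDirstate().walk(new HgDirstate.Inspector() {
                public boolean next(HgDirstate.EntryKind kind, HgDirstate.Record entry) {
                    System.out.println(kind + " " + entry.name());
                    return true; // keep iterating
                }
            });
        } catch (HgInvalidControlFileException ex) {
            System.err.println("Dirstate is unreadable: " + ex.getMessage());
        }
    }
}
```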
--- a/test/org/tmatesoft/hg/test/TestIgnore.java	Tue Nov 22 05:25:57 2011 +0100
+++ b/test/org/tmatesoft/hg/test/TestIgnore.java	Thu Nov 24 02:57:03 2011 +0100
@@ -101,7 +101,7 @@
     @Test
     public void testSegmentsRegexMatch() throws Exception {
         // regex patterns that don't start with explicit ^ are allowed to match anywhere in the string
-        String s = "syntax:regex\n/\\.git\n^abc\n";
+        String s = "syntax:regexp\n/\\.git\n^abc\n";
         HgIgnore hgIgnore = HgInternals.newHgIgnore(new StringReader(s));
         Path p = Path.create(".git/aa");
         errorCollector.assertTrue(p.toString(), !hgIgnore.isIgnored(p));
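The fixture now uses the keyword spelling Mercurial's .hgignore documentation uses (`regexp`, with `re` as an alias) rather than `regex`. A standalone sketch of the same construction path the test exercises; the calls are taken from the test and console code in this changeset, and the import locations are assumed from the library layout:

```java
import java.io.StringReader;

import org.tmatesoft.hg.repo.HgIgnore;
import org.tmatesoft.hg.repo.HgInternals;
import org.tmatesoft.hg.util.Path;

class IgnoreCheck {
    public static void main(String[] args) throws Exception {
        // same ignore definition the test feeds in; "syntax:regexp" selects regular-expression patterns
        String s = "syntax:regexp\n/\\.git\n^abc\n";
        HgIgnore hgIgnore = HgInternals.newHgIgnore(new StringReader(s));
        Path p = Path.create(".git/aa");
        System.out.println(p + " ignored: " + hgIgnore.isIgnored(p));
    }
}
```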