hg4j: changeset 58:4cfc47bc14cc
Status against the local working dir extracted into a distinct class. Iteration over local files extracted for ease of OS-dependent patching
| | |
|---|---|
| author | Artem Tikhomirov <tikhomirov.artem@gmail.com> |
| date | Mon, 17 Jan 2011 23:01:19 +0100 |
| parents | 8b0d6f1bd6b4 |
| children | b771e94a4f7c |
| files | src/com/tmate/hgkit/console/Status.java src/com/tmate/hgkit/fs/FileWalker.java src/com/tmate/hgkit/ll/LocalHgRepo.java src/com/tmate/hgkit/ll/WorkingCopyStatusCollector.java |
| diffstat | 4 files changed, 287 insertions(+), 150 deletions(-) |
line diff
```diff
--- a/src/com/tmate/hgkit/console/Status.java Mon Jan 17 05:54:25 2011 +0100
+++ b/src/com/tmate/hgkit/console/Status.java Mon Jan 17 23:01:19 2011 +0100
@@ -15,6 +15,7 @@
 import com.tmate.hgkit.ll.LocalHgRepo;
 import com.tmate.hgkit.ll.Nodeid;
 import com.tmate.hgkit.ll.StatusCollector;
+import com.tmate.hgkit.ll.WorkingCopyStatusCollector;
 
 /**
  *
@@ -49,13 +50,21 @@
         System.out.println("\n\nTry hg status --change <rev>:");
         sc.change(0, dump);
         System.out.println("\nStatus against working dir:");
-        ((LocalHgRepo) hgRepo).statusLocal(TIP, dump);
+        WorkingCopyStatusCollector wcc = new WorkingCopyStatusCollector(hgRepo, ((LocalHgRepo) hgRepo).createWorkingDirWalker());
+        wcc.walk(TIP, dump);
         System.out.println();
         System.out.printf("Manifest of the revision %d:\n", r2);
         hgRepo.getManifest().walk(r2, r2, new Manifest.Dump());
         System.out.println();
         System.out.printf("\nStatus of working dir against %d:\n", r2);
-        ((LocalHgRepo) hgRepo).statusLocal(r2, dump);
+        r = wcc.status(r2);
+        sortAndPrint('M', r.getModified());
+        sortAndPrint('A', r.getAdded());
+        sortAndPrint('R', r.getRemoved());
+        sortAndPrint('?', r.getUnknown());
+        sortAndPrint('I', r.getIgnored());
+        sortAndPrint('C', r.getClean());
+        sortAndPrint('!', r.getMissing());
     }
 
     private static void sortAndPrint(char prefix, List<String> ul) {
```
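A minimal sketch of how a client wires up the pieces after this refactoring, based on the Status.java changes above. It assumes an already-initialized `HgRepository` instance (repository lookup is out of scope here), and the class and method names of the example itself are invented for illustration.

```java
import com.tmate.hgkit.fs.FileWalker;
import com.tmate.hgkit.ll.HgRepository;
import com.tmate.hgkit.ll.LocalHgRepo;
import com.tmate.hgkit.ll.StatusCollector;
import com.tmate.hgkit.ll.WorkingCopyStatusCollector;

public class WorkingDirStatusExample {

    // Reports working-directory status against the given manifest revision.
    static void printStatus(HgRepository hgRepo, int baseRevision) {
        // The walker enumerates files under the repository root, skipping .hg
        FileWalker walker = ((LocalHgRepo) hgRepo).createWorkingDirWalker();
        WorkingCopyStatusCollector wcc = new WorkingCopyStatusCollector(hgRepo, walker);
        // status() is a convenience over walk(): it gathers results into a Record
        StatusCollector.Record r = wcc.status(baseRevision);
        System.out.println("M " + r.getModified());
        System.out.println("A " + r.getAdded());
        System.out.println("R " + r.getRemoved());
        System.out.println("? " + r.getUnknown());
        System.out.println("I " + r.getIgnored());
        System.out.println("C " + r.getClean());
        System.out.println("! " + r.getMissing());
    }
}
```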
```diff
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/com/tmate/hgkit/fs/FileWalker.java Mon Jan 17 23:01:19 2011 +0100
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2011 Artem Tikhomirov
+ */
+package com.tmate.hgkit.fs;
+
+import java.io.File;
+import java.util.LinkedList;
+import java.util.NoSuchElementException;
+
+/**
+ *
+ * @author artem
+ */
+public class FileWalker {
+
+    private final File startDir;
+    private final LinkedList<File> dirQueue;
+    private final LinkedList<File> fileQueue;
+    private File nextFile;
+    private String nextPath;
+
+    // FilenameFilter is used in a non-standard way - first argument, dir, is always startDir,
+    // while second arg, name, is startDir-relative path to the file in question
+    public FileWalker(File startDir) {
+        this.startDir = startDir;
+        dirQueue = new LinkedList<File>();
+        fileQueue = new LinkedList<File>();
+        reset();
+    }
+
+    public void reset() {
+        fileQueue.clear();
+        dirQueue.clear();
+        dirQueue.add(startDir);
+        nextFile = null;
+        nextPath = null;
+    }
+
+    public boolean hasNext() {
+        return fill();
+    }
+
+    public void next() {
+        if (!fill()) {
+            throw new NoSuchElementException();
+        }
+        nextFile = fileQueue.removeFirst();
+        nextPath = path(nextFile);
+    }
+
+    public String name() {
+        return nextPath;
+    }
+
+    public File file() {
+        return nextFile;
+    }
+
+    private String path(File f) {
+        // XXX LocalHgRepo#normalize
+        String p = f.getPath().substring(startDir.getPath().length() + 1);
+        return p.replace('\\', '/').replace("//", "/");
+    }
+
+    private File[] listFiles(File f) {
+        // in case we need to solve os-related file issues (mac with some encodings?)
+        return f.listFiles();
+    }
+
+    // return true when fill added any elements to fileQueue.
+    private boolean fill() {
+        while (fileQueue.isEmpty()) {
+            if (dirQueue.isEmpty()) {
+                return false;
+            }
+            while (!dirQueue.isEmpty()) {
+                File dir = dirQueue.removeFirst();
+                for (File f : listFiles(dir)) {
+                    if (f.isDirectory()) {
+                        if (!".hg".equals(f.getName())) {
+                            dirQueue.addLast(f);
+                        }
+                    } else {
+                        fileQueue.addLast(f);
+                    }
+                }
+                break;
+            }
+        }
+        return !fileQueue.isEmpty();
+    }
+}
```
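FileWalker follows a pull model: hasNext()/next() advance the traversal, and name()/file() refer to the current entry afterwards. A small illustrative sketch of iterating a working directory with it; the directory path is hypothetical.

```java
import java.io.File;

import com.tmate.hgkit.fs.FileWalker;

public class FileWalkerExample {
    public static void main(String[] args) {
        // hypothetical repository location, for illustration only
        FileWalker walker = new FileWalker(new File("/tmp/some-repo"));
        while (walker.hasNext()) {
            walker.next(); // advance; name()/file() now describe the current entry
            System.out.println(walker.name() + " -> " + walker.file().length() + " bytes");
        }
    }
}
```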
```diff
--- a/src/com/tmate/hgkit/ll/LocalHgRepo.java Mon Jan 17 05:54:25 2011 +0100
+++ b/src/com/tmate/hgkit/ll/LocalHgRepo.java Mon Jan 17 23:01:19 2011 +0100
@@ -6,7 +6,9 @@
 import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileFilter;
 import java.io.FileInputStream;
+import java.io.FilenameFilter;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.lang.ref.SoftReference;
@@ -18,6 +20,7 @@
 import java.util.TreeSet;
 
 import com.tmate.hgkit.fs.DataAccessProvider;
+import com.tmate.hgkit.fs.FileWalker;
 
 /**
  * @author artem
@@ -31,7 +34,7 @@
     public LocalHgRepo(String repositoryPath) {
         setInvalid(true);
         repoLocation = repositoryPath;
-        dataAccess = null;
+        dataAccess = null;
     }
 
     public LocalHgRepo(File repositoryRoot) throws IOException {
@@ -48,155 +51,10 @@
         return repoLocation;
     }
 
-    public void statusLocal(int baseRevision, StatusCollector.Inspector inspector) {
-        LinkedList<File> folders = new LinkedList<File>();
-        final File rootDir = repoDir.getParentFile();
-        folders.add(rootDir);
-        final HgDirstate dirstate = loadDirstate();
-        final HgIgnore hgignore = loadIgnore();
-        TreeSet<String> knownEntries = dirstate.all();
-        final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount();
-        StatusCollector.ManifestRevisionInspector collect = null;
-        Set<String> baseRevFiles = Collections.emptySet();
-        if (!isTipBase) {
-            collect = new StatusCollector.ManifestRevisionInspector(baseRevision, baseRevision);
-            getManifest().walk(baseRevision, baseRevision, collect);
-            baseRevFiles = new TreeSet<String>(collect.files(baseRevision));
-        }
-        do {
-            File d = folders.removeFirst();
-            for (File f : d.listFiles()) {
-                if (f.isDirectory()) {
-                    if (!".hg".equals(f.getName())) {
-                        folders.addLast(f);
-                    }
-                } else {
-                    // FIXME path relative to rootDir - need more robust approach
-                    String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1));
-                    if (hgignore.isIgnored(fname)) {
-                        inspector.ignored(fname);
-                    } else {
-                        if (knownEntries.remove(fname)) {
-                            // modified, added, removed, clean
-                            if (collect != null) { // need to check against base revision, not FS file
-                                Nodeid nid1 = collect.nodeid(baseRevision, fname);
-                                String flags = collect.flags(baseRevision, fname);
-                                checkLocalStatusAgainstBaseRevision(baseRevFiles, nid1, flags, fname, f, dirstate, inspector);
-                                baseRevFiles.remove(fname);
-                            } else {
-                                checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
-                            }
-                        } else {
-                            inspector.unknown(fname);
-                        }
-                    }
-                }
-            }
-        } while (!folders.isEmpty());
-        if (collect != null) {
-            for (String r : baseRevFiles) {
-                inspector.removed(r);
-            }
-        }
-        for (String m : knownEntries) {
-            // removed from the repository and missing from working dir shall not be reported as 'deleted'
-            if (dirstate.checkRemoved(m) == null) {
-                inspector.missing(m);
-            }
-        }
+    public FileWalker createWorkingDirWalker() {
+        return new FileWalker(repoDir.getParentFile());
     }
 
-    private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
-        HgDirstate.Record r;
-        if ((r = dirstate.checkNormal(fname)) != null) {
-            // either clean or modified
-            if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
-                inspector.clean(fname);
-            } else {
-                // FIXME check actual content to avoid false modified files
-                inspector.modified(fname);
-            }
-        } else if ((r = dirstate.checkAdded(fname)) != null) {
-            if (r.name2 == null) {
-                inspector.added(fname);
-            } else {
-                inspector.copied(fname, r.name2);
-            }
-        } else if ((r = dirstate.checkRemoved(fname)) != null) {
-            inspector.removed(fname);
-        } else if ((r = dirstate.checkMerged(fname)) != null) {
-            inspector.modified(fname);
-        }
-    }
-
-    // XXX refactor checkLocalStatus methods in more OO way
-    private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, Nodeid nid1, String flags, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
-        // fname is in the dirstate, either Normal, Added, Removed or Merged
-        HgDirstate.Record r;
-        if (nid1 == null) {
-            // normal: added?
-            // added: not known at the time of baseRevision, shall report
-            // merged: was not known, report as added?
-            if ((r = dirstate.checkAdded(fname)) != null) {
-                if (r.name2 != null && baseRevNames.contains(r.name2)) {
-                    baseRevNames.remove(r.name2);
-                    inspector.copied(r.name2, fname);
-                    return;
-                }
-                // fall-through, report as added
-            } else if (dirstate.checkRemoved(fname) != null) {
-                // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
-                return;
-            }
-            inspector.added(fname);
-        } else {
-            // was known; check whether clean or modified
-            // when added - seems to be the case of a file added once again, hence need to check if content is different
-            if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
-                // either clean or modified
-                HgDataFile fileNode = getFileNode(fname);
-                final int lengthAtRevision = fileNode.length(nid1);
-                if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
-                    inspector.modified(fname);
-                } else {
-                    // check actual content to see actual changes
-                    // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
-                    if (areTheSame(f, fileNode.content(nid1))) {
-                        inspector.clean(fname);
-                    } else {
-                        inspector.modified(fname);
-                    }
-                }
-            }
-            // only those left in idsMap after processing are reported as removed
-        }
-
-        // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
-        // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively
-        // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids:
-        // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
-        // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
-        // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
-    }
-
-    private static String todoGenerateFlags(String fname) {
-        // FIXME implement
-        return null;
-    }
-    private static boolean areTheSame(File f, byte[] data) {
-        try {
-            BufferedInputStream is = new BufferedInputStream(new FileInputStream(f));
-            int i = 0;
-            while (i < data.length && data[i] == is.read()) {
-                i++; // increment only for successful match, otherwise won't tell last byte in data was the same as read from the stream
-            }
-            return i == data.length && is.read() == -1; // although data length is expected to be the same (see caller), check that we reached EOF, no more data left.
-        } catch (IOException ex) {
-            ex.printStackTrace(); // log warn
-        }
-        return false;
-    }
-
     // XXX package-local, unless there are cases when required from outside (guess, working dir/revision walkers may hide dirstate access and no public visibility needed)
     public final HgDirstate loadDirstate() {
         // XXX may cache in SoftReference if creation is expensive
```
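The new (and so far unused) FileFilter/FilenameFilter imports in LocalHgRepo, together with the comment at the top of FileWalker, hint at the intended filtering convention: the dir argument is always the start directory and name is a start-dir-relative path. A hedged sketch of a filter written against that convention; the filter logic and paths below are invented for illustration.

```java
import java.io.File;
import java.io.FilenameFilter;

public class RelativePathFilterExample {
    public static void main(String[] args) {
        final File startDir = new File("/tmp/some-repo"); // example path
        FilenameFilter skipBuildOutput = new FilenameFilter() {
            public boolean accept(File dir, String name) {
                // by the FileWalker convention, dir == startDir and
                // name is a startDir-relative path such as "src/com/tmate/hgkit/ll/Nodeid.java"
                return !name.startsWith("bin/");
            }
        };
        System.out.println(skipBuildOutput.accept(startDir, "bin/com/tmate/hgkit/ll/Nodeid.class"));
    }
}
```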
```diff
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/com/tmate/hgkit/ll/WorkingCopyStatusCollector.java Mon Jan 17 23:01:19 2011 +0100
@@ -0,0 +1,178 @@
+/*
+ * Copyright (c) 2011 Artem Tikhomirov
+ */
+package com.tmate.hgkit.ll;
+
+import static com.tmate.hgkit.ll.HgRepository.TIP;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Set;
+import java.util.TreeSet;
+
+import com.tmate.hgkit.fs.FileWalker;
+
+/**
+ *
+ * @author artem
+ */
+public class WorkingCopyStatusCollector {
+
+    private final HgRepository repo;
+    private final FileWalker repoWalker;
+
+    public WorkingCopyStatusCollector(HgRepository hgRepo, FileWalker hgRepoWalker) {
+        this.repo = hgRepo;
+        this.repoWalker = hgRepoWalker;
+    }
+
+    public void walk(int baseRevision, StatusCollector.Inspector inspector) {
+        final HgIgnore hgIgnore = ((LocalHgRepo) repo).loadIgnore(); // FIXME hack
+        final HgDirstate dirstate = ((LocalHgRepo) repo).loadDirstate(); // FIXME hack
+        TreeSet<String> knownEntries = dirstate.all();
+        final boolean isTipBase = baseRevision == TIP || baseRevision == repo.getManifest().getRevisionCount();
+        StatusCollector.ManifestRevisionInspector collect = null;
+        Set<String> baseRevFiles = Collections.emptySet();
+        if (!isTipBase) {
+            collect = new StatusCollector.ManifestRevisionInspector(baseRevision, baseRevision);
+            repo.getManifest().walk(baseRevision, baseRevision, collect);
+            baseRevFiles = new TreeSet<String>(collect.files(baseRevision));
+        }
+        repoWalker.reset();
+        while (repoWalker.hasNext()) {
+            repoWalker.next();
+            String fname = repoWalker.name();
+            File f = repoWalker.file();
+            if (hgIgnore.isIgnored(fname)) {
+                inspector.ignored(fname);
+            } else if (knownEntries.remove(fname)) {
+                // modified, added, removed, clean
+                if (collect != null) { // need to check against base revision, not FS file
+                    Nodeid nid1 = collect.nodeid(baseRevision, fname);
+                    String flags = collect.flags(baseRevision, fname);
+                    checkLocalStatusAgainstBaseRevision(baseRevFiles, nid1, flags, fname, f, dirstate, inspector);
+                    baseRevFiles.remove(fname);
+                } else {
+                    checkLocalStatusAgainstFile(fname, f, dirstate, inspector);
+                }
+            } else {
+                inspector.unknown(fname);
+            }
+        }
+        if (collect != null) {
+            for (String r : baseRevFiles) {
+                inspector.removed(r);
+            }
+        }
+        for (String m : knownEntries) {
+            // removed from the repository and missing from working dir shall not be reported as 'deleted'
+            if (dirstate.checkRemoved(m) == null) {
+                inspector.missing(m);
+            }
+        }
+    }
+
+    public StatusCollector.Record status(int baseRevision) {
+        StatusCollector.Record rv = new StatusCollector.Record();
+        walk(baseRevision, rv);
+        return rv;
+    }
+
+    //********************************************
+
+
+    private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+        HgDirstate.Record r;
+        if ((r = dirstate.checkNormal(fname)) != null) {
+            // either clean or modified
+            if (f.lastModified() / 1000 == r.time && r.size == f.length()) {
+                inspector.clean(fname);
+            } else {
+                // FIXME check actual content to avoid false modified files
+                inspector.modified(fname);
+            }
+        } else if ((r = dirstate.checkAdded(fname)) != null) {
+            if (r.name2 == null) {
+                inspector.added(fname);
+            } else {
+                inspector.copied(fname, r.name2);
+            }
+        } else if ((r = dirstate.checkRemoved(fname)) != null) {
+            inspector.removed(fname);
+        } else if ((r = dirstate.checkMerged(fname)) != null) {
+            inspector.modified(fname);
+        }
+    }
+
+    // XXX refactor checkLocalStatus methods in more OO way
+    private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, Nodeid nid1, String flags, String fname, File f, HgDirstate dirstate, StatusCollector.Inspector inspector) {
+        // fname is in the dirstate, either Normal, Added, Removed or Merged
+        HgDirstate.Record r;
+        if (nid1 == null) {
+            // normal: added?
+            // added: not known at the time of baseRevision, shall report
+            // merged: was not known, report as added?
+            if ((r = dirstate.checkAdded(fname)) != null) {
+                if (r.name2 != null && baseRevNames.contains(r.name2)) {
+                    baseRevNames.remove(r.name2);
+                    inspector.copied(r.name2, fname);
+                    return;
+                }
+                // fall-through, report as added
+            } else if (dirstate.checkRemoved(fname) != null) {
+                // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
+                return;
+            }
+            inspector.added(fname);
+        } else {
+            // was known; check whether clean or modified
+            // when added - seems to be the case of a file added once again, hence need to check if content is different
+            if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) {
+                // either clean or modified
+                HgDataFile fileNode = repo.getFileNode(fname);
+                final int lengthAtRevision = fileNode.length(nid1);
+                if (r.size /* XXX File.length() ?! */ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) {
+                    inspector.modified(fname);
+                } else {
+                    // check actual content to see actual changes
+                    // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison
+                    if (areTheSame(f, fileNode.content(nid1))) {
+                        inspector.clean(fname);
+                    } else {
+                        inspector.modified(fname);
+                    }
+                }
+            }
+            // only those left in idsMap after processing are reported as removed
+        }
+
+        // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest
+        // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively
+        // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids:
+        // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest
+        // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids).
+        // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean'
+    }
+
+    private static String todoGenerateFlags(String fname) {
+        // FIXME implement
+        return null;
+    }
+    private static boolean areTheSame(File f, byte[] data) {
+        try {
+            BufferedInputStream is = new BufferedInputStream(new FileInputStream(f));
+            int i = 0;
+            while (i < data.length && data[i] == is.read()) {
+                i++; // increment only for successful match, otherwise won't tell last byte in data was the same as read from the stream
+            }
+            return i == data.length && is.read() == -1; // although data length is expected to be the same (see caller), check that we reached EOF, no more data left.
+        } catch (IOException ex) {
+            ex.printStackTrace(); // log warn
+        }
+        return false;
+    }
+
+}
```
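The TODO in checkLocalStatusAgainstBaseRevision suggests replacing byte-by-byte content comparison with a nodeid check on the local file. A rough sketch of that idea follows, under the assumption that Mercurial's revlog nodeid is SHA-1 over the two parent nodeids (smaller first) followed by the revision text; the class and method names are made up, and the sketch ignores details such as copy metadata stored inside file revisions, so treat it as an illustration of the TODO rather than a verified implementation.

```java
import java.io.File;
import java.io.FileInputStream;
import java.security.MessageDigest;

public class NodeidSketch {

    // p1/p2: the 20-byte parent nodeids recorded for the file revision we compare against.
    static byte[] sha1Nodeid(File f, byte[] p1, byte[] p2) throws Exception {
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        // assumption: parents are hashed in ascending byte order, smaller one first
        if (compare(p1, p2) <= 0) {
            sha1.update(p1);
            sha1.update(p2);
        } else {
            sha1.update(p2);
            sha1.update(p1);
        }
        FileInputStream fis = new FileInputStream(f);
        try {
            byte[] buf = new byte[8192];
            int n;
            while ((n = fis.read(buf)) != -1) {
                sha1.update(buf, 0, n); // stream the file, no need to keep it in memory
            }
        } finally {
            fis.close();
        }
        return sha1.digest(); // candidate nodeid to compare with the manifest entry
    }

    private static int compare(byte[] a, byte[] b) {
        for (int i = 0; i < a.length && i < b.length; i++) {
            int d = (a[i] & 0xff) - (b[i] & 0xff);
            if (d != 0) {
                return d;
            }
        }
        return a.length - b.length;
    }
}
```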