changeset 157:d5268ca7715b
Merged branch wrap-data-access into default for resource-friendly data access. Updated the API to promote that friendliness to clients (channels rather than byte[]). Added more exceptions.
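The data-access change described here shows up throughout the diff below: file content is no longer returned as a byte[]; instead the caller hands in a sink implementing org.tmatesoft.hg.util.ByteChannel (see OutputStreamChannel in the new Cat.java and ByteArrayChannel in Main.java). A minimal sketch of that usage, relying only on types and calls that appear in this changeset and assuming ByteChannel declares just the single write(ByteBuffer) method seen in OutputStreamChannel:

import java.io.IOException;
import java.nio.ByteBuffer;

import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.ByteChannel;

public class ChannelReadSketch {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        HgDataFile df = repo.getFileNode("design.txt");
        if (!df.exists()) {
            return;
        }
        // revision content is pushed into the supplied channel, no intermediate byte[]
        df.content(0, new ByteChannel() {
            public int write(ByteBuffer buffer) throws IOException {
                int count = buffer.remaining();
                while (buffer.hasRemaining()) {
                    System.out.print((char) buffer.get());
                }
                return count;
            }
        });
    }
}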
--- a/.classpath Sun Jan 16 01:40:38 2011 +0100
+++ b/.classpath Wed Mar 09 05:22:17 2011 +0100
@@ -1,6 +1,9 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" path="src"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
-	<classpathentry kind="output" path="bin"/>
-</classpath>
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="src"/>
+	<classpathentry kind="src" path="test"/>
+	<classpathentry kind="src" path="cmdline"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
+	<classpathentry kind="lib" path="lib/junit-4.8.2.jar" sourcepath="lib/junit-4.8.2-src.jar"/>
+	<classpathentry kind="output" path="bin"/>
+</classpath>
--- a/.hgignore Sun Jan 16 01:40:38 2011 +0100
+++ b/.hgignore Wed Mar 09 05:22:17 2011 +0100
@@ -1,4 +1,7 @@
 syntax:glob
 bin
 src/Extras.java
-hgkit.jar
+hg4j*.jar
+hg4j-tests*.jar
+hg4j-console*.jar
+TEST-*.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.settings/org.eclipse.core.resources.prefs Wed Mar 09 05:22:17 2011 +0100
@@ -0,0 +1,3 @@
+#Thu Feb 24 18:49:36 CET 2011
+eclipse.preferences.version=1
+encoding/<project>=UTF-8
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.settings/org.eclipse.core.runtime.prefs Wed Mar 09 05:22:17 2011 +0100
@@ -0,0 +1,3 @@
+#Thu Feb 24 18:49:36 CET 2011
+eclipse.preferences.version=1
+line.separator=\n
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/COPYING Wed Mar 09 05:22:17 2011 +0100
@@ -0,0 +1,14 @@
+Copyright (C) 2010-2011 TMate Software Ltd
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; version 2 of the License.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+For information on how to redistribute this software under
+the terms of a license other than GNU General Public License
+contact TMate Software at support@hg4j.com
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/TODO Wed Mar 09 05:22:17 2011 +0100
@@ -0,0 +1,66 @@
+Read-only support, version 1.0
+==============================
+Committed:
+* store+fncache, RevlogNG (i.e. no support for older store formats)
+
+* hg log
+ + user, branch, limit
+ - date,
+ + filename
+ + filename and follow history
+
+
+* hg manifest (aka ls)
+
+
+* hg status
+ + copies for [revision..revision] and for [revision..working dir]
+ + path or anything meaningful instead of Strings
+ + matchers
+
+* hg cat
+ + CatCommand. File, revision.
+ - Cat command line client
+
++ hgignore
+ + glob
+ + pattern
+
++ Tests with JUnit
+ - allow to specify repo location (system property)
+ - keep a .zip of repo along with tests
+
+* tags
+ * Tags are read and can be queried (cmdline Log does)
+
+* keywords
+ + filter with context (HgRepository + Path + Direction (to/from repo)
+ - filters shall have weight (to allow certain filter come first). Would need that once FilterFactories are pluggable
+
+* newlines
+ + \r\n <==> \n
+ - force translation if inconsistent (now either fails or does nothing)
+
+* API
+ - CommandContext
+ - Data access - not bytes, but ByteChannel
+ - HgRepository constants (TIP, BAD, WC) to HgRevisions enum
+
+Proposed:
+- LogCommand.revision(int... rev)+ to walk selected revisions only (list->sort(array) on execute, binary search)
+- LogCommand.before(Date date) and .after()
+- LogCommand.match() to specify pattern, no selected file()s only?
+* RepositoryFacade and CommandContext
+- hgignore: read extra ignore files from config file (ui.ignore)
+
+
+Read-only support, version 1.1
+==============================
+Committed:
+* http, https and ssh connections:
+
+* incoming
+
+* outgoing
+
+- clone remote repo
--- a/build.xml Sun Jan 16 01:40:38 2011 +0100 +++ b/build.xml Wed Mar 09 05:22:17 2011 +0100 @@ -1,30 +1,115 @@ <?xml version="1.0" encoding="UTF-8"?> -<project name="hgkit" default="samples"> - <description> - description +<!-- + Copyright (c) 2010-2011 TMate Software Ltd + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; version 2 of the License. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + For information on how to redistribute this software under + the terms of a license other than GNU General Public License + contact TMate Software at support@hg4j.com +--> +<project name="hg4j" default="samples"> + <description> + Build, test and showcase hg4j </description> - <target name="samples" depends="build"> - - <echo message="History of a specific file(s)"/> - <java classpath="hgkit.jar" classname="com.tmate.hgkit.console.Log"> - <arg line="design.txt .classpath src/com/tmate/hgkit/ll/LocalHgRepo.java"/> - </java> - - <echo message="Whole repo log"/> - <java classpath="hgkit.jar" classname="com.tmate.hgkit.console.Log"/> - - <echo message="Content of a file"/> - <java classpath="hgkit.jar" classname="com.tmate.hgkit.console.Cat"> - <arg line="src/com/tmate/hgkit/ll/Revlog.java"/> - </java> + <property name="junit.jar" value="lib/junit-4.8.2.jar" /> + <property name="ver.qualifier" value=".rc1" /> + <property name="version.lib" value="0.0.9" /> + <property name="version.jar" value="${version.lib}${ver.qualifier}" /> - </target> + <property name="hg4j.jar" value="hg4j_${version.jar}.jar" /> + <property name="hg4j-tests.jar" value="hg4j-tests_${version.jar}.jar" /> + <property name="hg4j-console.jar" value="hg4j-console_${version.jar}.jar" /> - <target name="build"> - <mkdir dir="bin"/> - <javac srcdir="src" destdir="bin"/> - <jar destfile="hgkit.jar" basedir="bin"/> - </target> + <target name="samples" depends="build-cmdline"> + <path id="path.cmdline" path="${hg4j.jar};${hg4j-console.jar}" /> + + <echo message="History of a specific file(s)" /> + <java classpathref="path.cmdline" classname="org.tmatesoft.hg.console.Log"> + <arg line="design.txt .classpath src/org/tmatesoft/hg/core/HgRepoFacade.java" /> + </java> + + <echo message="${line.separator}>>>Latest commit" /> + <java classpathref="path.cmdline" classname="org.tmatesoft.hg.console.Log"> + <arg line="--debug --limit 1" /> + </java> + + <echo message="${line.separator}>>>Content of a file" /> + <java classpathref="path.cmdline" classname="org.tmatesoft.hg.console.Cat"> + <arg line="src/org/tmatesoft/hg/core/HgRepoFacade.java --rev 1" /> + </java> + + <echo message="${line.separator}>>>Status between two revisions" /> + <java classpathref="path.cmdline" classname="org.tmatesoft.hg.console.Status"> + <arg line="--rev 140 --rev 142" /> + </java> + + <echo message="${line.separator}>>>Status, working copy, all" /> + <java classpathref="path.cmdline" classname="org.tmatesoft.hg.console.Status"> + <arg line="-A" /> + </java> + + <echo message="${line.separator}>>>Manifest" /> + <java classpathref="path.cmdline" classname="org.tmatesoft.hg.console.Manifest"> + <arg line="--debug" /> + </java> + </target> + + <target name="tests" depends="build-tests"> + <property name="test-repos-root" 
value="${java.io.tmpdir}/hg4j-tests/"/> + <delete dir="${test-repos-root}" quiet="yes"/> + <unjar src="test-data/test-repos.jar" dest="${test-repos-root}"/> + <junit> + <classpath path="${hg4j.jar};${hg4j-tests.jar};${junit.jar}" /> + <formatter type="xml" /> + <formatter type="plain" usefile="no" /> + <sysproperty key="hg4j.tests.repos" value="${test-repos-root}"/> + <test name="org.tmatesoft.hg.test.TestHistory" /> + <test name="org.tmatesoft.hg.test.TestManifest" /> + <test name="org.tmatesoft.hg.test.TestStatus" /> + <test name="org.tmatesoft.hg.test.TestStorePath" /> + <test name="org.tmatesoft.hg.test.TestByteChannel" /> + </junit> + </target> + + <target name="build"> + <mkdir dir="bin" /> + <javac srcdir="src" destdir="bin" /> + <jar destfile="${hg4j.jar}"> + <fileset dir="bin/"> + <include name="org/tmatesoft/hg/core/**" /> + <include name="org/tmatesoft/hg/util/**" /> + <include name="org/tmatesoft/hg/repo/**" /> + <include name="org/tmatesoft/hg/internal/**" /> + </fileset> + <fileset file="COPYING"/> + </jar> + </target> + + <target name="build-tests" depends="build"> + <mkdir dir="bin" /> + <javac srcdir="test" destdir="bin" /> + <jar destfile="${hg4j-tests.jar}"> + <fileset dir="bin" includes="org/tmatesoft/hg/test/**"/> + <fileset file="COPYING"/> + </jar> + </target> + + <target name="build-cmdline" depends="build"> + <mkdir dir="bin" /> + <javac srcdir="cmdline" destdir="bin" /> + <jar destfile="${hg4j-console.jar}"> + <fileset dir="bin/" includes="org/tmatesoft/hg/console/**"/> + <fileset file="COPYING"/> + </jar> + </target> </project>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Bundle.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import java.io.File; + +import org.tmatesoft.hg.internal.DataAccessProvider; +import org.tmatesoft.hg.repo.HgBundle; +import org.tmatesoft.hg.repo.HgRepository; + + +/** + * WORK IN PROGRESS, DO NOT USE + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Bundle { + + public static void main(String[] args) throws Exception { + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); + if (hgRepo.isInvalid()) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); + return; + } + File bundleFile = new File("/temp/hg/hg-bundle-a78c980749e3.tmp"); + DataAccessProvider dap = new DataAccessProvider(); + HgBundle hgBundle = new HgBundle(dap, bundleFile); +// hgBundle.dump(); + hgBundle.changes(hgRepo); + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Cat.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; + +import org.tmatesoft.hg.repo.HgDataFile; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.ByteChannel; + + +/** + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Cat { + + public static void main(String[] args) throws Exception { + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); + if (hgRepo.isInvalid()) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); + return; + } + int rev = cmdLineOpts.getSingleInt(TIP, "-r", "--rev"); + OutputStreamChannel out = new OutputStreamChannel(System.out); + for (String fname : cmdLineOpts.getList("")) { + System.out.println(fname); + HgDataFile fn = hgRepo.getFileNode(fname); + if (fn.exists()) { + fn.contentWithFilters(rev, out); + System.out.println(); + } else { + System.out.printf("%s not found!\n", fname); + } + } + } + + private static class OutputStreamChannel implements ByteChannel { + + private final OutputStream stream; + + public OutputStreamChannel(OutputStream out) { + stream = out; + } + + public int write(ByteBuffer buffer) throws IOException { + int count = buffer.remaining(); + while(buffer.hasRemaining()) { + stream.write(buffer.get()); + } + return count; + } + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Incoming.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,200 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.repo.HgChangelog; +import org.tmatesoft.hg.repo.HgRepository; + + +/** + * WORK IN PROGRESS, DO NOT USE + * hg in counterpart + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Incoming { + + public static void main(String[] args) throws Exception { + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); + if (hgRepo.isInvalid()) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); + return; + } + // in fact, all we need from changelog is set of all nodeids. However, since ParentWalker reuses same Nodeids, it's not too expensive + // to reuse it here, XXX although later this may need to be refactored + final HgChangelog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker(); + pw.init(); + // + HashSet<Nodeid> base = new HashSet<Nodeid>(); + HashSet<Nodeid> unknownRemoteHeads = new HashSet<Nodeid>(); + // imagine empty repository - any nodeid from remote heads would be unknown + unknownRemoteHeads.add(Nodeid.fromAscii("382cfe9463db0484a14136e4b38407419525f0c0".getBytes(), 0, 40)); + // + LinkedList<RemoteBranch> remoteBranches = new LinkedList<RemoteBranch>(); + remoteBranches(unknownRemoteHeads, remoteBranches); + // + HashSet<Nodeid> visited = new HashSet<Nodeid>(); + HashSet<RemoteBranch> processed = new HashSet<RemoteBranch>(); + LinkedList<Nodeid[]> toScan = new LinkedList<Nodeid[]>(); + LinkedHashSet<Nodeid> toFetch = new LinkedHashSet<Nodeid>(); + // next one seems to track heads we've asked (or plan to ask) remote.branches for + HashSet<Nodeid> unknownHeads /*req*/ = new HashSet<Nodeid>(unknownRemoteHeads); + while (!remoteBranches.isEmpty()) { + LinkedList<Nodeid> toQueryRemote = new LinkedList<Nodeid>(); + while (!remoteBranches.isEmpty()) { + RemoteBranch next = remoteBranches.removeFirst(); + if (visited.contains(next.head) || processed.contains(next)) { + continue; + } + if (Nodeid.NULL.equals(next.head)) { + // it's discovery.py that expects next.head to be nullid here, I can't imagine how this may happen, hence this exception + throw new IllegalStateException("I wonder if null if may ever get here with remote branches"); + } else if (pw.knownNode(next.root)) { + // root of the remote change is known locally, analyze to find exact missing changesets + toScan.addLast(new Nodeid[] { next.head, next.root }); + processed.add(next); + } else { + if (!visited.contains(next.root) && !toFetch.contains(next.root)) { 
+ // if parents are locally known, this is new branch (sequence of changes) (sequence sprang out of known parents) + if ((next.p1 == null || pw.knownNode(next.p1)) && (next.p2 == null || pw.knownNode(next.p2))) { + toFetch.add(next.root); + } + // XXX perhaps, may combine this parent processing below (I don't understand what this code is exactly about) + if (pw.knownNode(next.p1)) { + base.add(next.p1); + } + if (pw.knownNode(next.p2)) { + base.add(next.p2); + } + } + if (next.p1 != null && !pw.knownNode(next.p1) && !unknownHeads.contains(next.p1)) { + toQueryRemote.add(next.p1); + unknownHeads.add(next.p1); + } + if (next.p2 != null && !pw.knownNode(next.p2) && !unknownHeads.contains(next.p2)) { + toQueryRemote.add(next.p2); + unknownHeads.add(next.p2); + } + } + visited.add(next.head); + } + if (!toQueryRemote.isEmpty()) { + // discovery.py in fact does this in batches of 10 revisions a time. + // however, this slicing may be done in remoteBranches call instead (if needed) + remoteBranches(toQueryRemote, remoteBranches); + } + } + while (!toScan.isEmpty()) { + Nodeid[] head_root = toScan.removeFirst(); + List<Nodeid> nodesBetween = remoteBetween(head_root[0], head_root[1], new LinkedList<Nodeid>()); + nodesBetween.add(head_root[1]); + int x = 1; + Nodeid p = head_root[0]; + for (Nodeid i : nodesBetween) { + System.out.println("narrowing " + x + ":" + nodesBetween.size() + " " + i.shortNotation()); + if (pw.knownNode(i)) { + if (x <= 2) { + toFetch.add(p); + base.add(i); + } else { + // XXX original discovery.py collects new elements to scan separately + // likely to "batch" calls to server + System.out.println("narrowed branch search to " + p.shortNotation() + ":" + i.shortNotation()); + toScan.addLast(new Nodeid[] { p, i }); + } + break; + } + x = x << 1; + p = i; + } + } + for (Nodeid n : toFetch) { + if (pw.knownNode(n)) { + System.out.println("Erroneous to fetch:" + n); + } else { + System.out.println(n); + } + } + + } + + static final class RemoteBranch { + public Nodeid head, root, p1, p2; + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (false == obj instanceof RemoteBranch) { + return false; + } + RemoteBranch o = (RemoteBranch) obj; + return head.equals(o.head) && root.equals(o.root) && (p1 == null && o.p1 == null || p1.equals(o.p1)) && (p2 == null && o.p2 == null || p2.equals(o.p2)); + } + } + + private static void remoteBranches(Collection<Nodeid> unknownRemoteHeads, List<RemoteBranch> remoteBranches) { + // discovery.findcommonincoming: + // unknown = remote.branches(remote.heads); + // sent: cmd=branches&roots=d6d2a630f4a6d670c90a5ca909150f2b426ec88f+ + // received: d6d2a630f4a6d670c90a5ca909150f2b426ec88f dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 + // head, root, first parent, second parent + // + // TODO implement this with remote access + // + RemoteBranch rb = new RemoteBranch(); + rb.head = unknownRemoteHeads.iterator().next(); + rb.root = Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40); + remoteBranches.add(rb); + } + + private static List<Nodeid> remoteBetween(Nodeid nodeid1, Nodeid nodeid2, List<Nodeid> list) { + // sent: cmd=between&pairs=d6d2a630f4a6d670c90a5ca909150f2b426ec88f-dbd663faec1f0175619cf7668bddc6350548b8d6 + // received: a78c980749e3ccebb47138b547e9b644a22797a9 286d221f6c28cbfce25ea314e1f46a23b7f979d3 fc265ddeab262ff5c34b4cf4e2522d8d41f1f05b a3576694a4d1edaa681cab15b89d6b556b02aff4 + // 1st, 2nd, 
fourth and eights of total 8 changes between rev9 and rev0 + // + // + // a78c980749e3ccebb47138b547e9b644a22797a9 286d221f6c28cbfce25ea314e1f46a23b7f979d3 fc265ddeab262ff5c34b4cf4e2522d8d41f1f05b a3576694a4d1edaa681cab15b89d6b556b02aff4 + //d6d2a630f4a6d670c90a5ca909150f2b426ec88f a78c980749e3ccebb47138b547e9b644a22797a9 5abe5af181bd6a6d3e94c378376c901f0f80da50 08db726a0fb7914ac9d27ba26dc8bbf6385a0554 + + // TODO implement with remote access + String response = null; + if (nodeid1.equals(Nodeid.fromAscii("382cfe9463db0484a14136e4b38407419525f0c0".getBytes(), 0, 40)) && nodeid2.equals(Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40))) { + response = "d6d2a630f4a6d670c90a5ca909150f2b426ec88f a78c980749e3ccebb47138b547e9b644a22797a9 5abe5af181bd6a6d3e94c378376c901f0f80da50 08db726a0fb7914ac9d27ba26dc8bbf6385a0554"; + } else if (nodeid1.equals(Nodeid.fromAscii("a78c980749e3ccebb47138b547e9b644a22797a9".getBytes(), 0, 40)) && nodeid2.equals(Nodeid.fromAscii("5abe5af181bd6a6d3e94c378376c901f0f80da50".getBytes(), 0, 40))) { + response = "286d221f6c28cbfce25ea314e1f46a23b7f979d3"; + } + if (response == null) { + throw HgRepository.notImplemented(); + } + for (String s : response.split(" ")) { + list.add(Nodeid.fromAscii(s.getBytes(), 0, 40)); + } + return list; + } + +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Log.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,240 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import java.util.Formatter; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.core.HgChangeset; +import org.tmatesoft.hg.core.HgLogCommand; +import org.tmatesoft.hg.core.HgLogCommand.FileRevision; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.repo.HgDataFile; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.Path; + + +/** + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Log { + + public static void main(String[] args) throws Exception { + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); + if (hgRepo.isInvalid()) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); + return; + } + final Dump dump = new Dump(hgRepo); + dump.complete = cmdLineOpts.getBoolean("--debug"); + dump.verbose = cmdLineOpts.getBoolean("-v", "--verbose"); + dump.reverseOrder = true; + HgLogCommand cmd = new HgLogCommand(hgRepo); + for (String u : cmdLineOpts.getList("-u", "--user")) { + cmd.user(u); + } + for (String b : cmdLineOpts.getList("-b", "--branches")) { + cmd.branch(b); + } + int limit = cmdLineOpts.getSingleInt(-1, "-l", "--limit"); + if (limit != -1) { + cmd.limit(limit); + } + List<String> files = cmdLineOpts.getList(""); + if (files.isEmpty()) { + if (limit == -1) { + // no revisions and no limit + cmd.execute(dump); + } else { + // in fact, external (to dump inspector) --limit processing yelds incorrect results when other args + // e.g. -u or -b are used (i.e. 
with -u shall give <limit> csets with user, not check last <limit> csets for user + int[] r = new int[] { 0, hgRepo.getChangelog().getRevisionCount() }; + if (fixRange(r, dump.reverseOrder, limit) == 0) { + System.out.println("No changes"); + return; + } + cmd.range(r[0], r[1]).execute(dump); + } + dump.complete(); + } else { + for (String fname : files) { + HgDataFile f1 = hgRepo.getFileNode(fname); + System.out.println("History of the file: " + f1.getPath()); + if (limit == -1) { + cmd.file(f1.getPath(), true).execute(dump); + } else { + int[] r = new int[] { 0, f1.getRevisionCount() }; + if (fixRange(r, dump.reverseOrder, limit) == 0) { + System.out.println("No changes"); + continue; + } + cmd.range(r[0], r[1]).file(f1.getPath(), true).execute(dump); + } + dump.complete(); + } + } + // + // XXX new ChangelogWalker().setFile("hello.c").setRevisionRange(1, 4).accept(new Visitor); + } + + private static int fixRange(int[] start_end, boolean reverse, int limit) { + assert start_end.length == 2; + if (limit < start_end[1]) { + if (reverse) { + // adjust left boundary of the range + start_end[0] = start_end[1] - limit; + } else { + start_end[1] = limit; // adjust right boundary + } + } + int rv = start_end[1] - start_end[0]; + start_end[1]--; // range needs index, not length + return rv; + } + + private static final class Dump implements HgLogCommand.FileHistoryHandler { + // params + boolean complete = false; // roughly --debug + boolean reverseOrder = false; + boolean verbose = true; // roughly -v + // own + private LinkedList<String> l = new LinkedList<String>(); + private final HgRepository repo; +// private HgChangelog.ParentWalker changelogWalker; + private final int tip ; + + public Dump(HgRepository hgRepo) { + repo = hgRepo; + tip = hgRepo.getChangelog().getLastRevision(); + } + + public void copy(FileRevision from, FileRevision to) { + System.out.printf("Got notified that %s(%s) was originally known as %s(%s)\n", to.getPath(), to.getRevision(), from.getPath(), from.getRevision()); + } + + public void next(HgChangeset changeset) { + final String s = print(changeset); + if (reverseOrder) { + // XXX in fact, need to insert s into l according to changeset.getRevision() + // because when file history is being followed, revisions of the original file (with smaller revNumber) + // are reported *after* revisions of present file and with addFirst appear above them + l.addFirst(s); + } else { + System.out.print(s); + } + } + + public void complete() { + if (!reverseOrder) { + return; + } + for (String s : l) { + System.out.print(s); + } + l.clear(); +// changelogWalker = null; + } + + private String print(HgChangeset cset) { + StringBuilder sb = new StringBuilder(); + Formatter f = new Formatter(sb); + final Nodeid csetNodeid = cset.getNodeid(); + f.format("changeset: %d:%s\n", cset.getRevision(), complete ? 
csetNodeid : csetNodeid.shortNotation()); + if (cset.getRevision() == tip || repo.getTags().isTagged(csetNodeid)) { + + sb.append("tag: "); + for (String t : repo.getTags().tags(csetNodeid)) { + sb.append(t); + sb.append(' '); + } + if (cset.getRevision() == tip) { + sb.append("tip"); + } + sb.append('\n'); + } + if (complete) { +// if (changelogWalker == null) { +// changelogWalker = repo.getChangelog().new ParentWalker(); +// changelogWalker.init(); +// } +// Nodeid p1 = changelogWalker.safeFirstParent(csetNodeid); +// Nodeid p2 = changelogWalker.safeSecondParent(csetNodeid); + Nodeid p1 = cset.getFirstParentRevision(); + Nodeid p2 = cset.getSecondParentRevision(); + int p1x = p1 == Nodeid.NULL ? -1 : repo.getChangelog().getLocalRevision(p1); + int p2x = p2 == Nodeid.NULL ? -1 : repo.getChangelog().getLocalRevision(p2); + int mx = repo.getManifest().getLocalRevision(cset.getManifestRevision()); + f.format("parent: %d:%s\nparent: %d:%s\nmanifest: %d:%s\n", p1x, p1, p2x, p2, mx, cset.getManifestRevision()); + } + f.format("user: %s\ndate: %s\n", cset.getUser(), cset.getDate()); + if (!complete && verbose) { + final List<Path> files = cset.getAffectedFiles(); + sb.append("files: "); + for (Path s : files) { + sb.append(' '); + sb.append(s); + } + sb.append('\n'); + } + if (complete) { + if (!cset.getModifiedFiles().isEmpty()) { + sb.append("files: "); + for (FileRevision s : cset.getModifiedFiles()) { + sb.append(' '); + sb.append(s.getPath()); + } + sb.append('\n'); + } + if (!cset.getAddedFiles().isEmpty()) { + sb.append("files+: "); + for (FileRevision s : cset.getAddedFiles()) { + sb.append(' '); + sb.append(s.getPath()); + } + sb.append('\n'); + } + if (!cset.getRemovedFiles().isEmpty()) { + sb.append("files-: "); + for (Path s : cset.getRemovedFiles()) { + sb.append(' '); + sb.append(s); + } + sb.append('\n'); + } +// if (cset.extras() != null) { +// sb.append("extra: "); +// for (Map.Entry<String, String> e : cset.extras().entrySet()) { +// sb.append(' '); +// sb.append(e.getKey()); +// sb.append('='); +// sb.append(e.getValue()); +// } +// sb.append('\n'); +// } + } + if (complete || verbose) { + f.format("description:\n%s\n\n", cset.getComment()); + } else { + f.format("summary: %s\n\n", cset.getComment()); + } + return sb.toString(); + } + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Main.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,279 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.tmatesoft.hg.core.HgLogCommand.FileRevision; +import org.tmatesoft.hg.core.HgManifestCommand; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.ByteArrayChannel; +import org.tmatesoft.hg.internal.DigestHelper; +import org.tmatesoft.hg.repo.HgDataFile; +import org.tmatesoft.hg.repo.HgInternals; +import org.tmatesoft.hg.repo.HgManifest; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.HgStatusCollector; +import org.tmatesoft.hg.repo.HgStatusInspector; +import org.tmatesoft.hg.repo.HgWorkingCopyStatusCollector; +import org.tmatesoft.hg.util.Path; + +/** + * Various debug dumps. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Main { + + private Options cmdLineOpts; + private HgRepository hgRepo; + + public Main(String[] args) throws Exception { + cmdLineOpts = Options.parse(args); + hgRepo = cmdLineOpts.findRepository(); + if (hgRepo.isInvalid()) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); + return; + } + System.out.println("REPO:" + hgRepo.getLocation()); + } + + public static void main(String[] args) throws Exception { + Main m = new Main(args); + m.inflaterLengthException(); +// m.dumpIgnored(); +// m.dumpDirstate(); +// m.testStatusInternals(); +// m.catCompleteHistory(); +// m.dumpCompleteManifestLow(); +// m.dumpCompleteManifestHigh(); +// m.bunchOfTests(); + } + + private void inflaterLengthException() throws Exception { + HgDataFile f1 = hgRepo.getFileNode("src/com/tmate/hgkit/console/Bundle.java"); + HgDataFile f2 = hgRepo.getFileNode("test-repos.jar"); + System.out.println(f1.isCopy()); + System.out.println(f2.isCopy()); + ByteArrayChannel bac = new ByteArrayChannel(); + f1.content(0, bac); + System.out.println(bac.toArray().length); + } + + private void dumpIgnored() { + HgInternals debug = new HgInternals(hgRepo); + String[] toCheck = new String[] {"design.txt", "src/com/tmate/hgkit/ll/Changelog.java", "src/Extras.java", "bin/com/tmate/hgkit/ll/Changelog.class"}; + boolean[] checkResult = debug.checkIgnored(toCheck); + for (int i = 0; i < toCheck.length; i++) { + System.out.println("Ignored " + toCheck[i] + ": " + checkResult[i]); + } + } + + private void dumpDirstate() { + new HgInternals(hgRepo).dumpDirstate(); + } + + + private void catCompleteHistory() throws Exception { + DigestHelper dh = new DigestHelper(); + for (String fname : cmdLineOpts.getList("")) { + System.out.println(fname); + HgDataFile fn = hgRepo.getFileNode(fname); 
+ if (fn.exists()) { + int total = fn.getRevisionCount(); + System.out.printf("Total revisions: %d\n", total); + for (int i = 0; i < total; i++) { + ByteArrayChannel sink = new ByteArrayChannel(); + fn.content(i, sink); + System.out.println("==========>"); + byte[] content = sink.toArray(); + System.out.println(new String(content)); + int[] parentRevisions = new int[2]; + byte[] parent1 = new byte[20]; + byte[] parent2 = new byte[20]; + fn.parents(i, parentRevisions, parent1, parent2); + System.out.println(dh.sha1(parent1, parent2, content).asHexString()); + } + } else { + System.out.println(">>>Not found!"); + } + } + } + + private void dumpCompleteManifestLow() { + hgRepo.getManifest().walk(0, TIP, new ManifestDump()); + } + + public static final class ManifestDump implements HgManifest.Inspector { + public boolean begin(int revision, Nodeid nid) { + System.out.printf("%d : %s\n", revision, nid); + return true; + } + + public boolean next(Nodeid nid, String fname, String flags) { + System.out.println(nid + "\t" + fname + "\t\t" + flags); + return true; + } + + public boolean end(int revision) { + System.out.println(); + return true; + } + } + + private void dumpCompleteManifestHigh() { + new HgManifestCommand(hgRepo).dirs(true).execute(new HgManifestCommand.Handler() { + + public void begin(Nodeid manifestRevision) { + System.out.println(">> " + manifestRevision); + } + public void dir(Path p) { + System.out.println(p); + } + public void file(FileRevision fileRevision) { + System.out.print(fileRevision.getRevision());; + System.out.print(" "); + System.out.println(fileRevision.getPath()); + } + + public void end(Nodeid manifestRevision) { + System.out.println(); + } + }); + } + + private void bunchOfTests() throws Exception { + HgInternals debug = new HgInternals(hgRepo); + debug.dumpDirstate(); + final StatusDump dump = new StatusDump(); + dump.showIgnored = false; + dump.showClean = false; + HgStatusCollector sc = new HgStatusCollector(hgRepo); + final int r1 = 0, r2 = 3; + System.out.printf("Status for changes between revision %d and %d:\n", r1, r2); + sc.walk(r1, r2, dump); + // + System.out.println("\n\nSame, but sorted in the way hg status does:"); + HgStatusCollector.Record r = sc.status(r1, r2); + sortAndPrint('M', r.getModified(), null); + sortAndPrint('A', r.getAdded(), null); + sortAndPrint('R', r.getRemoved(), null); + // + System.out.println("\n\nTry hg status --change <rev>:"); + sc.change(0, dump); + System.out.println("\nStatus against working dir:"); + HgWorkingCopyStatusCollector wcc = new HgWorkingCopyStatusCollector(hgRepo); + wcc.walk(TIP, dump); + System.out.println(); + System.out.printf("Manifest of the revision %d:\n", r2); + hgRepo.getManifest().walk(r2, r2, new ManifestDump()); + System.out.println(); + System.out.printf("\nStatus of working dir against %d:\n", r2); + r = wcc.status(r2); + sortAndPrint('M', r.getModified(), null); + sortAndPrint('A', r.getAdded(), r.getCopied()); + sortAndPrint('R', r.getRemoved(), null); + sortAndPrint('?', r.getUnknown(), null); + sortAndPrint('I', r.getIgnored(), null); + sortAndPrint('C', r.getClean(), null); + sortAndPrint('!', r.getMissing(), null); + } + + private void sortAndPrint(char prefix, List<Path> ul, Map<Path, Path> copies) { + ArrayList<Path> sortList = new ArrayList<Path>(ul); + Collections.sort(sortList); + for (Path s : sortList) { + System.out.print(prefix); + System.out.print(' '); + System.out.println(s); + if (copies != null && copies.containsKey(s)) { + System.out.println(" " + copies.get(s)); + } + } + 
} + + + private void testStatusInternals() { + HgDataFile n = hgRepo.getFileNode(Path.create("design.txt")); + for (String s : new String[] {"011dfd44417c72bd9e54cf89b82828f661b700ed", "e5529faa06d53e06a816e56d218115b42782f1ba", "c18e7111f1fc89a80a00f6a39d51288289a382fc"}) { + // expected: 359, 2123, 3079 + byte[] b = s.getBytes(); + final Nodeid nid = Nodeid.fromAscii(b, 0, b.length); + System.out.println(s + " : " + n.length(nid)); + } + } + + private static class StatusDump implements HgStatusInspector { + public boolean hideStatusPrefix = false; // hg status -n option + public boolean showCopied = true; // -C + public boolean showIgnored = true; // -i + public boolean showClean = true; // -c + + public void modified(Path fname) { + print('M', fname); + } + + public void added(Path fname) { + print('A', fname); + } + + public void copied(Path fnameOrigin, Path fnameAdded) { + added(fnameAdded); + if (showCopied) { + print(' ', fnameOrigin); + } + } + + public void removed(Path fname) { + print('R', fname); + } + + public void clean(Path fname) { + if (showClean) { + print('C', fname); + } + } + + public void missing(Path fname) { + print('!', fname); + } + + public void unknown(Path fname) { + print('?', fname); + } + + public void ignored(Path fname) { + if (showIgnored) { + print('I', fname); + } + } + + private void print(char status, Path fname) { + if (!hideStatusPrefix) { + System.out.print(status); + System.out.print(' '); + } + System.out.println(fname); + } + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Manifest.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import org.tmatesoft.hg.core.HgLogCommand.FileRevision; +import org.tmatesoft.hg.core.HgManifestCommand; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.Path; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Manifest { + + public static void main(String[] args) throws Exception { + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); + if (hgRepo.isInvalid()) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); + return; + } + final boolean debug = cmdLineOpts.getBoolean("--debug"); + final boolean verbose = cmdLineOpts.getBoolean("-v", "--verbose"); + HgManifestCommand.Handler h = new HgManifestCommand.Handler() { + + public void begin(Nodeid manifestRevision) { + } + public void dir(Path p) { + } + public void file(FileRevision fileRevision) { + if (debug) { + System.out.print(fileRevision.getRevision());; + } + if (debug || verbose) { + System.out.print(" 644"); // FIXME real flags! + System.out.print(" "); + } + System.out.println(fileRevision.getPath()); + } + + public void end(Nodeid manifestRevision) { + } + }; + int rev = cmdLineOpts.getSingleInt(TIP, "-r", "--rev"); + new HgManifestCommand(hgRepo).dirs(false).revision(rev).execute(h); + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Options.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.tmatesoft.hg.repo.HgLookup; +import org.tmatesoft.hg.repo.HgRepository; + +/** + * Parse command-line options. Primitive implementation that recognizes options with 0 or 1 argument. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +class Options { + + public final Map<String,List<String>> opt2values = new HashMap<String, List<String>>(); + + public boolean getBoolean(String... aliases) { + return getBoolean(false, aliases); + } + + public boolean getBoolean(boolean def, String... aliases) { + for (String s : aliases) { + if (opt2values.containsKey(s)) { + return true; + } + } + return def; + } + + public String getSingle(String... aliases) { + String rv = null; + for (String s : aliases) { + List<String> values = opt2values.get(s); + if (values != null && values.size() > 0) { + rv = values.get(values.size() - 1); // take last one, most recent + } + } + return rv; + } + + public int getSingleInt(int def, String... aliases) { + String r = getSingle(aliases); + if (r == null) { + return def; + } + return Integer.parseInt(r); + } + + public List<String> getList(String... aliases) { + LinkedList<String> rv = new LinkedList<String>(); + for (String s : aliases) { + List<String> values = opt2values.get(s); + if (values != null) { + rv.addAll(values); + } + } + return rv; + } + + public HgRepository findRepository() throws Exception { + String repoLocation = getSingle("-R", "--repository"); + if (repoLocation != null) { + return new HgLookup().detect(repoLocation); + } + return new HgLookup().detectFromWorkingDir(); + } + + + public static Options parse(String[] commandLineArgs) { + Options rv = new Options(); + List<String> values = new LinkedList<String>(); + rv.opt2values.put("", values); // values with no options + for (String arg : commandLineArgs) { + if (arg.charAt(0) == '-') { + // option + if (arg.length() == 1) { + throw new IllegalArgumentException("Bad option: -"); + } + values = rv.opt2values.get(arg); + if (values == null) { + rv.opt2values.put(arg, values = new LinkedList<String>()); + } + // next value, if any, gets into the values list for arg option. + } else { + values.add(arg); + values = rv.opt2values.get(""); + } + } + return rv; + } +} \ No newline at end of file
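Every console tool in this changeset funnels its arguments through the Options class above. A hypothetical driver (argument values are made up; the parse/getList/getSingleInt/getBoolean/findRepository calls are the ones shown in the diff). One quirk of the parser worth noting: a bare token immediately following an option is captured as that option's value, so free arguments are safest placed before the options:

package org.tmatesoft.hg.console; // Options is package-private, so this sketch lives in its package

import java.util.List;

import org.tmatesoft.hg.repo.HgRepository;

public class OptionsSketch {
    public static void main(String[] ignored) throws Exception {
        String[] args = { "design.txt", "build.xml", "--rev", "1", "-v" };
        Options opts = Options.parse(args);
        List<String> files = opts.getList("");                        // [design.txt, build.xml]
        int rev = opts.getSingleInt(HgRepository.TIP, "-r", "--rev"); // 1
        boolean verbose = opts.getBoolean("-v", "--verbose");         // true
        HgRepository repo = opts.findRepository();                    // -R/--repository, else working dir
        System.out.printf("%s @ %d (verbose=%b) in %s\n", files, rev, verbose, repo.getLocation());
    }
}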
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Outgoing.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import java.util.Collection; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.repo.HgChangelog; +import org.tmatesoft.hg.repo.HgRepository; + + +/** + * WORK IN PROGRESS, DO NOT USE + * hg out + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Outgoing { + + public static void main(String[] args) throws Exception { + Options cmdLineOpts = Options.parse(args); + HgRepository hgRepo = cmdLineOpts.findRepository(); + if (hgRepo.isInvalid()) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); + return; + } + // FIXME detection of + List<Nodeid> base = new LinkedList<Nodeid>(); + base.add(Nodeid.fromAscii("d6d2a630f4a6d670c90a5ca909150f2b426ec88f".getBytes(), 0, 40)); + // + // fill with all known + HgChangelog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker(); + pw.init(); + LinkedHashSet<Nodeid> sendToRemote = new LinkedHashSet<Nodeid>(pw.allNodes()); + dump("initial state", sendToRemote); + // remove base and its parents + LinkedList<Nodeid> queueToClean = new LinkedList<Nodeid>(base); + while (!queueToClean.isEmpty()) { + Nodeid nid = queueToClean.removeFirst(); + if (sendToRemote.remove(nid)) { + pw.appendParentsOf(nid, queueToClean); + } + } + dump("Clean from known parents", sendToRemote); + // XXX I think sendToRemote is what we actually need here - everything local, missing from remote + // however, if we need to send only a subset of these, need to proceed. + LinkedList<Nodeid> result = new LinkedList<Nodeid>(); + // find among left those without parents + for (Nodeid nid : sendToRemote) { + Nodeid p1 = pw.firstParent(nid); + // in fact, we may assume nulls are never part of sendToRemote + if (p1 != null && !sendToRemote.contains(p1)) { + Nodeid p2 = pw.secondParent(nid); + if (p2 == null || !sendToRemote.contains(p2)) { + result.add(nid); + } + } + } + dump("Result", result); + // final outcome is the collection of nodes between(lastresult and revision/tip) + // + System.out.println("TODO: nodes between result and tip"); + } + + private static void dump(String s, Collection<Nodeid> c) { + System.out.println(s); + for (Nodeid n : c) { + System.out.println(n); + } + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Remote.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,131 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.net.URL; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.util.List; +import java.util.Map; +import java.util.prefs.Preferences; +import java.util.zip.InflaterInputStream; + +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; + +import org.tmatesoft.hg.internal.ConfigFile; +import org.tmatesoft.hg.internal.Internals; + +/** + * WORK IN PROGRESS, DO NOT USE + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Remote { + + /* + * @see http://mercurial.selenic.com/wiki/WireProtocol + cmd=branches gives 4 nodeids (head, root, first parent, second parent) per line (few lines possible, per branch, perhaps?) 
+ cmd=capabilities gives lookup ...subset and 3 compress methods + // lookup changegroupsubset unbundle=HG10GZ,HG10BZ,HG10UN + cmd=heads gives space-separated list of nodeids (or just one) + nodeids are in hex (printable) format, need to convert fromAscii() + cmd=branchmap + */ + public static void main(String[] args) throws Exception { + ConfigFile cfg = new Internals().newConfigFile(); + cfg.addLocation(new File(System.getProperty("user.home"), ".hgrc")); + String svnkitServer = cfg.getSection("paths").get("svnkit"); +// URL url = new URL(svnkitServer + "?cmd=changegroup&roots=a78c980749e3ccebb47138b547e9b644a22797a9"); +// URL url = new URL("http://localhost:8000/" + "?cmd=stream_out"); + URL url = new URL(svnkitServer + "?cmd=stream_out"); + + SSLContext sslContext = SSLContext.getInstance("SSL"); + class TrustEveryone implements X509TrustManager { + public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { + System.out.println("checkClientTrusted " + authType); + } + public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { + System.out.println("checkServerTrusted" + authType); + } + public X509Certificate[] getAcceptedIssuers() { + return new X509Certificate[0]; + } + } + // Hack to get Base64-encoded credentials + Preferences tempNode = Preferences.userRoot().node("xxx"); + tempNode.putByteArray("xxx", url.getUserInfo().getBytes()); + String authInfo = tempNode.get("xxx", null); + tempNode.removeNode(); + // + sslContext.init(null, new TrustManager[] { new TrustEveryone() }, null); + HttpsURLConnection urlConnection = (HttpsURLConnection) url.openConnection(); + urlConnection.addRequestProperty("User-Agent", "jhg/0.1.0"); + urlConnection.addRequestProperty("Accept", "application/mercurial-0.1"); + urlConnection.addRequestProperty("Authorization", "Basic " + authInfo); + urlConnection.setSSLSocketFactory(sslContext.getSocketFactory()); + urlConnection.connect(); + System.out.println("Response headers:"); + final Map<String, List<String>> headerFields = urlConnection.getHeaderFields(); + for (String s : headerFields.keySet()) { + System.out.printf("%s: %s\n", s, urlConnection.getHeaderField(s)); + } + System.out.printf("Content type is %s and its length is %d\n", urlConnection.getContentType(), urlConnection.getContentLength()); + InputStream is = urlConnection.getInputStream(); + // + dump(is, -1); // simple dump, any cmd +// writeBundle(is); // cmd=changegroup + // + urlConnection.disconnect(); + // + } + + private static void dump(InputStream is, int limit) throws IOException { + int b; + while ((b =is.read()) != -1) { + System.out.print((char) b); + if (limit != -1) { + if (--limit < 0) { + break; + } + } + } + System.out.println(); + } + + private static void writeBundle(InputStream is) throws IOException { + InflaterInputStream zipStream = new InflaterInputStream(is); + File tf = File.createTempFile("hg-bundle-", null); + FileOutputStream fos = new FileOutputStream(tf); + int r; + byte[] buf = new byte[8*1024]; + while ((r = zipStream.read(buf)) != -1) { + fos.write(buf, 0, r); + } + fos.close(); + zipStream.close(); + System.out.println(tf); + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cmdline/org/tmatesoft/hg/console/Status.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.console; + +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import org.tmatesoft.hg.core.HgRepoFacade; +import org.tmatesoft.hg.core.HgStatus; +import org.tmatesoft.hg.core.HgStatus.Kind; +import org.tmatesoft.hg.core.HgStatusCommand; +import org.tmatesoft.hg.util.Path; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Status { + + public static void main(String[] args) throws Exception { + Options cmdLineOpts = Options.parse(args); + HgRepoFacade hgRepo = new HgRepoFacade(); + if (!hgRepo.init(cmdLineOpts.findRepository())) { + System.err.printf("Can't find repository in: %s\n", hgRepo.getRepository().getLocation()); + return; + } + // + HgStatusCommand cmd = hgRepo.createStatusCommand(); + if (cmdLineOpts.getBoolean("-A", "--all")) { + cmd.all(); + } else { + // default: mardu + cmd.modified(cmdLineOpts.getBoolean(true, "-m", "--modified")); + cmd.added(cmdLineOpts.getBoolean(true, "-a", "--added")); + cmd.removed(cmdLineOpts.getBoolean(true, "-r", "--removed")); + cmd.deleted(cmdLineOpts.getBoolean(true, "-d", "--deleted")); + cmd.unknown(cmdLineOpts.getBoolean(true, "-u", "--unknonwn")); + cmd.clean(cmdLineOpts.getBoolean("-c", "--clean")); + cmd.ignored(cmdLineOpts.getBoolean("-i", "--ignored")); + } +// cmd.subrepo(cmdLineOpts.getBoolean("-S", "--subrepos")) + final boolean noStatusPrefix = cmdLineOpts.getBoolean("-n", "--no-status"); + final boolean showCopies = cmdLineOpts.getBoolean("-C", "--copies"); + class StatusHandler implements HgStatusCommand.Handler { + + final Map<HgStatus.Kind, List<Path>> data = new TreeMap<HgStatus.Kind, List<Path>>(); + final Map<Path, Path> copies = showCopies ? 
new HashMap<Path,Path>() : null; + + public void handleStatus(HgStatus s) { + List<Path> l = data.get(s.getKind()); + if (l == null) { + l = new LinkedList<Path>(); + data.put(s.getKind(), l); + } + l.add(s.getPath()); + if (s.isCopy() && showCopies) { + copies.put(s.getPath(), s.getOriginalPath()); + } + } + + public void dump() { + sortAndPrint('M', data.get(Kind.Modified), null); + sortAndPrint('A', data.get(Kind.Added), copies); + sortAndPrint('R', data.get(Kind.Removed), null); + sortAndPrint('?', data.get(Kind.Unknown), null); + sortAndPrint('I', data.get(Kind.Ignored), null); + sortAndPrint('C', data.get(Kind.Clean), null); + sortAndPrint('!', data.get(Kind.Missing), null); + } + + private void sortAndPrint(char prefix, List<Path> ul, Map<Path, Path> copies) { + if (ul == null) { + return; + } + ArrayList<Path> sortList = new ArrayList<Path>(ul); + Collections.sort(sortList); + for (Path s : sortList) { + if (!noStatusPrefix) { + System.out.print(prefix); + System.out.print(' '); + } + System.out.println(s); + if (copies != null && copies.containsKey(s)) { + System.out.println(" " + copies.get(s)); + } + } + } + }; + + StatusHandler statusHandler = new StatusHandler(); + int changeRev = cmdLineOpts.getSingleInt(BAD_REVISION, "--change"); + if (changeRev != BAD_REVISION) { + cmd.change(changeRev); + } else { + List<String> revisions = cmdLineOpts.getList("--rev"); + int size = revisions.size(); + if (size > 1) { + cmd.base(Integer.parseInt(revisions.get(size - 2))).revision(Integer.parseInt(revisions.get(size - 1))); + } else if (size > 0) { + cmd.base(Integer.parseInt(revisions.get(0))); + } + } + cmd.execute(statusHandler); + statusHandler.dump(); + } +}
--- a/design.txt Sun Jan 16 01:40:38 2011 +0100 +++ b/design.txt Wed Mar 09 05:22:17 2011 +0100 @@ -32,22 +32,35 @@ +Nodeid to keep 20 bytes always, Revlog.Inspector to get nodeid array of meaningful data exact size (nor heading 00 bytes, nor 12 extra bytes from the spec) +DataAccess - implement memory mapped files, +Changeset to get index (local revision number) ++RevisionWalker (on manifest) and WorkingCopyWalker (io.File) talking to ? and/or dirstate (StatusCollector and WCSC) ++RevlogStream - Inflater. Perhaps, InflaterStream instead? branch:wrap-data-access ++repo.status - use same collector class twice, difference as external code. add external walker that keeps collected maps and use it in Log operation to give files+,files- ++ strip \1\n metadata out from RevlogStream ++ hash/digest long names for fncache -DataAccess - collect debug info (buffer misses, file size/total read operations) to find out better strategy to buffer size detection. Compare performance. delta merge -RevisionWalker (on manifest) and WorkingCopyWalker (io.File) talking to ? and/or dirstate -RevlogStream - Inflater. Perhaps, InflaterStream instead? -Implement use of fncache (use names from it - perhaps, would help for Mac issues Alex mentioned) along with 'digest'-ing long file names +DataAccess - collect debug info (buffer misses, file size/total read operations) to find out better strategy to buffer size detection. Compare performance. - +Strip off metadata from beg of the stream - DataAccess (with rebase/moveBaseOffset(int)) would be handy +Parameterize StatusCollector to produce copy only when needed. And HgDataFile.metadata perhaps should be moved to cacheable place? + +Status operation from GUI - guess, usually on a file/subfolder, hence API should allow for starting path (unlike cmdline, seems useless to implement include/exclide patterns - GUI users hardly enter them, ever) + -> recently introduced FileWalker may perhaps help solving this (if starts walking from selected folder) for status op against WorkingDir? + +? Can I use fncache (names from it - perhaps, would help for Mac issues Alex mentioned) +? Does fncache lists both .i and .d (iow, is it true hashed <long name>.d is different from hashed <long name>.i) -Status operation from GUI - guess, usually on a file/subfolder, hence API should allow for starting path (unlike cmdline, seems useless to implement include/exclide patterns - GUI users hardly enter them, ever) - - ??? encodings of fncache, .hgignore, dirstate ??? http://mercurial.selenic.com/wiki/Manifest says "Multiple changesets may refer to the same manifest revision". To me, each changeset changes repository, hence manifest should update nodeids of the files it lists, effectively creating new manifest revision. +? hg status, compare revision and local file with kw expansion and eol extension +? subrepos in log, status (-S) and manifest commands + + +Commands to get CommandContext where they may share various caches (e.g. StatusCollector) +Perhaps, abstract classes for all Inspectors (i.e. StatusCollector.Inspector) for users to use as base classes to protect from change? + >>>> Effective file read/data access ReadOperation, Revlog does: repo.getFileSystem().run(this.file, new ReadOperation(), long start=0, long end = -1) ReadOperation gets buffer (of whatever size, as decided by FS impl), parses it and then reports if needs more data. @@ -55,11 +68,30 @@ and allows buffer management (i.e. reuse. Single buffer for all reads). 
Scheduling multiple operations (in future, to deal with writes - single queue for FS operations - no locks?) +WRITE: Need to register instances that cache files (e.g. dirstate or .hgignore) to FS notifier, so that cache may get cleared if the file changes (i.e. WriteOperation touches it). + File access: * NIO and mapped files - should be fast. Although seems to give less control on mem usage. * Regular InputStreams and chunked stream on top - allocate List<byte[]>, each (but last) chunk of fixed size (depending on initial file size) + +* API + + rename in .core Cset -> HgChangeset, + + rename in .repo Changeset to HgChangelog.Changeset, Changeset.Inspector -> HgChangelog.Inspector + - CommandContext + - Data access - not bytes, but ByteChannel + - HgRepository constants (TIP, BAD, WC) to HgRevisions enum + - RevisionMap to replace TreeMap<Integer, ?> + + .core.* rename to Hg* + + RepositoryTreeWalker to ManifestCommand to match other command classes + +* defects + + ConfigFile to strip comments from values (#) + <<<<< Tests: -DataAccess - readBytes(length > memBufferSize, length*2 > memBufferSize) - to check impl is capable to read huge chunks of data, regardless of own buffer size \ No newline at end of file +DataAccess - readBytes(length > memBufferSize, length*2 > memBufferSize) - to check impl is capable to read huge chunks of data, regardless of own buffer size + +ExecHelper('cmd', OutputParser()).run(). StatusOutputParser, LogOutputParser extends OutputParser. construct java result similar to that of cmd, compare results +
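One of the API items above reads "Data access - not bytes, but ByteChannel": content is to be pushed to clients in chunks through a channel-like sink rather than returned as a single byte[]. A rough client-side sketch of such a sink; the write(ByteBuffer) signature and the chunk-by-chunk invocation are assumptions for illustration, not something this diff defines:

// Hypothetical chunk consumer; counts lines without ever holding the whole file in memory.
import java.nio.ByteBuffer;

public class LineCountingSink {
    private int newlines;

    // assumed contract: called repeatedly with the next chunk; returns how many bytes were consumed
    public int write(ByteBuffer buffer) {
        int consumed = buffer.remaining();
        while (buffer.hasRemaining()) {
            if (buffer.get() == '\n') {
                newlines++;
            }
        }
        return consumed;
    }

    public int lineCount() {
        return newlines;
    }
}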
--- a/src/com/tmate/hgkit/console/Bundle.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.io.File; - -import com.tmate.hgkit.fs.DataAccessProvider; -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgBundle; -import com.tmate.hgkit.ll.HgRepository; - -/** - * - * @author artem - */ -public class Bundle { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - File bundleFile = new File("/temp/hg/hg-bundle-a78c980749e3.tmp"); - DataAccessProvider dap = new DataAccessProvider(); - HgBundle hgBundle = new HgBundle(dap, bundleFile); -// hgBundle.dump(); - hgBundle.changes(hgRepo); - } -}
--- a/src/com/tmate/hgkit/console/Cat.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.DigestHelper; -import com.tmate.hgkit.ll.HgDataFile; -import com.tmate.hgkit.ll.HgIgnore; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.LocalHgRepo; - -/** - * @author artem - * - */ -public class Cat { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - HgIgnore ignore = ((LocalHgRepo) hgRepo).loadIgnore(); - for (String s : new String[] {"design.txt", "src/com/tmate/hgkit/ll/Changelog.java", "src/Extras.java", "bin/com/tmate/hgkit/ll/Changelog.class"} ) { - System.out.println("Ignored " + s + ": " + ignore.isIgnored(s)); - } - DigestHelper dh = new DigestHelper(); - for (String fname : cmdLineOpts.files) { - System.out.println(fname); - HgDataFile fn = hgRepo.getFileNode(fname); - if (fn.exists()) { - int total = fn.getRevisionCount(); - System.out.printf("Total revisions: %d\n", total); - for (int i = 0; i < total; i++) { - byte[] content = fn.content(i); - System.out.println("==========>"); - System.out.println(new String(content)); - int[] parentRevisions = new int[2]; - byte[] parent1 = new byte[20]; - byte[] parent2 = new byte[20]; - fn.parents(i, parentRevisions, parent1, parent2); - System.out.println(dh.sha1(parent1, parent2, content).asHexString()); - } - } else { - System.out.println(">>>Not found!"); - } - } - } -}
--- a/src/com/tmate/hgkit/console/Incoming.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,185 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.util.Collection; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; - -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; -import com.tmate.hgkit.ll.Revlog; - -/** - * - * @author artem - */ -public class Incoming { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - // in fact, all we need from changelog is set of all nodeids. However, since ParentWalker reuses same Nodeids, it's not too expensive - // to reuse it here, XXX although later this may need to be refactored - final Revlog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker(); - pw.init(); - // - HashSet<Nodeid> base = new HashSet<Nodeid>(); - HashSet<Nodeid> unknownRemoteHeads = new HashSet<Nodeid>(); - // imagine empty repository - any nodeid from remote heads would be unknown - unknownRemoteHeads.add(Nodeid.fromAscii("382cfe9463db0484a14136e4b38407419525f0c0".getBytes(), 0, 40)); - // - LinkedList<RemoteBranch> remoteBranches = new LinkedList<RemoteBranch>(); - remoteBranches(unknownRemoteHeads, remoteBranches); - // - HashSet<Nodeid> visited = new HashSet<Nodeid>(); - HashSet<RemoteBranch> processed = new HashSet<RemoteBranch>(); - LinkedList<Nodeid[]> toScan = new LinkedList<Nodeid[]>(); - LinkedHashSet<Nodeid> toFetch = new LinkedHashSet<Nodeid>(); - // next one seems to track heads we've asked (or plan to ask) remote.branches for - HashSet<Nodeid> unknownHeads /*req*/ = new HashSet<Nodeid>(unknownRemoteHeads); - while (!remoteBranches.isEmpty()) { - LinkedList<Nodeid> toQueryRemote = new LinkedList<Nodeid>(); - while (!remoteBranches.isEmpty()) { - RemoteBranch next = remoteBranches.removeFirst(); - if (visited.contains(next.head) || processed.contains(next)) { - continue; - } - if (Nodeid.NULL.equals(next.head)) { - // it's discovery.py that expects next.head to be nullid here, I can't imagine how this may happen, hence this exception - throw new IllegalStateException("I wonder if null if may ever get here with remote branches"); - } else if (pw.knownNode(next.root)) { - // root of the remote change is known locally, analyze to find exact missing changesets - toScan.addLast(new Nodeid[] { next.head, next.root }); - processed.add(next); - } else { - if (!visited.contains(next.root) && !toFetch.contains(next.root)) { - // if parents are locally known, this is new branch (sequence of changes) (sequence sprang out of known parents) - if ((next.p1 == null || pw.knownNode(next.p1)) && (next.p2 == null || pw.knownNode(next.p2))) { - toFetch.add(next.root); - } - // XXX perhaps, may combine this parent processing below (I don't understand what this code is exactly about) - if (pw.knownNode(next.p1)) { - base.add(next.p1); - } - if (pw.knownNode(next.p2)) { - base.add(next.p2); - } - } - if (next.p1 != null && !pw.knownNode(next.p1) && !unknownHeads.contains(next.p1)) { - toQueryRemote.add(next.p1); - unknownHeads.add(next.p1); - } - if 
(next.p2 != null && !pw.knownNode(next.p2) && !unknownHeads.contains(next.p2)) { - toQueryRemote.add(next.p2); - unknownHeads.add(next.p2); - } - } - visited.add(next.head); - } - if (!toQueryRemote.isEmpty()) { - // discovery.py in fact does this in batches of 10 revisions a time. - // however, this slicing may be done in remoteBranches call instead (if needed) - remoteBranches(toQueryRemote, remoteBranches); - } - } - while (!toScan.isEmpty()) { - Nodeid[] head_root = toScan.removeFirst(); - List<Nodeid> nodesBetween = remoteBetween(head_root[0], head_root[1], new LinkedList<Nodeid>()); - nodesBetween.add(head_root[1]); - int x = 1; - Nodeid p = head_root[0]; - for (Nodeid i : nodesBetween) { - System.out.println("narrowing " + x + ":" + nodesBetween.size() + " " + i.shortNotation()); - if (pw.knownNode(i)) { - if (x <= 2) { - toFetch.add(p); - base.add(i); - } else { - // XXX original discovery.py collects new elements to scan separately - // likely to "batch" calls to server - System.out.println("narrowed branch search to " + p.shortNotation() + ":" + i.shortNotation()); - toScan.addLast(new Nodeid[] { p, i }); - } - break; - } - x = x << 1; - p = i; - } - } - for (Nodeid n : toFetch) { - if (pw.knownNode(n)) { - System.out.println("Erroneous to fetch:" + n); - } else { - System.out.println(n); - } - } - - } - - static final class RemoteBranch { - public Nodeid head, root, p1, p2; - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (false == obj instanceof RemoteBranch) { - return false; - } - RemoteBranch o = (RemoteBranch) obj; - return head.equals(o.head) && root.equals(o.root) && (p1 == null && o.p1 == null || p1.equals(o.p1)) && (p2 == null && o.p2 == null || p2.equals(o.p2)); - } - } - - private static void remoteBranches(Collection<Nodeid> unknownRemoteHeads, List<RemoteBranch> remoteBranches) { - // discovery.findcommonincoming: - // unknown = remote.branches(remote.heads); - // sent: cmd=branches&roots=d6d2a630f4a6d670c90a5ca909150f2b426ec88f+ - // received: d6d2a630f4a6d670c90a5ca909150f2b426ec88f dbd663faec1f0175619cf7668bddc6350548b8d6 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 - // head, root, first parent, second parent - // - // TODO implement this with remote access - // - RemoteBranch rb = new RemoteBranch(); - rb.head = unknownRemoteHeads.iterator().next(); - rb.root = Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40); - remoteBranches.add(rb); - } - - private static List<Nodeid> remoteBetween(Nodeid nodeid1, Nodeid nodeid2, List<Nodeid> list) { - // sent: cmd=between&pairs=d6d2a630f4a6d670c90a5ca909150f2b426ec88f-dbd663faec1f0175619cf7668bddc6350548b8d6 - // received: a78c980749e3ccebb47138b547e9b644a22797a9 286d221f6c28cbfce25ea314e1f46a23b7f979d3 fc265ddeab262ff5c34b4cf4e2522d8d41f1f05b a3576694a4d1edaa681cab15b89d6b556b02aff4 - // 1st, 2nd, fourth and eights of total 8 changes between rev9 and rev0 - // - // - // a78c980749e3ccebb47138b547e9b644a22797a9 286d221f6c28cbfce25ea314e1f46a23b7f979d3 fc265ddeab262ff5c34b4cf4e2522d8d41f1f05b a3576694a4d1edaa681cab15b89d6b556b02aff4 - //d6d2a630f4a6d670c90a5ca909150f2b426ec88f a78c980749e3ccebb47138b547e9b644a22797a9 5abe5af181bd6a6d3e94c378376c901f0f80da50 08db726a0fb7914ac9d27ba26dc8bbf6385a0554 - - // TODO implement with remote access - String response = null; - if (nodeid1.equals(Nodeid.fromAscii("382cfe9463db0484a14136e4b38407419525f0c0".getBytes(), 0, 40)) && 
nodeid2.equals(Nodeid.fromAscii("dbd663faec1f0175619cf7668bddc6350548b8d6".getBytes(), 0, 40))) { - response = "d6d2a630f4a6d670c90a5ca909150f2b426ec88f a78c980749e3ccebb47138b547e9b644a22797a9 5abe5af181bd6a6d3e94c378376c901f0f80da50 08db726a0fb7914ac9d27ba26dc8bbf6385a0554"; - } else if (nodeid1.equals(Nodeid.fromAscii("a78c980749e3ccebb47138b547e9b644a22797a9".getBytes(), 0, 40)) && nodeid2.equals(Nodeid.fromAscii("5abe5af181bd6a6d3e94c378376c901f0f80da50".getBytes(), 0, 40))) { - response = "286d221f6c28cbfce25ea314e1f46a23b7f979d3"; - } - if (response == null) { - throw HgRepository.notImplemented(); - } - for (String s : response.split(" ")) { - list.add(Nodeid.fromAscii(s.getBytes(), 0, 40)); - } - return list; - } - -}
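The deleted Incoming prototype above talks to the remote in two steps: "branches" returns head/root/p1/p2 quadruples for unknown heads, and "between" samples the chain from a head down to a root at exponentially growing distances, which is why the narrowing loop doubles x on every iteration (the comment's "1st, 2nd, fourth and eights of total 8 changes"). A tiny illustration of that spacing, not part of the original code:

// Prints the distances from the head at which 'between' is expected to report changesets.
public class BetweenSampling {
    public static void main(String[] args) {
        int chainLength = 8; // e.g. the 8 changesets between rev9 and rev0 mentioned above
        for (int distance = 1; distance <= chainLength; distance <<= 1) {
            System.out.println("sample at distance " + distance + " from the head");
        }
    }
}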
--- a/src/com/tmate/hgkit/console/Log.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,202 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.util.Formatter; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.Changeset; -import com.tmate.hgkit.ll.HgDataFile; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; -import com.tmate.hgkit.ll.Revlog; - -/** - * @author artem - */ -public class Log { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - System.out.println(hgRepo.getLocation()); - final Dump dump = new Dump(hgRepo); - dump.complete = true; //cmdLineOpts; - dump.reverseOrder = true; - dump.branches = cmdLineOpts.branches; - if (cmdLineOpts.users != null) { - dump.users = new LinkedHashSet<String>(); - for (String u : cmdLineOpts.users) { - dump.users.add(u.toLowerCase()); - } - } - if (cmdLineOpts.files.isEmpty()) { - if (cmdLineOpts.limit == -1) { - // no revisions and no limit - hgRepo.getChangelog().all(dump); - } else { - // in fact, external (to dump inspector) --limit processing yelds incorrect results when other args - // e.g. -u or -b are used (i.e. with -u shall give <limit> csets with user, not check last <limit> csets for user - int[] r = new int[] { 0, hgRepo.getChangelog().getRevisionCount() }; - if (fixRange(r, dump.reverseOrder, cmdLineOpts.limit) == 0) { - System.out.println("No changes"); - return; - } - hgRepo.getChangelog().range(r[0], r[1], dump); - } - dump.complete(); - } else { - for (String fname : cmdLineOpts.files) { - HgDataFile f1 = hgRepo.getFileNode(fname); - System.out.println("History of the file: " + f1.getPath()); - if (cmdLineOpts.limit == -1) { - f1.history(dump); - } else { - int[] r = new int[] { 0, f1.getRevisionCount() }; - if (fixRange(r, dump.reverseOrder, cmdLineOpts.limit) == 0) { - System.out.println("No changes"); - continue; - } - f1.history(r[0], r[1], dump); - } - dump.complete(); - } - } - // - // XXX new ChangelogWalker().setFile("hello.c").setRevisionRange(1, 4).accept(new Visitor); - } - - private static int fixRange(int[] start_end, boolean reverse, int limit) { - assert start_end.length == 2; - if (limit < start_end[1]) { - if (reverse) { - // adjust left boundary of the range - start_end[0] = start_end[1] - limit; - } else { - start_end[1] = limit; // adjust right boundary - } - } - int rv = start_end[1] - start_end[0]; - start_end[1]--; // range needs index, not length - return rv; - } - - // Differences with standard hg log output - // - complete == true (--debug) files are not broke down to modified,+ and - - private static final class Dump implements Changeset.Inspector { - // params - boolean complete = false; - boolean reverseOrder = false; - Set<String> branches; - Set<String> users; // shall be lowercased - // own - private LinkedList<String> l = new LinkedList<String>(); - private final HgRepository repo; - private Revlog.ParentWalker changelogWalker; - private final int tip ; - - public Dump(HgRepository hgRepo) { - repo = hgRepo; 
- tip = hgRepo.getChangelog().getRevisionCount() - 1; - } - - public void next(int revisionNumber, Nodeid nodeid, Changeset cset) { - if (branches != null && !branches.contains(cset.branch())) { - return; - } - if (users != null) { - String csetUser = cset.user().toLowerCase(); - boolean found = false; - for (String u : users) { - if (csetUser.indexOf(u) != -1) { - found = true; - break; - } - } - if (!found) { - return; - } - } - final String s = print(revisionNumber, nodeid, cset); - if (reverseOrder) { - l.addFirst(s); - } else { - System.out.print(s); - } - } - - public void complete() { - if (!reverseOrder) { - return; - } - for (String s : l) { - System.out.print(s); - } - l.clear(); - changelogWalker = null; - } - - private String print(int revNumber, Nodeid csetNodeid, Changeset cset) { - StringBuilder sb = new StringBuilder(); - Formatter f = new Formatter(sb); - f.format("changeset: %d:%s\n", revNumber, complete ? csetNodeid : csetNodeid.shortNotation()); - if (revNumber == tip || repo.getTags().isTagged(csetNodeid)) { - - sb.append("tag: "); - for (String t : repo.getTags().tags(csetNodeid)) { - sb.append(t); - sb.append(' '); - } - if (revNumber == tip) { - sb.append("tip"); - } - sb.append('\n'); - } - if (complete) { - if (changelogWalker == null) { - changelogWalker = repo.getChangelog().new ParentWalker(); - changelogWalker.init(); - } - Nodeid p1 = changelogWalker.safeFirstParent(csetNodeid); - Nodeid p2 = changelogWalker.safeSecondParent(csetNodeid); - int p1x = p1 == Nodeid.NULL ? -1 : repo.getChangelog().getLocalRevisionNumber(p1); - int p2x = p2 == Nodeid.NULL ? -1 : repo.getChangelog().getLocalRevisionNumber(p2); - int mx = repo.getManifest().getLocalRevisionNumber(cset.manifest()); - f.format("parent: %d:%s\nparent: %d:%s\nmanifest: %d:%s\n", p1x, p1, p2x, p2, mx, cset.manifest()); - } - f.format("user: %s\ndate: %s\n", cset.user(), cset.dateString()); - if (complete) { - final List<String> files = cset.files(); - sb.append("files: "); - for (String s : files) { - sb.append(' '); - sb.append(s); - } - if (cset.extras() != null) { - sb.append("\nextra: "); - for (Map.Entry<String, String> e : cset.extras().entrySet()) { - sb.append(' '); - sb.append(e.getKey()); - sb.append('='); - sb.append(e.getValue()); - } - } - f.format("\ndescription:\n%s\n\n", cset.comment()); - } else { - f.format("summary: %s\n\n", cset.comment()); - } - return sb.toString(); - } - } -}
--- a/src/com/tmate/hgkit/console/Main.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.io.BufferedInputStream; -import java.io.DataInput; -import java.io.DataInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.math.BigInteger; -import java.util.LinkedList; -import java.util.Locale; -import java.util.zip.Inflater; - -import com.tmate.hgkit.ll.Changeset; - -/** - * - * @author artem - */ -public class Main { - - public static void main(String[] args) throws Exception { - String repo = "/temp/hg/hello/.hg/"; - String filename = "store/00changelog.i"; -// String filename = "store/data/hello.c.i"; -// String filename = "store/data/docs/readme.i"; -// String repo = "/eclipse-3.7/ws.hg/com.tmate.hgkit/.hg/"; -// String filename = "store/data/design.txt.i"; -// String filename = "store/data/src/com/tmate/hgkit/ll/_revlog_stream.java.i"; - // - LinkedList<Changeset> changelog = new LinkedList<Changeset>(); - // - DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(new File(repo + filename)))); - DataInput di = dis; - dis.mark(10); - int versionField = di.readInt(); - dis.reset(); - final int INLINEDATA = 1 << 16; - - boolean inlineData = (versionField & INLINEDATA) != 0; - System.out.printf("%#8x, inline: %b\n", versionField, inlineData); - System.out.println("\tOffset\tFlags\tPacked\t Actual\tBase Rev Link Rev\tParent1\tParent2\tnodeid"); - int entryCount = 0; - while (dis.available() > 0) { - long l = di.readLong(); - long offset = l >>> 16; - int flags = (int) (l & 0X0FFFF); - int compressedLen = di.readInt(); - int actualLen = di.readInt(); - int baseRevision = di.readInt(); - int linkRevision = di.readInt(); - int parent1Revision = di.readInt(); - int parent2Revision = di.readInt(); - byte[] buf = new byte[32]; - di.readFully(buf, 12, 20); - dis.skip(12); - System.out.printf("%14d %6X %10d %10d %10d %10d %8d %8d %040x\n", offset, flags, compressedLen, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, new BigInteger(buf)); - if (inlineData) { - String resultString; - byte[] data = new byte[compressedLen]; - di.readFully(data); - if (data[0] == 0x78 /* 'x' */) { - Inflater zlib = new Inflater(); - zlib.setInput(data, 0, compressedLen); - byte[] result = new byte[actualLen*2]; - int resultLen = zlib.inflate(result); - zlib.end(); - resultString = new String(result, 0, resultLen, "UTF-8"); - } else if (data[0] == 0x75 /* 'u' */) { - resultString = new String(data, 1, data.length - 1, "UTF-8"); - } else { - resultString = new String(data); - } - System.out.println(resultString); - } - } - dis.close(); - // - System.out.println("\n\n"); - System.out.println("====================>"); - for (Changeset cset : changelog) { - System.out.println(">"); - System.out.println("User: " + cset.user()); - System.out.println("Comment: " + cset.comment()); - System.out.println("Manifest: " + cset.manifest()); - System.out.printf(Locale.US, "Date: %ta %<tb %<td %<tH:%<tM:%<tS %<tY %<tz\n", cset.date()); - System.out.println("Files: " + cset.files().size()); - if (cset.extras() != null) { - System.out.println("Extra: " + cset.extras()); - } - for (String s : cset.files()) { - System.out.print('\t'); - System.out.println(s); - } - System.out.println("<"); - } - } -}
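Besides dumping a changelog, the deleted Main tool above doubles as a walkthrough of the RevlogNG index entry it decodes. Condensed from the fields the loop reads (nothing here beyond what that code does):

RevlogNG index entry, 64 bytes each:
   8 bytes   chunk offset (high 48 bits, l >>> 16) and flags (low 16 bits, l & 0xFFFF)
   4 bytes   compressed (packed) length
   4 bytes   uncompressed (actual) length
   4 bytes   base revision of the delta chain
   4 bytes   link revision (index into the changelog)
   4 bytes   first parent revision
   4 bytes   second parent revision
  20 bytes   nodeid
  12 bytes   padding, skipped

The first four bytes of the file overlap the first entry and carry the version field; when its inline flag (1 << 16) is set, each entry is immediately followed by its data chunk. A chunk starting with 0x78 ('x') is zlib-compressed, one starting with 0x75 ('u') is stored verbatim after that byte, and anything else is taken as literal content.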
--- a/src/com/tmate/hgkit/console/Manifest.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import static com.tmate.hgkit.ll.HgRepository.TIP; - -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgManifest; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; - -/** - * - * @author artem - */ -public class Manifest { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - System.out.println(hgRepo.getLocation()); - HgManifest.Inspector insp = new Dump(); - hgRepo.getManifest().walk(0, TIP, insp); - } - - public static final class Dump implements HgManifest.Inspector { - public boolean begin(int revision, Nodeid nid) { - System.out.printf("%d : %s\n", revision, nid); - return true; - } - - public boolean next(Nodeid nid, String fname, String flags) { - System.out.println(nid + "\t" + fname + "\t\t" + flags); - return true; - } - - public boolean end(int revision) { - System.out.println(); - return true; - } - } -}
--- a/src/com/tmate/hgkit/console/Outgoing.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import java.util.Collection; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; - -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.Nodeid; -import com.tmate.hgkit.ll.Revlog; - -/** - * hg out - * @author artem - */ -public class Outgoing { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - // FIXME detection of - List<Nodeid> base = new LinkedList<Nodeid>(); - base.add(Nodeid.fromAscii("d6d2a630f4a6d670c90a5ca909150f2b426ec88f".getBytes(), 0, 40)); - // - // fill with all known - Revlog.ParentWalker pw = hgRepo.getChangelog().new ParentWalker(); - pw.init(); - LinkedHashSet<Nodeid> sendToRemote = new LinkedHashSet<Nodeid>(pw.allNodes()); - dump("initial state", sendToRemote); - // remove base and its parents - LinkedList<Nodeid> queueToClean = new LinkedList<Nodeid>(base); - while (!queueToClean.isEmpty()) { - Nodeid nid = queueToClean.removeFirst(); - if (sendToRemote.remove(nid)) { - pw.appendParentsOf(nid, queueToClean); - } - } - dump("Clean from known parents", sendToRemote); - // XXX I think sendToRemote is what we actually need here - everything local, missing from remote - // however, if we need to send only a subset of these, need to proceed. - LinkedList<Nodeid> result = new LinkedList<Nodeid>(); - // find among left those without parents - for (Nodeid nid : sendToRemote) { - Nodeid p1 = pw.firstParent(nid); - // in fact, we may assume nulls are never part of sendToRemote - if (p1 != null && !sendToRemote.contains(p1)) { - Nodeid p2 = pw.secondParent(nid); - if (p2 == null || !sendToRemote.contains(p2)) { - result.add(nid); - } - } - } - dump("Result", result); - // final outcome is the collection of nodes between(lastresult and revision/tip) - // - System.out.println("TODO: nodes between result and tip"); - } - - private static void dump(String s, Collection<Nodeid> c) { - System.out.println(s); - for (Nodeid n : c) { - System.out.println(n); - } - } -}
--- a/src/com/tmate/hgkit/console/Status.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.console; - -import static com.tmate.hgkit.ll.HgRepository.TIP; - -import com.tmate.hgkit.fs.RepositoryLookup; -import com.tmate.hgkit.ll.HgDataFile; -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.LocalHgRepo; -import com.tmate.hgkit.ll.Nodeid; - -/** - * - * @author artem - */ -public class Status { - - public static void main(String[] args) throws Exception { - RepositoryLookup repoLookup = new RepositoryLookup(); - RepositoryLookup.Options cmdLineOpts = RepositoryLookup.Options.parse(args); - HgRepository hgRepo = repoLookup.detect(cmdLineOpts); - if (hgRepo.isInvalid()) { - System.err.printf("Can't find repository in: %s\n", hgRepo.getLocation()); - return; - } - System.out.println(hgRepo.getLocation()); - ((LocalHgRepo) hgRepo).loadDirstate().dump(); - final StatusDump dump = new StatusDump(); - dump.showIgnored = false; - dump.showClean = false; - final int r1 = 0, r2 = 11; - System.out.printf("Status for changes between revision %d and %d:\n", r1, r2); - hgRepo.status(r1, r2, dump); - System.out.println("\nStatus against working dir:"); - ((LocalHgRepo) hgRepo).statusLocal(TIP, dump); - System.out.println(); - System.out.printf("Manifest of the revision %d:\n", r2); - hgRepo.getManifest().walk(r2, r2, new Manifest.Dump()); - System.out.println(); - System.out.printf("\nStatus of working dir against %d:\n", r2); - ((LocalHgRepo) hgRepo).statusLocal(r2, dump); - } - - protected static void testStatusInternals(HgRepository hgRepo) { - HgDataFile n = hgRepo.getFileNode("design.txt"); - for (String s : new String[] {"011dfd44417c72bd9e54cf89b82828f661b700ed", "e5529faa06d53e06a816e56d218115b42782f1ba", "c18e7111f1fc89a80a00f6a39d51288289a382fc"}) { - // expected: 359, 2123, 3079 - byte[] b = s.getBytes(); - final Nodeid nid = Nodeid.fromAscii(b, 0, b.length); - System.out.println(s + " : " + n.length(nid)); - } - } - - private static class StatusDump implements HgRepository.StatusInspector { - public boolean hideStatusPrefix = false; // hg status -n option - public boolean showCopied = true; // -C - public boolean showIgnored = true; // -i - public boolean showClean = true; // -c - - public void modified(String fname) { - print('M', fname); - } - - public void added(String fname) { - print('A', fname); - } - - public void copied(String fnameOrigin, String fnameAdded) { - added(fnameAdded); - if (showCopied) { - print(' ', fnameOrigin); - } - } - - public void removed(String fname) { - print('R', fname); - } - - public void clean(String fname) { - if (showClean) { - print('C', fname); - } - } - - public void missing(String fname) { - print('!', fname); - } - - public void unknown(String fname) { - print('?', fname); - } - - public void ignored(String fname) { - if (showIgnored) { - print('I', fname); - } - } - - private void print(char status, String fname) { - if (!hideStatusPrefix) { - System.out.print(status); - System.out.print(' '); - } - System.out.println(fname); - } - } -}
--- a/src/com/tmate/hgkit/fs/ByteArrayDataAccess.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.IOException; - -/** - * - * @author artem - */ -public class ByteArrayDataAccess extends DataAccess { - - private final byte[] data; - private final int offset; - private final int length; - private int pos; - - public ByteArrayDataAccess(byte[] data) { - this(data, 0, data.length); - } - - public ByteArrayDataAccess(byte[] data, int offset, int length) { - this.data = data; - this.offset = offset; - this.length = length; - pos = 0; - } - - @Override - public byte readByte() throws IOException { - if (pos >= length) { - throw new IOException(); - } - return data[offset + pos++]; - } - @Override - public void readBytes(byte[] buf, int off, int len) throws IOException { - if (len > (this.length - pos)) { - throw new IOException(); - } - System.arraycopy(data, pos, buf, off, len); - pos += len; - } - - @Override - public void reset() { - pos = 0; - } - @Override - public long length() { - return length; - } - @Override - public void seek(long offset) { - pos = (int) offset; - } - @Override - public void skip(int bytes) throws IOException { - seek(pos + bytes); - } - @Override - public boolean isEmpty() { - return pos >= length; - } - - // - - // when byte[] needed from DA, we may save few cycles and some memory giving this (otherwise unsafe) access to underlying data - @Override - public byte[] byteArray() { - return data; - } -}
--- a/src/com/tmate/hgkit/fs/DataAccess.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.IOException; - -/** - * relevant parts of DataInput, non-stream nature (seek operation), explicit check for end of data. - * convenient skip (+/- bytes) - * Primary goal - effective file read, so that clients don't need to care whether to call few - * distinct getInt() or readBytes(totalForFewInts) and parse themselves instead in an attempt to optimize. - * Name: ByteSource? DataSource, DataInput, ByteInput - */ -public class DataAccess { - public boolean isEmpty() { - return true; - } - public long length() { - return 0; - } - // get this instance into initial state - public void reset() throws IOException { - // nop, empty instance is always in the initial state - } - // absolute positioning - public void seek(long offset) throws IOException { - throw new UnsupportedOperationException(); - } - // relative positioning - public void skip(int bytes) throws IOException { - throw new UnsupportedOperationException(); - } - // shall be called once this object no longer needed - public void done() { - // no-op in this empty implementation - } - public int readInt() throws IOException { - byte[] b = new byte[4]; - readBytes(b, 0, 4); - return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); - } - public long readLong() throws IOException { - byte[] b = new byte[8]; - readBytes(b, 0, 8); - int i1 = b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); - int i2 = b[4] << 24 | (b[5] & 0xFF) << 16 | (b[6] & 0xFF) << 8 | (b[7] & 0xFF); - return ((long) i1) << 32 | ((long) i2 & 0xFFFFFFFF); - } - public void readBytes(byte[] buf, int offset, int length) throws IOException { - throw new UnsupportedOperationException(); - } - public byte readByte() throws IOException { - throw new UnsupportedOperationException(); - } - - // XXX decide whether may or may not change position in the DataAccess - // FIXME exception handling is not right, just for the sake of quick test - public byte[] byteArray() { - byte[] rv = new byte[(int) length()]; - try { - reset(); - readBytes(rv, 0, rv.length); - } catch (IOException ex) { - ex.printStackTrace(); - } - return rv; - } -} \ No newline at end of file
--- a/src/com/tmate/hgkit/fs/DataAccessProvider.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,285 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.MappedByteBuffer; -import java.nio.channels.FileChannel; - -/** - * - * @author artem - */ -public class DataAccessProvider { - - private final int mapioMagicBoundary; - private final int bufferSize; - - public DataAccessProvider() { - this(100 * 1024, 8 * 1024); - } - - public DataAccessProvider(int mapioBoundary, int regularBufferSize) { - mapioMagicBoundary = mapioBoundary; - bufferSize = regularBufferSize; - } - - public DataAccess create(File f) { - if (!f.exists()) { - return new DataAccess(); - } - try { - FileChannel fc = new FileInputStream(f).getChannel(); - if (fc.size() > mapioMagicBoundary) { - // TESTS: bufLen of 1024 was used to test MemMapFileAccess - return new MemoryMapFileAccess(fc, fc.size(), mapioMagicBoundary); - } else { - // XXX once implementation is more or less stable, - // may want to try ByteBuffer.allocateDirect() to see - // if there's any performance gain. - boolean useDirectBuffer = false; - // TESTS: bufferSize of 100 was used to check buffer underflow states when readBytes reads chunks bigger than bufSize - return new FileAccess(fc, fc.size(), bufferSize, useDirectBuffer); - } - } catch (IOException ex) { - // unlikely to happen, we've made sure file exists. - ex.printStackTrace(); // FIXME log error - } - return new DataAccess(); // non-null, empty. - } - - // DOESN'T WORK YET - private static class MemoryMapFileAccess extends DataAccess { - private FileChannel fileChannel; - private final long size; - private long position = 0; // always points to buffer's absolute position in the file - private final int memBufferSize; - private MappedByteBuffer buffer; - - public MemoryMapFileAccess(FileChannel fc, long channelSize, int /*long?*/ bufferSize) { - fileChannel = fc; - size = channelSize; - memBufferSize = bufferSize; - } - - @Override - public boolean isEmpty() { - return position + (buffer == null ? 0 : buffer.position()) >= size; - } - - @Override - public long length() { - return size; - } - - @Override - public void reset() throws IOException { - seek(0); - } - - @Override - public void seek(long offset) { - assert offset >= 0; - // offset may not necessarily be further than current position in the file (e.g. rewind) - if (buffer != null && /*offset is within buffer*/ offset >= position && (offset - position) < buffer.limit()) { - buffer.position((int) (offset - position)); - } else { - position = offset; - buffer = null; - } - } - - @Override - public void skip(int bytes) throws IOException { - assert bytes >= 0; - if (buffer == null) { - position += bytes; - return; - } - if (buffer.remaining() > bytes) { - buffer.position(buffer.position() + bytes); - } else { - position += buffer.position() + bytes; - buffer = null; - } - } - - private void fill() throws IOException { - if (buffer != null) { - position += buffer.position(); - } - long left = size - position; - buffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, position, left < memBufferSize ? 
left : memBufferSize); - } - - @Override - public void readBytes(byte[] buf, int offset, int length) throws IOException { - if (buffer == null || !buffer.hasRemaining()) { - fill(); - } - // XXX in fact, we may try to create a MappedByteBuffer of exactly length size here, and read right away - while (length > 0) { - int tail = buffer.remaining(); - if (tail == 0) { - throw new IOException(); - } - if (tail >= length) { - buffer.get(buf, offset, length); - } else { - buffer.get(buf, offset, tail); - fill(); - } - offset += tail; - length -= tail; - } - } - - @Override - public byte readByte() throws IOException { - if (buffer == null || !buffer.hasRemaining()) { - fill(); - } - if (buffer.hasRemaining()) { - return buffer.get(); - } - throw new IOException(); - } - - @Override - public void done() { - buffer = null; - if (fileChannel != null) { - try { - fileChannel.close(); - } catch (IOException ex) { - ex.printStackTrace(); // log debug - } - fileChannel = null; - } - } - } - - // (almost) regular file access - FileChannel and buffers. - private static class FileAccess extends DataAccess { - private FileChannel fileChannel; - private final long size; - private ByteBuffer buffer; - private long bufferStartInFile = 0; // offset of this.buffer in the file. - - public FileAccess(FileChannel fc, long channelSize, int bufferSizeHint, boolean useDirect) { - fileChannel = fc; - size = channelSize; - final int capacity = size < bufferSizeHint ? (int) size : bufferSizeHint; - buffer = useDirect ? ByteBuffer.allocateDirect(capacity) : ByteBuffer.allocate(capacity); - buffer.flip(); // or .limit(0) to indicate it's empty - } - - @Override - public boolean isEmpty() { - return bufferStartInFile + buffer.position() >= size; - } - - @Override - public long length() { - return size; - } - - @Override - public void reset() throws IOException { - seek(0); - } - - @Override - public void seek(long offset) throws IOException { - if (offset > size) { - throw new IllegalArgumentException(); - } - if (offset < bufferStartInFile + buffer.limit() && offset >= bufferStartInFile) { - buffer.position((int) (offset - bufferStartInFile)); - } else { - // out of current buffer, invalidate it (force re-read) - // XXX or ever re-read it right away? - bufferStartInFile = offset; - buffer.clear(); - buffer.limit(0); // or .flip() to indicate we switch to reading - fileChannel.position(offset); - } - } - - @Override - public void skip(int bytes) throws IOException { - final int newPos = buffer.position() + bytes; - if (newPos >= 0 && newPos < buffer.limit()) { - // no need to move file pointer, just rewind/seek buffer - buffer.position(newPos); - } else { - // - seek(bufferStartInFile + newPos); - } - } - - private boolean fill() throws IOException { - if (!buffer.hasRemaining()) { - bufferStartInFile += buffer.limit(); - buffer.clear(); - if (bufferStartInFile < size) { // just in case there'd be any exception on EOF, not -1 - fileChannel.read(buffer); - // may return -1 when EOF, but empty will reflect this, hence no explicit support here - } - buffer.flip(); - } - return buffer.hasRemaining(); - } - - @Override - public void readBytes(byte[] buf, int offset, int length) throws IOException { - if (!buffer.hasRemaining()) { - fill(); - } - while (length > 0) { - int tail = buffer.remaining(); - if (tail == 0) { - throw new IOException(); // shall not happen provided stream contains expected data and no attempts to read past isEmpty() == true are made. 
- } - if (tail >= length) { - buffer.get(buf, offset, length); - } else { - buffer.get(buf, offset, tail); - fill(); - } - offset += tail; - length -= tail; - } - } - - @Override - public byte readByte() throws IOException { - if (buffer.hasRemaining()) { - return buffer.get(); - } - if (fill()) { - return buffer.get(); - } - throw new IOException(); - } - - @Override - public void done() { - if (buffer != null) { - buffer = null; - } - if (fileChannel != null) { - try { - fileChannel.close(); - } catch (IOException ex) { - ex.printStackTrace(); // log debug - } - fileChannel = null; - } - } - } -}
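DataAccessProvider above picks an implementation by file size: anything larger than the mapio boundary (100 KB by default) is served by the memory-mapped reader, smaller files by a plain FileChannel with a heap buffer (8 KB by default). A usage sketch built only from the constructors and create(File) shown above; the file path is illustrative:

// Sketch: read the first four bytes of a revlog index through DataAccessProvider.
import java.io.File;
import com.tmate.hgkit.fs.DataAccess;
import com.tmate.hgkit.fs.DataAccessProvider;

public class DataAccessExample {
    public static void main(String[] args) throws Exception {
        // lower the map-io boundary to 16 KB and use a 4 KB buffer for smaller files
        DataAccessProvider dap = new DataAccessProvider(16 * 1024, 4 * 1024);
        DataAccess da = dap.create(new File("/tmp/repo/.hg/store/00changelog.i")); // path is made up
        try {
            if (!da.isEmpty()) {
                int versionField = da.readInt();
                System.out.printf("version/flags: %#x, total %d bytes%n", versionField, da.length());
            }
        } finally {
            da.done(); // releases the underlying FileChannel
        }
    }
}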
--- a/src/com/tmate/hgkit/fs/FilterDataAccess.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.IOException; - -/** - * XXX Perhaps, DataAccessSlice? Unlike FilterInputStream, we limit amount of data read from DataAccess being filtered. - * - * @author artem - */ -public class FilterDataAccess extends DataAccess { - private final DataAccess dataAccess; - private final long offset; - private final int length; - private int count; - - public FilterDataAccess(DataAccess dataAccess, long offset, int length) { - this.dataAccess = dataAccess; - this.offset = offset; - this.length = length; - count = length; - } - - protected int available() { - return count; - } - - @Override - public void reset() throws IOException { - count = length; - } - - @Override - public boolean isEmpty() { - return count <= 0; - } - - @Override - public long length() { - return length; - } - - @Override - public void seek(long localOffset) throws IOException { - if (localOffset < 0 || localOffset > length) { - throw new IllegalArgumentException(); - } - dataAccess.seek(offset + localOffset); - count = (int) (length - localOffset); - } - - @Override - public void skip(int bytes) throws IOException { - int newCount = count - bytes; - if (newCount < 0 || newCount > length) { - throw new IllegalArgumentException(); - } - seek(length - newCount); - /* - can't use next code because don't want to rewind backing DataAccess on reset() - i.e. this.reset() modifies state of this instance only, while filtered DA may go further. - Only actual this.skip/seek/read would rewind it to desired position - dataAccess.skip(bytes); - count = newCount; - */ - - } - - @Override - public byte readByte() throws IOException { - if (count <= 0) { - throw new IllegalArgumentException("Underflow"); // XXX be descriptive - } - if (count == length) { - dataAccess.seek(offset); - } - count--; - return dataAccess.readByte(); - } - - @Override - public void readBytes(byte[] b, int off, int len) throws IOException { - if (count <= 0 || len > count) { - throw new IllegalArgumentException("Underflow"); // XXX be descriptive - } - if (count == length) { - dataAccess.seek(offset); - } - dataAccess.readBytes(b, off, len); - count -= len; - } - - // done shall be no-op, as we have no idea what's going on with DataAccess we filter -}
--- a/src/com/tmate/hgkit/fs/InflaterDataAccess.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,145 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.EOFException; -import java.io.IOException; -import java.util.zip.DataFormatException; -import java.util.zip.Inflater; -import java.util.zip.ZipException; - -/** - * DataAccess counterpart for InflaterInputStream. - * XXX is it really needed to be subclass of FilterDataAccess? - * @author artem - */ -public class InflaterDataAccess extends FilterDataAccess { - - private final Inflater inflater; - private final byte[] buffer; - private final byte[] singleByte = new byte[1]; - private int decompressedPos = 0; - private int decompressedLength = -1; - - public InflaterDataAccess(DataAccess dataAccess, long offset, int length) { - this(dataAccess, offset, length, new Inflater(), 512); - } - - public InflaterDataAccess(DataAccess dataAccess, long offset, int length, Inflater inflater, int bufSize) { - super(dataAccess, offset, length); - this.inflater = inflater; - buffer = new byte[bufSize]; - } - - @Override - public void reset() throws IOException { - super.reset(); - inflater.reset(); - decompressedPos = 0; - } - - @Override - protected int available() { - throw new IllegalStateException("Can't tell how much uncompressed data left"); - } - - @Override - public boolean isEmpty() { - return super.available() <= 0 && inflater.finished(); // and/or inflater.getRemaining() <= 0 ? - } - - @Override - public long length() { - if (decompressedLength != -1) { - return decompressedLength; - } - int c = 0; - try { - int oldPos = decompressedPos; - while (!isEmpty()) { - readByte(); - c++; - } - decompressedLength = c + oldPos; - reset(); - seek(oldPos); - return decompressedLength; - } catch (IOException ex) { - ex.printStackTrace(); // FIXME log error - decompressedLength = -1; // better luck next time? - return 0; - } - } - - @Override - public void seek(long localOffset) throws IOException { - System.out.println("Seek: " + localOffset); - if (localOffset < 0 /* || localOffset >= length() */) { - throw new IllegalArgumentException(); - } - if (localOffset >= decompressedPos) { - skip((int) (localOffset - decompressedPos)); - } else { - reset(); - skip((int) localOffset); - } - } - - @Override - public void skip(int bytes) throws IOException { - if (bytes < 0) { - bytes += decompressedPos; - if (bytes < 0) { - throw new IOException("Underflow. Rewind past start of the slice."); - } - reset(); - // fall-through - } - while (!isEmpty() && bytes > 0) { - readByte(); - bytes--; - } - if (bytes != 0) { - throw new IOException("Underflow. Rewind past end of the slice"); - } - } - - @Override - public byte readByte() throws IOException { - readBytes(singleByte, 0, 1); - return singleByte[0]; - } - - @Override - public void readBytes(byte[] b, int off, int len) throws IOException { - try { - int n; - while (len > 0) { - while ((n = inflater.inflate(b, off, len)) == 0) { - if (inflater.finished() || inflater.needsDictionary()) { - throw new EOFException(); - } - if (inflater.needsInput()) { - // fill: - int toRead = super.available(); - if (toRead > buffer.length) { - toRead = buffer.length; - } - super.readBytes(buffer, 0, toRead); - inflater.setInput(buffer, 0, toRead); - } - } - off += n; - len -= n; - decompressedPos += n; - if (len == 0) { - return; // filled - } - } - } catch (DataFormatException e) { - String s = e.getMessage(); - throw new ZipException(s != null ? 
s : "Invalid ZLIB data format"); - } - } -}
--- a/src/com/tmate/hgkit/fs/RepositoryLookup.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,119 +0,0 @@ -/** - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.fs; - -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - -import com.tmate.hgkit.ll.HgRepository; -import com.tmate.hgkit.ll.LocalHgRepo; - -/** - * @author artem - */ -public class RepositoryLookup { - - public HgRepository detect(Options opts) throws Exception { - if (opts.repoLocation != null) { - return detect(opts.repoLocation); - } - return detectFromWorkingDir(); - } - - public HgRepository detect(String[] commandLineArgs) throws Exception { - return detect(Options.parse(commandLineArgs)); - } - - public HgRepository detectFromWorkingDir() throws Exception { - return detect(System.getProperty("user.dir")); - } - - public HgRepository detect(String location) throws Exception /*FIXME Exception type, RepoInitException? */ { - File dir = new File(location); - File repository; - do { - repository = new File(dir, ".hg"); - if (repository.exists() && repository.isDirectory()) { - break; - } - repository = null; - dir = dir.getParentFile(); - - } while(dir != null); - if (repository == null) { - return new LocalHgRepo(location); - } - return new LocalHgRepo(repository); - } - - public static class Options { - - public String repoLocation; - public List<String> files; - public int limit = -1; - public Set<String> users; - public Set<String> branches; - - public static Options parse(String[] commandLineArgs) { - Options rv = new Options(); - List<String> args = Arrays.asList(commandLineArgs); - LinkedList<String> files = new LinkedList<String>(); - for (Iterator<String> it = args.iterator(); it.hasNext(); ) { - String arg = it.next(); - if (arg.charAt(0) == '-') { - // option - if (arg.length() == 1) { - throw new IllegalArgumentException("Bad option: -"); - } - switch ((int) arg.charAt(1)) { - case (int) 'R' : { - if (! it.hasNext()) { - throw new IllegalArgumentException("Need repo location"); - } - rv.repoLocation = it.next(); - break; - } - case (int) 'l' : { - if (!it.hasNext()) { - throw new IllegalArgumentException(); - } - rv.limit = Integer.parseInt(it.next()); - break; - } - case (int) 'u' : { - if (rv.users == null) { - rv.users = new LinkedHashSet<String>(); - } - rv.users.add(it.next()); - break; - } - case (int) 'b' : { - if (rv.branches == null) { - rv.branches = new LinkedHashSet<String>(); - } - rv.branches.add(it.next()); - break; - } - } - } else { - // filename - files.add(arg); - } - } - if (!files.isEmpty()) { - rv.files = new ArrayList<String>(files); - } else { - rv.files = Collections.emptyList(); - } - return rv; - } - } -}
--- a/src/com/tmate/hgkit/fs/package.html Sun Jan 16 01:40:38 2011 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-<html>
-<boody>
-File System operations
-</bidy>
-</html>
\ No newline at end of file
--- a/src/com/tmate/hgkit/ll/Changelog.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,70 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import com.tmate.hgkit.fs.DataAccess; - -/** - * Representation of the Mercurial changelog file (list of ChangeSets) - * @author artem - */ -public class Changelog extends Revlog { - - /*package-local*/ Changelog(HgRepository hgRepo, RevlogStream content) { - super(hgRepo, content); - } - - public void all(final Changeset.Inspector inspector) { - range(0, content.revisionCount() - 1, inspector); - } - - public void range(int start, int end, final Changeset.Inspector inspector) { - Revlog.Inspector i = new Revlog.Inspector() { - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { - byte[] data = da.byteArray(); - Changeset cset = Changeset.parse(data, 0, data.length); - // XXX there's no guarantee for Changeset.Callback that distinct instance comes each time, consider instance reuse - inspector.next(revisionNumber, Nodeid.fromBinary(nodeid, 0), cset); - } - }; - content.iterate(start, end, true, i); - } - - public List<Changeset> range(int start, int end) { - final ArrayList<Changeset> rv = new ArrayList<Changeset>(end - start + 1); - Revlog.Inspector i = new Revlog.Inspector() { - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { - byte[] data = da.byteArray(); - Changeset cset = Changeset.parse(data, 0, data.length); - rv.add(cset); - } - }; - content.iterate(start, end, true, i); - return rv; - } - - public void range(final Changeset.Inspector inspector, final int... revisions) { - if (revisions == null || revisions.length == 0) { - return; - } - Revlog.Inspector i = new Revlog.Inspector() { - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { - if (Arrays.binarySearch(revisions, revisionNumber) >= 0) { - byte[] data = da.byteArray(); - Changeset cset = Changeset.parse(data, 0, data.length); - inspector.next(revisionNumber, Nodeid.fromBinary(nodeid, 0), cset); - } - } - }; - Arrays.sort(revisions); - content.iterate(revisions[0], revisions[revisions.length - 1], true, i); - } -}
--- a/src/com/tmate/hgkit/ll/Changeset.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,202 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.UnsupportedEncodingException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.Formatter; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -/** - * @see mercurial/changelog.py:read() - * <pre> - format used: - nodeid\n : manifest node in ascii - user\n : user, no \n or \r allowed - time tz extra\n : date (time is int or float, timezone is int) - : extra is metadatas, encoded and separated by '\0' - : older versions ignore it - files\n\n : files modified by the cset, no \n or \r allowed - (.*) : comment (free text, ideally utf-8) - - changelog v0 doesn't use extra - * </pre> - * @author artem - */ -public class Changeset implements Cloneable /*for those that would like to keep a copy*/ { - // TODO immutable - private /*final*/ Nodeid manifest; - private String user; - private String comment; - private List<String> files; // unmodifiable collection (otherwise #files() and implicit #clone() shall be revised) - private Date time; - private int timezone; // not sure it's of any use - private Map<String,String> extras; - - private Changeset() { - } - - public Nodeid manifest() { - return manifest; - } - - public String user() { - return user; - } - - public String comment() { - return comment; - } - - public List<String> files() { - return files; - } - - public Date date() { - return time; - } - - public String dateString() { - StringBuilder sb = new StringBuilder(30); - Formatter f = new Formatter(sb, Locale.US); - f.format("%ta %<tb %<td %<tH:%<tM:%<tS %<tY %<tz", time); - return sb.toString(); - } - - public Map<String, String> extras() { - return extras; - } - - public String branch() { - return extras.get("branch"); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Changeset {"); - sb.append("User: ").append(user).append(", "); - sb.append("Comment: ").append(comment).append(", "); - sb.append("Manifest: ").append(manifest).append(", "); - sb.append("Date: ").append(time).append(", "); - sb.append("Files: ").append(files.size()); - for (String s : files) { - sb.append(", ").append(s); - } - if (extras != null) { - sb.append(", Extra: ").append(extras); - } - sb.append("}"); - return sb.toString(); - } - - public static Changeset parse(byte[] data, int offset, int length) { - Changeset rv = new Changeset(); - rv.init(data, offset, length); - return rv; - } - - /*package-local*/ void init(byte[] data, int offset, int length) { - final int bufferEndIndex = offset + length; - final byte lineBreak = (byte) '\n'; - int breakIndex1 = indexOf(data, lineBreak, offset, bufferEndIndex); - if (breakIndex1 == -1) { - throw new IllegalArgumentException("Bad Changeset data"); - } - Nodeid _nodeid = Nodeid.fromAscii(data, 0, breakIndex1); - int breakIndex2 = indexOf(data, lineBreak, breakIndex1+1, bufferEndIndex); - if (breakIndex2 == -1) { - throw new IllegalArgumentException("Bad Changeset data"); - } - String _user = new String(data, breakIndex1+1, breakIndex2 - breakIndex1 - 1); - int breakIndex3 = indexOf(data, lineBreak, breakIndex2+1, bufferEndIndex); - if (breakIndex3 == -1) { - throw new IllegalArgumentException("Bad Changeset data"); - } - String _timeString = new String(data, breakIndex2+1, breakIndex3 - 
breakIndex2 - 1); - int space1 = _timeString.indexOf(' '); - if (space1 == -1) { - throw new IllegalArgumentException("Bad Changeset data"); - } - int space2 = _timeString.indexOf(' ', space1+1); - if (space2 == -1) { - space2 = _timeString.length(); - } - long unixTime = Long.parseLong(_timeString.substring(0, space1)); // XXX Float, perhaps - int _timezone = Integer.parseInt(_timeString.substring(space1+1, space2)); - // XXX not sure need to add timezone here - I can't figure out whether Hg keeps GMT time, and records timezone just for info, or unixTime is taken local - // on commit and timezone is recorded to adjust it to UTC. - Date _time = new Date(unixTime * 1000); - String _extras = space2 < _timeString.length() ? _timeString.substring(space2+1) : null; - Map<String, String> _extrasMap; - if (_extras == null) { - _extrasMap = Collections.singletonMap("branch", "default"); - } else { - _extrasMap = new HashMap<String, String>(); - for (String pair : _extras.split("\00")) { - int eq = pair.indexOf('='); - // FIXME need to decode key/value, @see changelog.py:decodeextra - _extrasMap.put(pair.substring(0, eq), pair.substring(eq+1)); - } - if (!_extrasMap.containsKey("branch")) { - _extrasMap.put("branch", "default"); - } - _extrasMap = Collections.unmodifiableMap(_extrasMap); - } - - // - int lastStart = breakIndex3 + 1; - int breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex); - ArrayList<String> _files = new ArrayList<String>(5); - while (breakIndex4 != -1 && breakIndex4 + 1 < bufferEndIndex) { - _files.add(new String(data, lastStart, breakIndex4 - lastStart)); - lastStart = breakIndex4 + 1; - if (data[breakIndex4 + 1] == lineBreak) { - // found \n\n - break; - } else { - breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex); - } - } - if (breakIndex4 == -1 || breakIndex4 >= bufferEndIndex) { - throw new IllegalArgumentException("Bad Changeset data"); - } - String _comment; - try { - _comment = new String(data, breakIndex4+2, bufferEndIndex - breakIndex4 - 2, "UTF-8"); - } catch (UnsupportedEncodingException ex) { - _comment = ""; - throw new IllegalStateException("Could hardly happen"); - } - // change this instance at once, don't leave it partially changes in case of error - this.manifest = _nodeid; - this.user = _user; - this.time = _time; - this.timezone = _timezone; - this.files = Collections.unmodifiableList(_files); - this.comment = _comment; - this.extras = _extrasMap; - } - - private static int indexOf(byte[] src, byte what, int startOffset, int endIndex) { - for (int i = startOffset; i < endIndex; i++) { - if (src[i] == what) { - return i; - } - } - return -1; - } - - public interface Inspector { - // first(), last(), single(). - // <T> - // TODO describe whether cset is new instance each time - void next(int revisionNumber, Nodeid nodeid, Changeset cset); - } -}
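The class comment above spells out the changelog entry layout this parser expects. A fabricated entry in that shape (nodeid, names and message are made up for illustration):

0123456789abcdef0123456789abcdef01234567
Jane Doe <jane@example.com>
1299633737 -3600 branch=stable
src/a.txt
src/b.txt

First line of the commit message (utf-8).

Line by line: manifest nodeid in ascii, user, "time tz extras" where extras are \0-separated key=value pairs, the touched files one per line, a blank separator line, and the free-text comment. Feeding such a buffer to Changeset.parse(data, 0, data.length) would yield an instance whose branch() returns "stable".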
--- a/src/com/tmate/hgkit/ll/DigestHelper.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.IOException; -import java.io.InputStream; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; - -/** - * <pre> - * DigestHelper dh; - * dh.sha1(...).asHexString(); - * or - * dh = dh.sha1(...); - * nodeid.equalsTo(dh.asBinary()); - * </pre> - * @author artem - */ -public class DigestHelper { - private MessageDigest sha1; - private byte[] digest; - - public DigestHelper() { - } - - private MessageDigest getSHA1() { - if (sha1 == null) { - try { - sha1 = MessageDigest.getInstance("SHA-1"); - } catch (NoSuchAlgorithmException ex) { - // could hardly happen, JDK from Sun always has sha1. - ex.printStackTrace(); // FIXME log error - } - } - return sha1; - } - - - public DigestHelper sha1(Nodeid nodeid1, Nodeid nodeid2, byte[] data) { - return sha1(nodeid1.cloneData(), nodeid2.cloneData(), data); - } - - // sha1_digest(min(p1,p2) ++ max(p1,p2) ++ final_text) - public DigestHelper sha1(byte[] nodeidParent1, byte[] nodeidParent2, byte[] data) { - MessageDigest alg = getSHA1(); - if ((nodeidParent1[0] & 0x00FF) < (nodeidParent2[0] & 0x00FF)) { - alg.update(nodeidParent1); - alg.update(nodeidParent2); - } else { - alg.update(nodeidParent2); - alg.update(nodeidParent1); - } - digest = alg.digest(data); - assert digest.length == 20; - return this; - } - - public String asHexString() { - if (digest == null) { - throw new IllegalStateException("Shall init with sha1() call first"); - } - return toHexString(digest, 0, digest.length); - } - - // by reference, be careful not to modify (or #clone() if needed) - public byte[] asBinary() { - if (digest == null) { - throw new IllegalStateException("Shall init with sha1() call first"); - } - return digest; - } - - // XXX perhaps, digest functions should throw an exception, as it's caller responsibility to deal with eof, etc - public DigestHelper sha1(InputStream is /*ByteBuffer*/) throws IOException { - MessageDigest alg = getSHA1(); - byte[] buf = new byte[1024]; - int c; - while ((c = is.read(buf)) != -1) { - alg.update(buf, 0, c); - } - digest = alg.digest(); - return this; - } - - public static String toHexString(byte[] data, final int offset, final int count) { - char[] result = new char[count << 1]; - final String hexDigits = "0123456789abcdef"; - final int end = offset+count; - for (int i = offset, j = 0; i < end; i++) { - result[j++] = hexDigits.charAt((data[i] >>> 4) & 0x0F); - result[j++] = hexDigits.charAt(data[i] & 0x0F); - } - return new String(result); - } -}
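[editor's note] The removed DigestHelper above implements Mercurial's revision hashing: SHA-1 over the two parent nodeids, smaller parent first, followed by the revision text. Below is a minimal standalone sketch of that scheme; it orders parents by comparing the full 20 bytes, whereas the class above compares only the leading byte, and all names and sample values are illustrative.

import java.math.BigInteger;
import java.security.MessageDigest;

public class NodeidHashSketch {
    static byte[] revisionHash(byte[] p1, byte[] p2, byte[] text) throws Exception {
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        // hash the lexicographically smaller parent first
        if (new BigInteger(1, p1).compareTo(new BigInteger(1, p2)) < 0) {
            sha1.update(p1);
            sha1.update(p2);
        } else {
            sha1.update(p2);
            sha1.update(p1);
        }
        return sha1.digest(text); // 20-byte digest, i.e. the nodeid
    }

    public static void main(String[] args) throws Exception {
        byte[] nullId = new byte[20]; // root revisions have two null parents
        byte[] digest = revisionHash(nullId, nullId, "file content".getBytes("UTF-8"));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(Character.forDigit((b >>> 4) & 0xF, 16)).append(Character.forDigit(b & 0xF, 16));
        }
        System.out.println(hex); // 40-char notation, same as DigestHelper.toHexString produces
    }
}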
--- a/src/com/tmate/hgkit/ll/HgBundle.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.File; -import java.io.IOException; -import java.util.LinkedList; -import java.util.List; - -import com.tmate.hgkit.fs.ByteArrayDataAccess; -import com.tmate.hgkit.fs.DataAccess; -import com.tmate.hgkit.fs.DataAccessProvider; - -/** - * @see http://mercurial.selenic.com/wiki/BundleFormat - * - * @author artem - */ -public class HgBundle { - - private final File bundleFile; - private final DataAccessProvider accessProvider; - - public HgBundle(DataAccessProvider dap, File bundle) { - accessProvider = dap; - bundleFile = bundle; - } - - public void changes(HgRepository hgRepo) throws IOException { - DataAccess da = accessProvider.create(bundleFile); - DigestHelper dh = new DigestHelper(); - try { - List<GroupElement> changelogGroup = readGroup(da); - if (changelogGroup.isEmpty()) { - throw new IllegalStateException("No changelog group in the bundle"); // XXX perhaps, just be silent and/or log? - } - // XXX in fact, bundle not necessarily starts with the first revision missing in hgRepo - // need to 'scroll' till the last one common. - final Nodeid base = changelogGroup.get(0).firstParent(); - if (!hgRepo.getChangelog().isKnown(base)) { - throw new IllegalArgumentException("unknown parent"); - } - // BundleFormat wiki says: - // Each Changelog entry patches the result of all previous patches - // (the previous, or parent patch of a given patch p is the patch that has a node equal to p's p1 field) - byte[] baseRevContent = hgRepo.getChangelog().content(base); - for (GroupElement ge : changelogGroup) { - byte[] csetContent = RevlogStream.apply(new ByteArrayDataAccess(baseRevContent), -1, ge.patches); - dh = dh.sha1(ge.firstParent(), ge.secondParent(), csetContent); // XXX ge may give me access to byte[] content of nodeid directly, perhaps, I don't need DH to be friend of Nodeid? - if (!ge.node().equalsTo(dh.asBinary())) { - throw new IllegalStateException("Integrity check failed on " + bundleFile + ", node:" + ge.node()); - } - Changeset cs = Changeset.parse(csetContent, 0, csetContent.length); - System.out.println(cs.toString()); - baseRevContent = csetContent; - } - } finally { - da.done(); - } - } - - public void dump() throws IOException { - DataAccess da = accessProvider.create(bundleFile); - try { - LinkedList<String> names = new LinkedList<String>(); - if (!da.isEmpty()) { - System.out.println("Changelog group"); - List<GroupElement> changelogGroup = readGroup(da); - for (GroupElement ge : changelogGroup) { - System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); - } - System.out.println("Manifest group"); - List<GroupElement> manifestGroup = readGroup(da); - for (GroupElement ge : manifestGroup) { - System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); - } - while (!da.isEmpty()) { - int fnameLen = da.readInt(); - if (fnameLen <= 4) { - break; // null chunk, the last one. 
- } - byte[] fname = new byte[fnameLen - 4]; - da.readBytes(fname, 0, fname.length); - names.add(new String(fname)); - List<GroupElement> fileGroup = readGroup(da); - System.out.println(names.getLast()); - for (GroupElement ge : fileGroup) { - System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); - } - } - } - System.out.println(names.size()); - for (String s : names) { - System.out.println(s); - } - } finally { - da.done(); - } - } - - private static List<GroupElement> readGroup(DataAccess da) throws IOException { - int len = da.readInt(); - LinkedList<GroupElement> rv = new LinkedList<HgBundle.GroupElement>(); - while (len > 4 && !da.isEmpty()) { - byte[] nb = new byte[80]; - da.readBytes(nb, 0, 80); - int dataLength = len-84; - LinkedList<RevlogStream.PatchRecord> patches = new LinkedList<RevlogStream.PatchRecord>(); - while (dataLength > 0) { - RevlogStream.PatchRecord pr = RevlogStream.PatchRecord.read(da); - patches.add(pr); - dataLength -= pr.len + 12; - } - rv.add(new GroupElement(nb, patches)); - len = da.isEmpty() ? 0 : da.readInt(); - } - return rv; - } - - static class GroupElement { - private byte[] header; // byte[80] takes 120 bytes, 4 Nodeids - 192 - private List<RevlogStream.PatchRecord> patches; - - GroupElement(byte[] fourNodeids, List<RevlogStream.PatchRecord> patchList) { - assert fourNodeids != null && fourNodeids.length == 80; - // patchList.size() > 0 - header = fourNodeids; - patches = patchList; - } - public Nodeid node() { - return Nodeid.fromBinary(header, 0); - } - public Nodeid firstParent() { - return Nodeid.fromBinary(header, 20); - } - public Nodeid secondParent() { - return Nodeid.fromBinary(header, 40); - } - public Nodeid cset() { // cs seems to be changeset - return Nodeid.fromBinary(header, 60); - } - } -}
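[editor's note] The removed readGroup() above walks a changegroup as a sequence of length-prefixed chunks: the 4-byte length counts itself, an 80-byte header carries four nodeids (node, first parent, second parent, changeset), and the rest of the chunk is delta records of (start, end, length, data); a chunk length of 4 or less ends the group. Below is a minimal sketch of that walk over a plain stream, assuming an uncompressed source already positioned at a group, as the class above does; the class name and the command-line argument are illustrative.

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.FileInputStream;
import java.io.IOException;

public class BundleGroupSketch {
    public static void readGroup(DataInputStream in) throws IOException {
        int chunkLen = in.readInt();
        while (chunkLen > 4) {
            byte[] header = new byte[80];          // node, p1, p2, cset - 20 bytes each
            in.readFully(header);
            int remaining = chunkLen - 4 - 80;     // chunk length includes its own 4 bytes and the header
            while (remaining > 0) {
                int start = in.readInt();          // patch start offset in the base text
                int end = in.readInt();            // patch end offset in the base text
                int dataLen = in.readInt();        // replacement data length
                byte[] data = new byte[dataLen];
                in.readFully(data);
                remaining -= 12 + dataLen;
            }
            try {
                chunkLen = in.readInt();           // next element, or the terminating null chunk
            } catch (EOFException ex) {
                chunkLen = 0;
            }
        }
    }

    public static void main(String[] args) throws IOException {
        DataInputStream in = new DataInputStream(new FileInputStream(args[0]));
        try {
            readGroup(in); // e.g. the changelog group, the first one in the stream
        } finally {
            in.close();
        }
    }
}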
--- a/src/com/tmate/hgkit/ll/HgDataFile.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.HgRepository.TIP; - -import java.util.Arrays; - -import com.tmate.hgkit.fs.DataAccess; - -/** - * Extends Revlog/uses RevlogStream? - * ? name:HgFileNode? - * @author artem - */ -public class HgDataFile extends Revlog { - - // absolute from repo root? - // slashes, unix-style? - // repo location agnostic, just to give info to user, not to access real storage - private final String path; - - /*package-local*/HgDataFile(HgRepository hgRepo, String path, RevlogStream content) { - super(hgRepo, content); - this.path = path; - } - - public boolean exists() { - return content != null; // XXX need better impl - } - - public String getPath() { - return path; // hgRepo.backresolve(this) -> name? - } - - public int length(Nodeid nodeid) { - return content.dataLength(getLocalRevisionNumber(nodeid)); - } - - public byte[] content() { - return content(TIP); - } - - public void history(Changeset.Inspector inspector) { - history(0, content.revisionCount() - 1, inspector); - } - - public void history(int start, int end, Changeset.Inspector inspector) { - if (!exists()) { - throw new IllegalStateException("Can't get history of invalid repository file node"); - } - final int[] commitRevisions = new int[end - start + 1]; - Revlog.Inspector insp = new Revlog.Inspector() { - int count = 0; - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) { - commitRevisions[count++] = linkRevision; - } - }; - content.iterate(start, end, false, insp); - getRepo().getChangelog().range(inspector, commitRevisions); - } - - /** - * XXX perhaps, return value Nodeid[2] and boolean needNodeids is better (and higher level) API for this query? - * - * @param revision - revision to query parents, or {@link HgRepository#TIP} - * @param parentRevisions - int[2] to get local revision numbers of parents (e.g. {6, -1}) - * @param parent1 - byte[20] or null, if parent's nodeid is not needed - * @param parent2 - byte[20] or null, if second parent's nodeid is not needed - * @return - */ - public void parents(int revision, int[] parentRevisions, byte[] parent1, byte[] parent2) { - if (revision != TIP && !(revision >= 0 && revision < content.revisionCount())) { - throw new IllegalArgumentException(String.valueOf(revision)); - } - if (parentRevisions == null || parentRevisions.length < 2) { - throw new IllegalArgumentException(String.valueOf(parentRevisions)); - } - if (parent1 != null && parent1.length < 20) { - throw new IllegalArgumentException(parent1.toString()); - } - if (parent2 != null && parent2.length < 20) { - throw new IllegalArgumentException(parent2.toString()); - } - class ParentCollector implements Revlog.Inspector { - public int p1 = -1; - public int p2 = -1; - public byte[] nodeid; - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) { - p1 = parent1Revision; - p2 = parent2Revision; - this.nodeid = new byte[20]; - // nodeid arg now comes in 32 byte from (as in file format description), however upper 12 bytes are zeros. - System.arraycopy(nodeid, nodeid.length > 20 ? 
nodeid.length - 20 : 0, this.nodeid, 0, 20); - } - }; - ParentCollector pc = new ParentCollector(); - content.iterate(revision, revision, false, pc); - parentRevisions[0] = pc.p1; - parentRevisions[1] = pc.p2; - if (parent1 != null) { - if (parentRevisions[0] == -1) { - Arrays.fill(parent1, 0, 20, (byte) 0); - } else { - content.iterate(parentRevisions[0], parentRevisions[0], false, pc); - System.arraycopy(pc.nodeid, 0, parent1, 0, 20); - } - } - if (parent2 != null) { - if (parentRevisions[1] == -1) { - Arrays.fill(parent2, 0, 20, (byte) 0); - } else { - content.iterate(parentRevisions[1], parentRevisions[1], false, pc); - System.arraycopy(pc.nodeid, 0, parent2, 0, 20); - } - } - } -}
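[editor's note] A usage sketch for the parents() query documented above, written against the com.tmate.hgkit.ll API as it stood before this changeset removed it: local revision numbers land in an int[2], nodeids (when requested) in two byte[20] buffers. The repository path and file name are illustrative, and constructing LocalHgRepo directly is only for brevity.

import java.io.File;
import com.tmate.hgkit.ll.HgDataFile;
import com.tmate.hgkit.ll.HgRepository;
import com.tmate.hgkit.ll.LocalHgRepo;

public class ParentsQuerySketch {
    public static void main(String[] args) throws Exception {
        HgRepository hgRepo = new LocalHgRepo(new File(args[0], ".hg")); // repo root from the command line
        HgDataFile df = hgRepo.getFileNode("src/com/tmate/hgkit/ll/HgDataFile.java");
        int[] parentRevs = new int[2];
        byte[] parent1 = new byte[20], parent2 = new byte[20];
        df.parents(HgRepository.TIP, parentRevs, parent1, parent2);
        // -1 in parentRevs means the parent does not exist; the matching byte[] is zero-filled
        System.out.printf("p1=%d p2=%d%n", parentRevs[0], parentRevs[1]);
    }
}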
--- a/src/com/tmate/hgkit/ll/HgDirstate.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,158 +0,0 @@ -/* - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.File; -import java.io.IOException; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.TreeSet; - -import com.tmate.hgkit.fs.DataAccess; -import com.tmate.hgkit.fs.DataAccessProvider; - -/** - * @see http://mercurial.selenic.com/wiki/DirState - * @see http://mercurial.selenic.com/wiki/FileFormats#dirstate - * @author artem - */ -public class HgDirstate { - - private final LocalHgRepo repo; - private final File dirstateFile; - private Map<String, Record> normal; - private Map<String, Record> added; - private Map<String, Record> removed; - private Map<String, Record> merged; - - public HgDirstate(LocalHgRepo hgRepo, File dirstate) { - this.repo = hgRepo; - this.dirstateFile = dirstate; - } - - private void read() { - normal = added = removed = merged = Collections.<String, Record>emptyMap(); - if (!dirstateFile.exists()) { - return; - } - DataAccessProvider dap = repo.getDataAccess(); - DataAccess da = dap.create(dirstateFile); - if (da.isEmpty()) { - return; - } - // not sure linked is really needed here, just for ease of debug - normal = new LinkedHashMap<String, Record>(); - added = new LinkedHashMap<String, Record>(); - removed = new LinkedHashMap<String, Record>(); - merged = new LinkedHashMap<String, Record>(); - try { - // XXX skip(40) if we don't need these? - byte[] parents = new byte[40]; - da.readBytes(parents, 0, 40); - parents = null; - do { - final byte state = da.readByte(); - final int fmode = da.readInt(); - final int size = da.readInt(); - final int time = da.readInt(); - final int nameLen = da.readInt(); - String fn1 = null, fn2 = null; - byte[] name = new byte[nameLen]; - da.readBytes(name, 0, nameLen); - for (int i = 0; i < nameLen; i++) { - if (name[i] == 0) { - fn1 = new String(name, 0, i, "UTF-8"); // XXX unclear from documentation what encoding is used there - fn2 = new String(name, i+1, nameLen - i - 1, "UTF-8"); // need to check with different system codepages - break; - } - } - if (fn1 == null) { - fn1 = new String(name); - } - Record r = new Record(fmode, size, time, fn1, fn2); - if (state == 'n') { - normal.put(r.name1, r); - } else if (state == 'a') { - added.put(r.name1, r); - } else if (state == 'r') { - removed.put(r.name1, r); - } else if (state == 'm') { - merged.put(r.name1, r); - } else { - // FIXME log error? - } - } while (!da.isEmpty()); - } catch (IOException ex) { - ex.printStackTrace(); // FIXME log error, clean dirstate? 
- } finally { - da.done(); - } - } - - // new, modifiable collection - /*package-local*/ TreeSet<String> all() { - read(); - TreeSet<String> rv = new TreeSet<String>(); - @SuppressWarnings("unchecked") - Map<String, Record>[] all = new Map[] { normal, added, removed, merged }; - for (int i = 0; i < all.length; i++) { - for (Record r : all[i].values()) { - rv.add(r.name1); - } - } - return rv; - } - - /*package-local*/ Record checkNormal(String fname) { - return normal.get(fname); - } - - /*package-local*/ Record checkAdded(String fname) { - return added.get(fname); - } - /*package-local*/ Record checkRemoved(String fname) { - return removed.get(fname); - } - /*package-local*/ Record checkMerged(String fname) { - return merged.get(fname); - } - - - - - public void dump() { - read(); - @SuppressWarnings("unchecked") - Map<String, Record>[] all = new Map[] { normal, added, removed, merged }; - char[] x = new char[] {'n', 'a', 'r', 'm' }; - for (int i = 0; i < all.length; i++) { - for (Record r : all[i].values()) { - System.out.printf("%c %3o%6d %30tc\t\t%s", x[i], r.mode, r.size, (long) r.time * 1000, r.name1); - if (r.name2 != null) { - System.out.printf(" --> %s", r.name2); - } - System.out.println(); - } - System.out.println(); - } - } - - /*package-local*/ static class Record { - final int mode; - final int size; - final int time; - final String name1; - final String name2; - - public Record(int fmode, int fsize, int ftime, String name1, String name2) { - mode = fmode; - size = fsize; - time = ftime; - this.name1 = name1; - this.name2 = name2; - - } - } -}
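[editor's note] The removed read() above decodes .hg/dirstate as two 20-byte parent nodeids followed by one record per tracked file: a state byte ('n', 'a', 'r' or 'm'), three big-endian ints (mode, size, mtime), a name length and the name itself, optionally "name\0copy-source". Below is a minimal standalone sketch of that record walk; the UTF-8 name decoding repeats the assumption flagged in the code above, and the class name is illustrative.

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class DirstateDumpSketch {
    public static void main(String[] args) throws IOException {
        DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(args[0])));
        try {
            byte[] parents = new byte[40];
            in.readFully(parents);                  // first and second parent of the working directory
            while (in.available() > 0) {
                char state = (char) in.readByte();  // 'n', 'a', 'r' or 'm'
                int mode = in.readInt();
                int size = in.readInt();
                int mtime = in.readInt();
                byte[] name = new byte[in.readInt()];
                in.readFully(name);
                String entry = new String(name, "UTF-8"); // encoding is an assumption, see the XXX above
                int nul = entry.indexOf('\0');
                String fname = nul == -1 ? entry : entry.substring(0, nul);
                String copySource = nul == -1 ? null : entry.substring(nul + 1);
                System.out.printf("%c %06o %10d %10d %s%s%n", state, mode, size, mtime,
                        fname, copySource == null ? "" : " <- " + copySource);
            }
        } finally {
            in.close();
        }
    }
}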
--- a/src/com/tmate/hgkit/ll/HgIgnore.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.util.Collections; -import java.util.Set; -import java.util.TreeSet; - -/** - * - * @author artem - */ -public class HgIgnore { - - private final LocalHgRepo repo; - private Set<String> entries; - - public HgIgnore(LocalHgRepo localRepo) { - this.repo = localRepo; - } - - private void read() { - entries = Collections.emptySet(); - File hgignoreFile = new File(repo.getRepositoryRoot().getParentFile(), ".hgignore"); - if (!hgignoreFile.exists()) { - return; - } - entries = new TreeSet<String>(); - try { - BufferedReader fr = new BufferedReader(new FileReader(hgignoreFile)); - String line; - while ((line = fr.readLine()) != null) { - // FIXME need to detect syntax:glob and other parameters - entries.add(line.trim()); // shall I account for local paths in the file (i.e. back-slashed on windows)? - } - } catch (IOException ex) { - ex.printStackTrace(); // log warn - } - } - - public void reset() { - // FIXME does anyone really need to clear HgIgnore? Perhaps, repo may return new instance each time, - // which is used throughout invocation and then discarded? - entries = null; - } - - public boolean isIgnored(String path) { - if (entries == null) { - read(); - } - if (entries.contains(path)) { - // easy part - return true; - } - // substrings are memory-friendly - int x = 0, i = path.indexOf('/', 0); - while (i != -1) { - if (entries.contains(path.substring(x, i))) { - return true; - } - // try one with ending slash - if (entries.contains(path.substring(x, i+1))) { // even if i is last index, i+1 is safe here - return true; - } - x = i+1; - i = path.indexOf('/', x); - } - return false; - } -}
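[editor's note] The removed isIgnored() above checks the full path first and then every intermediate path segment, each with and without a trailing slash; it does not accumulate leading directories. Below is a small standalone sketch of exactly those lookups against an illustrative entry set.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class IgnoreLookupSketch {
    static boolean isIgnored(Set<String> entries, String path) {
        if (entries.contains(path)) {
            return true;                              // exact match on the full path
        }
        int x = 0, i = path.indexOf('/', 0);
        while (i != -1) {
            // each intermediate segment, without and with a trailing slash
            if (entries.contains(path.substring(x, i)) || entries.contains(path.substring(x, i + 1))) {
                return true;
            }
            x = i + 1;
            i = path.indexOf('/', x);
        }
        return false;
    }

    public static void main(String[] args) {
        Set<String> entries = new HashSet<String>(Arrays.asList("bin", "TEST-results/"));
        System.out.println(isIgnored(entries, "bin/classes/A.class"));   // true, via "bin"
        System.out.println(isIgnored(entries, "TEST-results/run1.xml")); // true, via "TEST-results/"
        System.out.println(isIgnored(entries, "src/Main.java"));         // false
    }
}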
--- a/src/com/tmate/hgkit/ll/HgManifest.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import com.tmate.hgkit.fs.DataAccess; - -/** - * - * @author artem - */ -public class HgManifest extends Revlog { - - /*package-local*/ HgManifest(HgRepository hgRepo, RevlogStream content) { - super(hgRepo, content); - } - - public void walk(int start, int end, final Inspector inspector) { - Revlog.Inspector insp = new Revlog.Inspector() { - - private boolean gtg = true; // good to go - - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { - if (!gtg) { - return; - } - gtg = gtg && inspector.begin(revisionNumber, new Nodeid(nodeid, true)); - int i; - String fname = null; - String flags = null; - Nodeid nid = null; - byte[] data = da.byteArray(); - for (i = 0; gtg && i < actualLen; i++) { - int x = i; - for( ; data[i] != '\n' && i < actualLen; i++) { - if (fname == null && data[i] == 0) { - fname = new String(data, x, i - x); - x = i+1; - } - } - if (i < actualLen) { - assert data[i] == '\n'; - int nodeidLen = i - x < 40 ? i-x : 40; - nid = Nodeid.fromAscii(data, x, nodeidLen); - if (nodeidLen + x < i) { - // 'x' and 'l' for executable bits and symlinks? - // hg --debug manifest shows 644 for each regular file in my repo - flags = new String(data, x + nodeidLen, i-x-nodeidLen); - } - gtg = gtg && inspector.next(nid, fname, flags); - } - nid = null; - fname = flags = null; - } - gtg = gtg && inspector.end(revisionNumber); - } - }; - content.iterate(start, end, true, insp); - } - - public interface Inspector { - boolean begin(int revision, Nodeid nid); - boolean next(Nodeid nid, String fname, String flags); - boolean end(int revision); - } -}
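[editor's note] The removed walk() above parses manifest data line by line: file name, a NUL byte, a 40-character hex nodeid, optional one-letter flags ('x' for executable, 'l' for symlink), then '\n'. Below is a minimal sketch of decoding one such line; the sample content is illustrative.

public class ManifestLineSketch {
    public static void main(String[] args) {
        String line = "src/com/tmate/hgkit/ll/HgManifest.java\0" +
                      "0123456789abcdef0123456789abcdef01234567x"; // trailing 'x' = executable flag (illustrative)
        int nul = line.indexOf('\0');
        String fname = line.substring(0, nul);
        String nodeid = line.substring(nul + 1, nul + 1 + 40);      // 40 hex characters
        String flags = line.length() > nul + 41 ? line.substring(nul + 41) : null;
        System.out.printf("%s %s flags=%s%n", nodeid, fname, flags);
    }
}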
--- a/src/com/tmate/hgkit/ll/HgRepository.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - - -/** - * @author artem - */ -public abstract class HgRepository { - - public static final int TIP = -1; - - // temp aux marker method - public static IllegalStateException notImplemented() { - return new IllegalStateException("Not implemented"); - } - - private Changelog changelog; - private HgManifest manifest; - private HgTags tags; - - private boolean isInvalid = true; - - public boolean isInvalid() { - return this.isInvalid; - } - - protected void setInvalid(boolean invalid) { - isInvalid = invalid; - } - - public final Changelog getChangelog() { - if (this.changelog == null) { - // might want delegate to protected createChangelog() some day - RevlogStream content = resolve(toStoragePath("00changelog.i", false)); // XXX perhaps, knowledge about filenames should be in LocalHgRepo? - this.changelog = new Changelog(this, content); - } - return this.changelog; - } - - public final HgManifest getManifest() { - if (this.manifest == null) { - RevlogStream content = resolve(toStoragePath("00manifest.i", false)); - this.manifest = new HgManifest(this, content); - } - return this.manifest; - } - - public final HgTags getTags() { - if (tags == null) { - tags = createTags(); - } - return tags; - } - - protected abstract HgTags createTags(); - - public abstract HgDataFile getFileNode(String path); - - public abstract String getLocation(); - - - protected abstract String toStoragePath(String path, boolean isData); - - /** - * Perhaps, should be separate interface, like ContentLookup - */ - protected abstract RevlogStream resolve(String repositoryPath); - - public abstract void status(int rev1, int rev2 /*WorkingDir - TIP, TIP?*/, StatusInspector inspector); - - public interface StatusInspector { - void modified(String fname); - void added(String fname); - void copied(String fnameOrigin, String fnameAdded); // if copied files of no interest, should delegate to self.added(fnameAdded); - void removed(String fname); - void clean(String fname); - void missing(String fname); // aka deleted (tracked by Hg, but not available in FS any more - void unknown(String fname); // not tracked - void ignored(String fname); - } -}
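[editor's note] A usage sketch for the StatusInspector callback declared above, printing hg status-like one-letter codes; it is written against the API as it stood before this changeset removed these classes, and the repository path and revision arguments are illustrative.

import com.tmate.hgkit.ll.HgRepository;

public class StatusPrinterSketch implements HgRepository.StatusInspector {
    public void modified(String fname) { System.out.println("M " + fname); }
    public void added(String fname)    { System.out.println("A " + fname); }
    public void copied(String fnameOrigin, String fnameAdded) {
        System.out.println("A " + fnameAdded + "\n  " + fnameOrigin); // origin shown hg status --copies style
    }
    public void removed(String fname)  { System.out.println("R " + fname); }
    public void clean(String fname)    { System.out.println("C " + fname); }
    public void missing(String fname)  { System.out.println("! " + fname); }
    public void unknown(String fname)  { System.out.println("? " + fname); }
    public void ignored(String fname)  { System.out.println("I " + fname); }

    public static void main(String[] args) throws Exception {
        HgRepository hgRepo = new com.tmate.hgkit.ll.LocalHgRepo(new java.io.File(args[0], ".hg"));
        hgRepo.status(0, HgRepository.TIP, new StatusPrinterSketch()); // revisions 0 and TIP, both illustrative
    }
}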
--- a/src/com/tmate/hgkit/ll/HgTags.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.util.Collections; -import java.util.List; - -/** - * FIXME Place-holder, implement - * @author artem - */ -public class HgTags { - - public List<String> tags(Nodeid nid) { - return Collections.emptyList(); - } - - public boolean isTagged(Nodeid nid) { - // TODO implement - return false; - } -}
--- a/src/com/tmate/hgkit/ll/LocalHgRepo.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,439 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.BufferedInputStream; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.lang.ref.SoftReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.TreeSet; - -import com.tmate.hgkit.fs.DataAccessProvider; - -/** - * @author artem - */ -public class LocalHgRepo extends HgRepository { - - private File repoDir; // .hg folder - private final String repoLocation; - private final DataAccessProvider dataAccess; - - public LocalHgRepo(String repositoryPath) { - setInvalid(true); - repoLocation = repositoryPath; - dataAccess = null; - } - - public LocalHgRepo(File repositoryRoot) throws IOException { - assert ".hg".equals(repositoryRoot.getName()) && repositoryRoot.isDirectory(); - setInvalid(false); - repoDir = repositoryRoot; - repoLocation = repositoryRoot.getParentFile().getCanonicalPath(); - dataAccess = new DataAccessProvider(); - parseRequires(); - } - - @Override - public String getLocation() { - return repoLocation; - } - - @Override - public void status(int rev1, int rev2, final StatusInspector inspector) { - final ManifestRevisionCollector collect = new ManifestRevisionCollector(); - getManifest().walk(rev1, rev1, collect); - - HgManifest.Inspector compare = new HgManifest.Inspector() { - - public boolean begin(int revision, Nodeid nid) { - return true; - } - - public boolean next(Nodeid nid, String fname, String flags) { - Nodeid nidR1 = collect.idsMap.remove(fname); - String flagsR1 = collect.flagsMap.remove(fname); - if (nidR1 == null) { - inspector.added(fname); - } else { - if (nidR1.equals(nid) && ((flags == null && flagsR1 == null) || flags.equals(flagsR1))) { - inspector.clean(fname); - } else { - inspector.modified(fname); - } - } - return true; - } - - public boolean end(int revision) { - for (String fname : collect.idsMap.keySet()) { - inspector.removed(fname); - } - if (collect.idsMap.size() != collect.flagsMap.size()) { - throw new IllegalStateException(); - } - return false; - } - }; - getManifest().walk(rev2, rev2, compare); - } - - public void statusLocal(int baseRevision, StatusInspector inspector) { - LinkedList<File> folders = new LinkedList<File>(); - final File rootDir = repoDir.getParentFile(); - folders.add(rootDir); - final HgDirstate dirstate = loadDirstate(); - final HgIgnore hgignore = loadIgnore(); - TreeSet<String> knownEntries = dirstate.all(); - final boolean isTipBase = baseRevision == TIP || baseRevision == getManifest().getRevisionCount(); - final ManifestRevisionCollector collect = isTipBase ? 
null : new ManifestRevisionCollector(); - if (!isTipBase) { - getManifest().walk(baseRevision, baseRevision, collect); - } - do { - File d = folders.removeFirst(); - for (File f : d.listFiles()) { - if (f.isDirectory()) { - if (!".hg".equals(f.getName())) { - folders.addLast(f); - } - } else { - // FIXME path relative to rootDir - need more robust approach - String fname = normalize(f.getPath().substring(rootDir.getPath().length() + 1)); - if (hgignore.isIgnored(fname)) { - inspector.ignored(fname); - } else { - if (knownEntries.remove(fname)) { - // modified, added, removed, clean - if (collect != null) { // need to check against base revision, not FS file - checkLocalStatusAgainstBaseRevision(collect, fname, f, dirstate, inspector); - } else { - checkLocalStatusAgainstFile(fname, f, dirstate, inspector); - } - } else { - inspector.unknown(fname); - } - } - } - } - } while (!folders.isEmpty()); - if (collect != null) { - for (String r : collect.idsMap.keySet()) { - inspector.removed(r); - } - } - for (String m : knownEntries) { - // removed from the repository and missing from working dir shall not be reported as 'deleted' - if (dirstate.checkRemoved(m) == null) { - inspector.missing(m); - } - } - } - - private static void checkLocalStatusAgainstFile(String fname, File f, HgDirstate dirstate, StatusInspector inspector) { - HgDirstate.Record r; - if ((r = dirstate.checkNormal(fname)) != null) { - // either clean or modified - if (f.lastModified() / 1000 == r.time && r.size == f.length()) { - inspector.clean(fname); - } else { - // FIXME check actual content to avoid false modified files - inspector.modified(fname); - } - } else if ((r = dirstate.checkAdded(fname)) != null) { - if (r.name2 == null) { - inspector.added(fname); - } else { - inspector.copied(fname, r.name2); - } - } else if ((r = dirstate.checkRemoved(fname)) != null) { - inspector.removed(fname); - } else if ((r = dirstate.checkMerged(fname)) != null) { - inspector.modified(fname); - } - } - - // XXX refactor checkLocalStatus methods in more OO way - private void checkLocalStatusAgainstBaseRevision(ManifestRevisionCollector collect, String fname, File f, HgDirstate dirstate, StatusInspector inspector) { - // fname is in the dirstate, either Normal, Added, Removed or Merged - Nodeid nid1 = collect.idsMap.remove(fname); - String flags = collect.flagsMap.remove(fname); - HgDirstate.Record r; - if (nid1 == null) { - // normal: added? - // added: not known at the time of baseRevision, shall report - // merged: was not known, report as added? - if ((r = dirstate.checkAdded(fname)) != null) { - if (r.name2 != null && collect.idsMap.containsKey(r.name2)) { - collect.idsMap.remove(r.name2); - collect.idsMap.remove(r.name2); - inspector.copied(r.name2, fname); - return; - } - // fall-through, report as added - } else if (dirstate.checkRemoved(fname) != null) { - // removed: removed file was not known at the time of baseRevision, and we should not report it as removed - return; - } - inspector.added(fname); - } else { - // was known; check whether clean or modified - // when added - seems to be the case of a file added once again, hence need to check if content is different - if ((r = dirstate.checkNormal(fname)) != null || (r = dirstate.checkMerged(fname)) != null || (r = dirstate.checkAdded(fname)) != null) { - // either clean or modified - HgDataFile fileNode = getFileNode(fname); - final int lengthAtRevision = fileNode.length(nid1); - if (r.size /* XXX File.length() ?! 
*/ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) { - inspector.modified(fname); - } else { - // check actual content to see actual changes - // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison - if (areTheSame(f, fileNode.content(nid1))) { - inspector.clean(fname); - } else { - inspector.modified(fname); - } - } - } - // only those left in idsMap after processing are reported as removed - } - - // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest - // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively - // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: - // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest - // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids). - // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean' - } - - private static String todoGenerateFlags(String fname) { - // FIXME implement - return null; - } - private static boolean areTheSame(File f, byte[] data) { - try { - BufferedInputStream is = new BufferedInputStream(new FileInputStream(f)); - int i = 0; - while (i < data.length && data[i] == is.read()) { - i++; // increment only for successful match, otherwise won't tell last byte in data was the same as read from the stream - } - return i == data.length && is.read() == -1; // although data length is expected to be the same (see caller), check that we reached EOF, no more data left. - } catch (IOException ex) { - ex.printStackTrace(); // log warn - } - return false; - } - - // XXX package-local, unless there are cases when required from outside (guess, working dir/revision walkers may hide dirstate access and no public visibility needed) - public final HgDirstate loadDirstate() { - // XXX may cache in SoftReference if creation is expensive - return new HgDirstate(this, new File(repoDir, "dirstate")); - } - - // package-local, see comment for loadDirstate - public final HgIgnore loadIgnore() { - return new HgIgnore(this); - } - - /*package-local*/ DataAccessProvider getDataAccess() { - return dataAccess; - } - - /*package-local*/ File getRepositoryRoot() { - return repoDir; - } - - @Override - protected HgTags createTags() { - return new HgTags(); - } - - private final HashMap<String, SoftReference<RevlogStream>> streamsCache = new HashMap<String, SoftReference<RevlogStream>>(); - - /** - * path - repository storage path (i.e. one usually with .i or .d) - */ - @Override - protected RevlogStream resolve(String path) { - final SoftReference<RevlogStream> ref = streamsCache.get(path); - RevlogStream cached = ref == null ? 
null : ref.get(); - if (cached != null) { - return cached; - } - File f = new File(repoDir, path); - if (f.exists()) { - RevlogStream s = new RevlogStream(dataAccess, f); - streamsCache.put(path, new SoftReference<RevlogStream>(s)); - return s; - } - return null; - } - - @Override - public HgDataFile getFileNode(String path) { - String nPath = normalize(path); - String storagePath = toStoragePath(nPath, true); - RevlogStream content = resolve(storagePath); - // XXX no content when no file? or HgDataFile.exists() to detect that? How about files that were removed in previous releases? - return new HgDataFile(this, nPath, content); - } - - private boolean revlogv1; - private boolean store; - private boolean fncache; - private boolean dotencode; - - - private void parseRequires() { - File requiresFile = new File(repoDir, "requires"); - if (!requiresFile.exists()) { - return; - } - try { - BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(requiresFile))); - String line; - while ((line = br.readLine()) != null) { - revlogv1 |= "revlogv1".equals(line); - store |= "store".equals(line); - fncache |= "fncache".equals(line); - dotencode |= "dotencode".equals(line); - } - } catch (IOException ex) { - ex.printStackTrace(); // FIXME log - } - } - - // FIXME document what path argument is, whether it includes .i or .d, and whether it's 'normalized' (slashes) or not. - // since .hg/store keeps both .i files and files without extension (e.g. fncache), guees, for data == false - // we shall assume path has extension - // FIXME much more to be done, see store.py:_hybridencode - // @see http://mercurial.selenic.com/wiki/CaseFoldingPlan - @Override - protected String toStoragePath(String path, boolean data) { - path = normalize(path); - final String STR_STORE = "store/"; - final String STR_DATA = "data/"; - final String STR_DH = "dh/"; - if (!data) { - return this.store ? STR_STORE + path : path; - } - path = path.replace(".hg/", ".hg.hg/").replace(".i/", ".i.hg/").replace(".d/", ".d.hg/"); - StringBuilder sb = new StringBuilder(path.length() << 1); - if (store || fncache) { - // encodefilename - final String reservedChars = "\\:*?\"<>|"; - // in fact, \\ is unlikely to match, ever - we've replaced all of them already, above. Just regards to store.py - int x; - char[] hexByte = new char[2]; - for (int i = 0; i < path.length(); i++) { - final char ch = path.charAt(i); - if (ch >= 'a' && ch <= 'z') { - sb.append(ch); // POIRAE - } else if (ch >= 'A' && ch <= 'Z') { - sb.append('_'); - sb.append(Character.toLowerCase(ch)); // Perhaps, (char) (((int) ch) + 32)? Even better, |= 0x20? 
- } else if ( (x = reservedChars.indexOf(ch)) != -1) { - sb.append('~'); - sb.append(toHexByte(reservedChars.charAt(x), hexByte)); - } else if ((ch >= '~' /*126*/ && ch <= 255) || ch < ' ' /*32*/) { - sb.append('~'); - sb.append(toHexByte(ch, hexByte)); - } else if (ch == '_') { - // note, encoding from store.py:_buildencodefun and :_build_lower_encodefun - // differ in the way they process '_' (latter doesn't escape it) - sb.append('_'); - sb.append('_'); - } else { - sb.append(ch); - } - } - // auxencode - if (fncache) { - x = 0; // last segment start - final TreeSet<String> windowsReservedFilenames = new TreeSet<String>(); - windowsReservedFilenames.addAll(Arrays.asList("con prn aux nul com1 com2 com3 com4 com5 com6 com7 com8 com9 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9".split(" "))); - do { - int i = sb.indexOf("/", x); - if (i == -1) { - i = sb.length(); - } - // windows reserved filenames are at least of length 3 - if (i - x >= 3) { - boolean found = false; - if (i-x == 3) { - found = windowsReservedFilenames.contains(sb.subSequence(x, i)); - } else if (sb.charAt(x+3) == '.') { // implicit i-x > 3 - found = windowsReservedFilenames.contains(sb.subSequence(x, x+3)); - } else if (i-x > 4 && sb.charAt(x+4) == '.') { - found = windowsReservedFilenames.contains(sb.subSequence(x, x+4)); - } - if (found) { - sb.setCharAt(x, '~'); - sb.insert(x+1, toHexByte(sb.charAt(x+2), hexByte)); - i += 2; - } - } - if (dotencode && (sb.charAt(x) == '.' || sb.charAt(x) == ' ')) { - sb.insert(x+1, toHexByte(sb.charAt(x), hexByte)); - sb.setCharAt(x, '~'); // setChar *after* charAt/insert to get ~2e, not ~7e for '.' - i += 2; - } - x = i+1; - } while (x < sb.length()); - } - } - final int MAX_PATH_LEN_IN_HGSTORE = 120; - if (fncache && (sb.length() + STR_DATA.length() > MAX_PATH_LEN_IN_HGSTORE)) { - throw HgRepository.notImplemented(); // FIXME digest and fncache use - } - if (this.store) { - sb.insert(0, STR_STORE + STR_DATA); - } - sb.append(".i"); - return sb.toString(); - } - - private static char[] toHexByte(int ch, char[] buf) { - assert buf.length > 1; - final String hexDigits = "0123456789abcdef"; - buf[0] = hexDigits.charAt((ch & 0x00F0) >>> 4); - buf[1] = hexDigits.charAt(ch & 0x0F); - return buf; - } - - // TODO handle . and .. (although unlikely to face them from GUI client) - private static String normalize(String path) { - path = path.replace('\\', '/').replace("//", "/"); - if (path.startsWith("/")) { - path = path.substring(1); - } - return path; - } - - // XXX idsMap is being modified from outside. It's better to let outer (modifying) code to create these maps instead - private static final class ManifestRevisionCollector implements HgManifest.Inspector { - final HashMap<String, Nodeid> idsMap = new HashMap<String, Nodeid>(); - final HashMap<String, String> flagsMap = new HashMap<String, String>(); - - public boolean next(Nodeid nid, String fname, String flags) { - idsMap.put(fname, nid); - flagsMap.put(fname, flags); - return true; - } - - public boolean end(int revision) { - return false; - } - - public boolean begin(int revision, Nodeid nid) { - return true; - } - } -}
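[editor's note] The removed toStoragePath() above implements the store filename encoding under the "store"/"fncache" requirements: lowercase letters pass through, uppercase letters become '_' plus the lowercase letter, '_' is doubled, and reserved or non-printable characters become '~' plus two hex digits. Below is a minimal sketch of just that character mapping; the Windows-reserved-name and leading-dot handling, the store/data prefix, the .i suffix and the long-path case are deliberately left out, and the sample paths are illustrative.

public class StoreEncodeSketch {
    static String encodeFilename(String path) {
        final String reserved = "\\:*?\"<>|";
        final String hex = "0123456789abcdef";
        StringBuilder sb = new StringBuilder(path.length() * 2);
        for (int i = 0; i < path.length(); i++) {
            char ch = path.charAt(i);
            if (ch >= 'A' && ch <= 'Z') {
                sb.append('_').append(Character.toLowerCase(ch));     // uppercase -> '_' + lowercase
            } else if (ch == '_') {
                sb.append("__");                                      // '_' is doubled
            } else if (reserved.indexOf(ch) != -1 || ch < ' ' || (ch >= '~' && ch <= 255)) {
                sb.append('~').append(hex.charAt((ch >> 4) & 0xF)).append(hex.charAt(ch & 0xF));
            } else {
                sb.append(ch);                                        // everything else passes through
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(encodeFilename("src/COPYING.txt"));   // src/_c_o_p_y_i_n_g.txt
        System.out.println(encodeFilename("Weird Name?.java"));  // _weird _name~3f.java
    }
}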
--- a/src/com/tmate/hgkit/ll/Nodeid.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,123 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.DigestHelper.toHexString; - -import java.util.Arrays; - - - -/** - * Whether to store fixed size array (20 bytes) - ease of manipulation (e.g. hashcode/equals), or - * memory effective - reuse supplied array, keep significant bits only? - * Fixed size array looks most appealing to me now - I doubt one can save any significant amount of memory. - * There'd always 20 non-zero bytes, the difference is only for any extra bytes one may pass to constructor - * @author artem - * - */ -public final class Nodeid { - - public static final Nodeid NULL = new Nodeid(new byte[20], false); - private final byte[] binaryData; - - /** - * @param binaryRepresentation - byte[20], kept by reference - * @param shallClone - true if array is subject to future modification and shall be copied, not referenced - */ - public Nodeid(byte[] binaryRepresentation, boolean shallClone) { - // 5 int fields => 32 bytes - // byte[20] => 48 bytes - if (binaryRepresentation == null || binaryRepresentation.length != 20) { - throw new IllegalArgumentException(); - } - this.binaryData = shallClone ? binaryRepresentation.clone() : binaryRepresentation; - } - - @Override - public int hashCode() { - // digest (part thereof) seems to be nice candidate for the hashCode - byte[] b = binaryData; - return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); - } - - @Override - public boolean equals(Object o) { - if (o instanceof Nodeid) { - return Arrays.equals(this.binaryData, ((Nodeid) o).binaryData); - } - return false; - } - - public boolean equalsTo(byte[] buf) { - return Arrays.equals(this.binaryData, buf); - } - - @Override - public String toString() { - // XXX may want to output just single 0 for the NULL id? - return toHexString(binaryData, 0, binaryData.length); - } - - public String shortNotation() { - return toHexString(binaryData, 0, 6); - } - - public boolean isNull() { - if (this == NULL) { - return true; - } - for (int i = 0; i < 20; i++) { - if (this.binaryData[i] != 0) { - return false; - } - } - return true; - } - - // primary purpose is to give DigestHelper access to internal structure. Despite it's friends-only (package visibility), it's still makes sense to - // return a copy, to avoid any accidental modification (same reason field is not made visible, nor any callback, e.g. 
Output.write(byte[]) was introduced) - /*package-local*/byte[] cloneData() { - return binaryData.clone(); - } - - // primary difference with cons is handling of NULL id (this method returns constant) - // always makes a copy of an array passed - public static Nodeid fromBinary(byte[] binaryRepresentation, int offset) { - if (binaryRepresentation == null || binaryRepresentation.length - offset < 20) { - throw new IllegalArgumentException(); - } - int i = 0; - while (i < 20 && binaryRepresentation[offset+i] == 0) i++; - if (i == 20) { - return NULL; - } - if (offset == 0 && binaryRepresentation.length == 20) { - return new Nodeid(binaryRepresentation, true); - } - byte[] b = new byte[20]; // create new instance if no other reasonable guesses possible - System.arraycopy(binaryRepresentation, offset, b, 0, 20); - return new Nodeid(b, false); - } - - // binascii.unhexlify() - public static Nodeid fromAscii(byte[] asciiRepresentation, int offset, int length) { - if (length != 40) { - throw new IllegalArgumentException(); - } - byte[] data = new byte[20]; - boolean zeroBytes = true; - for (int i = 0, j = offset; i < data.length; i++) { - int hiNibble = Character.digit(asciiRepresentation[j++], 16); - int lowNibble = Character.digit(asciiRepresentation[j++], 16); - byte b = (byte) (((hiNibble << 4) | lowNibble) & 0xFF); - data[i] = b; - zeroBytes = zeroBytes && b == 0; - } - if (zeroBytes) { - return NULL; - } - return new Nodeid(data, false); - } -}
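[editor's note] A usage sketch for the Nodeid factory methods shown above, written against the API as it stood before this changeset removed it; the 40-character hex string is illustrative.

import com.tmate.hgkit.ll.Nodeid;

public class NodeidSketch {
    public static void main(String[] args) throws Exception {
        byte[] ascii = "0123456789abcdef0123456789abcdef01234567".getBytes("ISO-8859-1");
        Nodeid n1 = Nodeid.fromAscii(ascii, 0, 40);    // parse 40 hex characters
        byte[] binary = new byte[20];                  // all zeros
        Nodeid n2 = Nodeid.fromBinary(binary, 0);      // yields the shared Nodeid.NULL constant
        System.out.println(n1 + " " + n1.shortNotation());
        System.out.println(n2.isNull());               // true
        System.out.println(n1.equalsTo(binary));       // false
    }
}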
--- a/src/com/tmate/hgkit/ll/Revlog.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,184 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; - -import com.tmate.hgkit.fs.DataAccess; - -/** - * - * @author artem - */ -public abstract class Revlog { - - private final HgRepository hgRepo; - protected final RevlogStream content; - - protected Revlog(HgRepository hgRepo, RevlogStream content) { - if (hgRepo == null) { - throw new NullPointerException(); - } - this.hgRepo = hgRepo; - this.content = content; - } - - public final HgRepository getRepo() { - return hgRepo; - } - - public int getRevisionCount() { - return content.revisionCount(); - } - - public int getLocalRevisionNumber(Nodeid nid) { - int revision = content.findLocalRevisionNumber(nid); - if (revision == Integer.MIN_VALUE) { - throw new IllegalArgumentException(String.format("%s doesn't represent a revision of %s", nid.toString(), this /*XXX HgDataFile.getPath might be more suitable here*/)); - } - return revision; - } - - // Till now, i follow approach that NULL nodeid is never part of revlog - public boolean isKnown(Nodeid nodeid) { - final int rn = content.findLocalRevisionNumber(nodeid); - if (Integer.MIN_VALUE == rn) { - return false; - } - if (rn < 0 || rn >= content.revisionCount()) { - // Sanity check - throw new IllegalStateException(); - } - return true; - } - - /** - * Access to revision data as is (decompressed, but otherwise unprocessed, i.e. not parsed for e.g. changeset or manifest entries) - * @param nodeid - */ - public byte[] content(Nodeid nodeid) { - return content(getLocalRevisionNumber(nodeid)); - } - - /** - * @param revision - repo-local index of this file change (not a changelog revision number!) - */ - public byte[] content(int revision) { - final byte[][] dataPtr = new byte[1][]; - Revlog.Inspector insp = new Revlog.Inspector() { - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) { - dataPtr[0] = data.byteArray(); - } - }; - content.iterate(revision, revision, true, insp); - return dataPtr[0]; - } - - // FIXME byte[] data might be too expensive, for few usecases it may be better to have intermediate Access object (when we don't need full data - // instantly - e.g. calculate hash, or comparing two revisions - // XXX seems that RevlogStream is better place for this class. - public interface Inspector { - // XXX boolean retVal to indicate whether to continue? - // TODO specify nodeid and data length, and reuse policy (i.e. if revlog stream doesn't reuse nodeid[] for each call) - void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[/*20*/] nodeid, DataAccess data); - } - - /* - * XXX think over if it's better to do either: - * pw = getChangelog().new ParentWalker(); pw.init() and pass pw instance around as needed - * or - * add Revlog#getParentWalker(), static class, make cons() and #init package-local, and keep SoftReference to allow walker reuse. 
- * - * and yes, walker is not a proper name - */ - public final class ParentWalker { - private Map<Nodeid, Nodeid> firstParent; - private Map<Nodeid, Nodeid> secondParent; - private Set<Nodeid> allNodes; - - public ParentWalker() { - firstParent = secondParent = Collections.emptyMap(); - allNodes = Collections.emptySet(); - } - - public void init() { - final RevlogStream stream = Revlog.this.content; - final int revisionCount = stream.revisionCount(); - firstParent = new HashMap<Nodeid, Nodeid>(revisionCount); - secondParent = new HashMap<Nodeid, Nodeid>(firstParent.size() >> 1); // assume branches/merges are less frequent - allNodes = new LinkedHashSet<Nodeid>(); - - Inspector insp = new Inspector() { - final Nodeid[] sequentialRevisionNodeids = new Nodeid[revisionCount]; - int ix = 0; - public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) { - if (ix != revisionNumber) { - // XXX temp code, just to make sure I understand what's going on here - throw new IllegalStateException(); - } - if (parent1Revision >= revisionNumber || parent2Revision >= revisionNumber) { - throw new IllegalStateException(); // sanity, revisions are sequential - } - final Nodeid nid = new Nodeid(nodeid, true); - sequentialRevisionNodeids[ix++] = nid; - allNodes.add(nid); - if (parent1Revision != -1) { - firstParent.put(nid, sequentialRevisionNodeids[parent1Revision]); - if (parent2Revision != -1) { - secondParent.put(nid, sequentialRevisionNodeids[parent2Revision]); - } - } - } - }; - stream.iterate(0, -1, false, insp); - } - - public Set<Nodeid> allNodes() { - return Collections.unmodifiableSet(allNodes); - } - - // FIXME need to decide whether Nodeid(00 * 20) is always known or not - public boolean knownNode(Nodeid nid) { - return allNodes.contains(nid); - } - - // null if none - public Nodeid firstParent(Nodeid nid) { - return firstParent.get(nid); - } - - // never null, Nodeid.NULL if none known - public Nodeid safeFirstParent(Nodeid nid) { - Nodeid rv = firstParent(nid); - return rv == null ? Nodeid.NULL : rv; - } - - public Nodeid secondParent(Nodeid nid) { - return secondParent.get(nid); - } - - public Nodeid safeSecondParent(Nodeid nid) { - Nodeid rv = secondParent(nid); - return rv == null ? Nodeid.NULL : rv; - } - - public boolean appendParentsOf(Nodeid nid, Collection<Nodeid> c) { - Nodeid p1 = firstParent(nid); - boolean modified = false; - if (p1 != null) { - modified = c.add(p1); - Nodeid p2 = secondParent(nid); - if (p2 != null) { - modified = c.add(p2) || modified; - } - } - return modified; - } - } -}
--- a/src/com/tmate/hgkit/ll/RevlogIndexStreamAccess.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,60 +0,0 @@ -/** - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import java.io.DataInput; -import java.io.IOException; - -/** - * @author artem - * - */ -public class RevlogIndexStreamAccess { - - private final RevlogStream stream; - - // takes RevlogStream. RevlogStream delegates calls for data to this accessor, which in turn refers back to RevlogStream to get - // correct [Input|Data]Stream according to revlog version (Revlogv0 or RevlogNG) - - public RevlogIndexStreamAccess(RevlogStream stream) { - this.stream = stream; - // TODO Auto-generated constructor stub - } - - - void readRevlogV0Record() throws IOException { - DataInput di = null; //FIXME stream.getIndexStream(); - int offset = di.readInt(); - int compressedLen = di.readInt(); - int baseRevision = di.readInt(); - int linkRevision = di.readInt(); -// int r = (((buf[0] & 0xff) << 24) | ((buf[1] & 0xff) << 16) | ((buf[2] & 0xff) << 8) | (buf[3] & 0xff)); - byte[] buf = new byte[20]; - di.readFully(buf, 0, 20); - Object nodeidOwn = buf.clone(); - // XXX nodeid as an Object with hash/equals? - di.readFully(buf, 0, 20); - Object nodeidParent1 = buf.clone(); - di.readFully(buf, 0, 20); - Object nodeidParent2 = buf.clone(); - } - - // another subclass? - void readRevlogNGRecord() throws IOException { - DataInput di = null; //FIXME stream.getIndexStream(); - long l = di.readLong(); - long offset = l >>> 16; - int flags = (int) (l & 0X0FFFF); - int compressedLen = di.readInt(); - int actualLen = di.readInt(); - int baseRevision = di.readInt(); - int linkRevision = di.readInt(); - int parent1Revision = di.readInt(); - int parent2Revision = di.readInt(); - byte[] buf = new byte[32]; - di.readFully(buf, 0, 20+12); - Object nodeid = buf/*[0..20]*/; - - } -}
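[editor's note] The removed readRevlogNGRecord() above (and RevlogStream.iterate() below) decode a 64-byte RevlogNG index record: a packed long whose upper 48 bits are the data offset and lower 16 bits are flags, six 4-byte ints, and a 32-byte field of which only the first 20 bytes carry the nodeid. Below is a minimal standalone sketch of that decoding, fed an all-zero dummy record so it runs as-is.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class RevlogIndexRecordSketch {
    public static void readRecord(DataInputStream di) throws IOException {
        long offsetAndFlags = di.readLong();
        long offset = offsetAndFlags >>> 16;        // 6-byte offset into the data file
        int flags = (int) (offsetAndFlags & 0xFFFF);
        int compressedLen = di.readInt();           // length of the stored (possibly zlib'ed) chunk
        int actualLen = di.readInt();               // length of the uncompressed, fully patched text
        int baseRevision = di.readInt();            // revision the delta chain starts from
        int linkRevision = di.readInt();            // changelog revision this entry belongs to
        int parent1 = di.readInt();
        int parent2 = di.readInt();
        byte[] nodeid = new byte[20];
        di.readFully(nodeid);
        di.skipBytes(12);                           // remainder of the 32-byte nodeid field is zero padding
        System.out.printf("off=%d flags=%d clen=%d len=%d base=%d link=%d p1=%d p2=%d%n",
                offset, flags, compressedLen, actualLen, baseRevision, linkRevision, parent1, parent2);
    }

    public static void main(String[] args) throws IOException {
        readRecord(new DataInputStream(new ByteArrayInputStream(new byte[64]))); // all-zero dummy record
    }
}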
--- a/src/com/tmate/hgkit/ll/RevlogIterator.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ -/** - * Copyright (c) 2010 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -/** - * To walk against revlog - * XXX consider external iterator approach - * @author artem - */ -public class RevlogIterator { - - public RevlogIterator(RevlogStream stream) { - - } -}
--- a/src/com/tmate/hgkit/ll/RevlogStream.java Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,363 +0,0 @@ -/* - * Copyright (c) 2010, 2011 Artem Tikhomirov - */ -package com.tmate.hgkit.ll; - -import static com.tmate.hgkit.ll.HgRepository.TIP; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; - -import com.tmate.hgkit.fs.ByteArrayDataAccess; -import com.tmate.hgkit.fs.DataAccess; -import com.tmate.hgkit.fs.DataAccessProvider; -import com.tmate.hgkit.fs.FilterDataAccess; -import com.tmate.hgkit.fs.InflaterDataAccess; - -/** - * ? Single RevlogStream per file per repository with accessor to record access session (e.g. with back/forward operations), - * or numerous RevlogStream with separate representation of the underlaying data (cached, lazy ChunkStream)? - * @author artem - * @see http://mercurial.selenic.com/wiki/Revlog - * @see http://mercurial.selenic.com/wiki/RevlogNG - */ -public class RevlogStream { - - private List<IndexEntry> index; // indexed access highly needed - private boolean inline = false; - private final File indexFile; - private final DataAccessProvider dataAccess; - - // if we need anything else from HgRepo, might replace DAP parameter with HgRepo and query it for DAP. - RevlogStream(DataAccessProvider dap, File indexFile) { - this.dataAccess = dap; - this.indexFile = indexFile; - } - - /*package*/ DataAccess getIndexStream() { - return dataAccess.create(indexFile); - } - - /*package*/ DataAccess getDataStream() { - final String indexName = indexFile.getName(); - File dataFile = new File(indexFile.getParentFile(), indexName.substring(0, indexName.length() - 1) + "d"); - return dataAccess.create(dataFile); - } - - public int revisionCount() { - initOutline(); - return index.size(); - } - - public int dataLength(int revision) { - // XXX in fact, use of iterate() instead of this implementation may be quite reasonable. - // - final int indexSize = revisionCount(); - DataAccess daIndex = getIndexStream(); // XXX may supply a hint that I'll need really few bytes of data (although at some offset) - if (revision == TIP) { - revision = indexSize - 1; - } - try { - int recordOffset = inline ? (int) index.get(revision).offset : revision * REVLOGV1_RECORD_SIZE; - daIndex.seek(recordOffset + 12); // 6+2+4 - int actualLen = daIndex.readInt(); - return actualLen; - } catch (IOException ex) { - ex.printStackTrace(); // log error. FIXME better handling - throw new IllegalStateException(ex); - } finally { - daIndex.done(); - } - } - - // Perhaps, RevlogStream should be limited to use of plain int revisions for access, - // while Nodeids should be kept on the level up, in Revlog. Guess, Revlog better keep - // map of nodeids, and once this comes true, we may get rid of this method. - // Unlike its counterpart, Revlog#getLocalRevisionNumber, doesn't fail with exception if node not found, - // returns a predefined constant instead - /*package-local*/ int findLocalRevisionNumber(Nodeid nodeid) { - // XXX this one may be implemented with iterate() once there's mechanism to stop iterations - final int indexSize = revisionCount(); - DataAccess daIndex = getIndexStream(); - try { - byte[] nodeidBuf = new byte[20]; - for (int i = 0; i < indexSize; i++) { - daIndex.skip(8); - int compressedLen = daIndex.readInt(); - daIndex.skip(20); - daIndex.readBytes(nodeidBuf, 0, 20); - if (nodeid.equalsTo(nodeidBuf)) { - return i; - } - daIndex.skip(inline ? 
12 + compressedLen : 12); - } - } catch (IOException ex) { - ex.printStackTrace(); // log error. FIXME better handling - throw new IllegalStateException(ex); - } finally { - daIndex.done(); - } - return Integer.MIN_VALUE; - } - - - private final int REVLOGV1_RECORD_SIZE = 64; - - // should be possible to use TIP, ALL, or -1, -2, -n notation of Hg - // ? boolean needsNodeid - public void iterate(int start, int end, boolean needData, Revlog.Inspector inspector) { - initOutline(); - final int indexSize = index.size(); - if (indexSize == 0) { - return; - } - if (end == TIP) { - end = indexSize - 1; - } - if (start == TIP) { - start = indexSize - 1; - } - if (start < 0 || start >= indexSize) { - throw new IllegalArgumentException("Bad left range boundary " + start); - } - if (end < start || end >= indexSize) { - throw new IllegalArgumentException("Bad right range boundary " + end); - } - // XXX may cache [start .. end] from index with a single read (pre-read) - - DataAccess daIndex = null, daData = null; - daIndex = getIndexStream(); - if (needData && !inline) { - daData = getDataStream(); - } - try { - byte[] nodeidBuf = new byte[20]; - DataAccess lastUserData = null; - int i; - boolean extraReadsToBaseRev = false; - if (needData && index.get(start).baseRevision < start) { - i = index.get(start).baseRevision; - extraReadsToBaseRev = true; - } else { - i = start; - } - - daIndex.seek(inline ? index.get(i).offset : i * REVLOGV1_RECORD_SIZE); - for (; i <= end; i++ ) { - if (inline && needData) { - // inspector reading data (though FilterDataAccess) may have affected index position - daIndex.seek(index.get(i).offset); - } - long l = daIndex.readLong(); - @SuppressWarnings("unused") - long offset = l >>> 16; - @SuppressWarnings("unused") - int flags = (int) (l & 0X0FFFF); - int compressedLen = daIndex.readInt(); - int actualLen = daIndex.readInt(); - int baseRevision = daIndex.readInt(); - int linkRevision = daIndex.readInt(); - int parent1Revision = daIndex.readInt(); - int parent2Revision = daIndex.readInt(); - // Hg has 32 bytes here, uses 20 for nodeid, and keeps 12 last bytes empty - daIndex.readBytes(nodeidBuf, 0, 20); - daIndex.skip(12); - DataAccess userDataAccess = null; - if (needData) { - final byte firstByte; - long streamOffset = index.get(i).offset; - DataAccess streamDataAccess; - if (inline) { - streamDataAccess = daIndex; - streamOffset += REVLOGV1_RECORD_SIZE; // don't need to do seek as it's actual position in the index stream - } else { - streamDataAccess = daData; - daData.seek(streamOffset); - } - firstByte = streamDataAccess.readByte(); - if (firstByte == 0x78 /* 'x' */) { - userDataAccess = new InflaterDataAccess(streamDataAccess, streamOffset, compressedLen); - } else if (firstByte == 0x75 /* 'u' */) { - userDataAccess = new FilterDataAccess(streamDataAccess, streamOffset+1, compressedLen-1); - } else { - // XXX Python impl in fact throws exception when there's not 'x', 'u' or '0' - // but I don't see reason not to return data as is - userDataAccess = new FilterDataAccess(streamDataAccess, streamOffset, compressedLen); - } - // XXX - if (baseRevision != i) { // XXX not sure if this is the right way to detect a patch - // this is a patch - LinkedList<PatchRecord> patches = new LinkedList<PatchRecord>(); - while (!userDataAccess.isEmpty()) { - PatchRecord pr = PatchRecord.read(userDataAccess); - System.out.printf("PatchRecord:%d %d %d\n", pr.start, pr.end, pr.len); - patches.add(pr); - } - userDataAccess.done(); - // - byte[] userData = apply(lastUserData, actualLen, 
patches); - userDataAccess = new ByteArrayDataAccess(userData); - } - } else { - if (inline) { - daIndex.skip(compressedLen); - } - } - if (!extraReadsToBaseRev || i >= start) { - inspector.next(i, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, nodeidBuf, userDataAccess); - } - if (userDataAccess != null) { - userDataAccess.reset(); - if (lastUserData != null) { - lastUserData.done(); - } - lastUserData = userDataAccess; - } - } - } catch (IOException ex) { - throw new IllegalStateException(ex); // FIXME need better handling - } finally { - daIndex.done(); - if (daData != null) { - daData.done(); - } - } - } - - private void initOutline() { - if (index != null && !index.isEmpty()) { - return; - } - ArrayList<IndexEntry> res = new ArrayList<IndexEntry>(); - DataAccess da = getIndexStream(); - try { - int versionField = da.readInt(); - da.readInt(); // just to skip next 2 bytes of offset + flags - final int INLINEDATA = 1 << 16; - inline = (versionField & INLINEDATA) != 0; - long offset = 0; // first offset is always 0, thus Hg uses it for other purposes - while(true) { - int compressedLen = da.readInt(); - // 8+4 = 12 bytes total read here - @SuppressWarnings("unused") - int actualLen = da.readInt(); - int baseRevision = da.readInt(); - // 12 + 8 = 20 bytes read here -// int linkRevision = di.readInt(); -// int parent1Revision = di.readInt(); -// int parent2Revision = di.readInt(); -// byte[] nodeid = new byte[32]; - if (inline) { - res.add(new IndexEntry(offset + REVLOGV1_RECORD_SIZE * res.size(), baseRevision)); - da.skip(3*4 + 32 + compressedLen); // Check: 44 (skip) + 20 (read) = 64 (total RevlogNG record size) - } else { - res.add(new IndexEntry(offset, baseRevision)); - da.skip(3*4 + 32); - } - if (da.isEmpty()) { - // fine, done then - res.trimToSize(); - index = res; - break; - } else { - // start reading next record - long l = da.readLong(); - offset = l >>> 16; - } - } - } catch (IOException ex) { - ex.printStackTrace(); // log error - // too bad, no outline then. - index = Collections.emptyList(); - } finally { - da.done(); - } - - } - - - // perhaps, package-local or protected, if anyone else from low-level needs them - // XXX think over if we should keep offset in case of separate data file - we read the field anyway. Perhaps, distinct entry classes for Inline and non-inline indexes? 
- private static class IndexEntry { - public final long offset; // for separate .i and .d - copy of index record entry, for inline index - actual offset of the record in the .i file (record entry + revision * record size)) - //public final int length; // data past fixed record (need to decide whether including header size or not), and whether length is of compressed data or not - public final int baseRevision; - - public IndexEntry(long o, int baseRev) { - offset = o; - baseRevision = baseRev; - } - } - - // mpatch.c : apply() - // FIXME need to implement patch merge (fold, combine, gather and discard from aforementioned mpatch.[c|py]), also see Revlog and Mercurial PDF - /*package-local for HgBundle; until moved to better place*/static byte[] apply(DataAccess baseRevisionContent, int outcomeLen, List<PatchRecord> patch) throws IOException { - int last = 0, destIndex = 0; - if (outcomeLen == -1) { - outcomeLen = (int) baseRevisionContent.length(); - for (PatchRecord pr : patch) { - outcomeLen += pr.start - last + pr.len; - last = pr.end; - } - outcomeLen -= last; - last = 0; - } - System.out.println(baseRevisionContent.length()); - byte[] rv = new byte[outcomeLen]; - for (PatchRecord pr : patch) { - baseRevisionContent.seek(last); - baseRevisionContent.readBytes(rv, destIndex, pr.start-last); - destIndex += pr.start - last; - System.arraycopy(pr.data, 0, rv, destIndex, pr.data.length); - destIndex += pr.data.length; - last = pr.end; - } - baseRevisionContent.seek(last); - baseRevisionContent.readBytes(rv, destIndex, (int) (baseRevisionContent.length() - last)); - return rv; - } - - // @see http://mercurial.selenic.com/wiki/BundleFormat, in Changelog group description - /*package-local*/ static class PatchRecord { // copy of struct frag from mpatch.c - /* - Given there are pr1 and pr2: - pr1.start to pr1.end will be replaced with pr's data (of pr1.len) - pr1.end to pr2.start gets copied from base - */ - int start, end, len; - byte[] data; - - // TODO consider PatchRecord that only records data position (absolute in data source), and acquires data as needed - private PatchRecord(int p1, int p2, int length, byte[] src) { - start = p1; - end = p2; - len = length; - data = src; - } - - /*package-local*/ static PatchRecord read(byte[] data, int offset) { - final int x = offset; // shorthand - int p1 = ((data[x] & 0xFF)<< 24) | ((data[x+1] & 0xFF) << 16) | ((data[x+2] & 0xFF) << 8) | (data[x+3] & 0xFF); - int p2 = ((data[x+4] & 0xFF) << 24) | ((data[x+5] & 0xFF) << 16) | ((data[x+6] & 0xFF) << 8) | (data[x+7] & 0xFF); - int len = ((data[x+8] & 0xFF) << 24) | ((data[x+9] & 0xFF) << 16) | ((data[x+10] & 0xFF) << 8) | (data[x+11] & 0xFF); - byte[] dataCopy = new byte[len]; - System.arraycopy(data, x+12, dataCopy, 0, len); - return new PatchRecord(p1, p2, len, dataCopy); - } - - /*package-local*/ static PatchRecord read(DataAccess da) throws IOException { - int p1 = da.readInt(); - int p2 = da.readInt(); - int len = da.readInt(); - byte[] src = new byte[len]; - da.readBytes(src, 0, len); - return new PatchRecord(p1, p2, len, src); - } - - - } -}
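The removed RevlogStream above documents how revlog deltas are applied: each PatchRecord states that bytes [start, end) of the base revision are replaced by its data, while the stretches between consecutive records are copied through unchanged, which is exactly what apply() does. A minimal standalone sketch of that semantics; the class and method names below are illustrative, not part of the library:

import java.io.ByteArrayOutputStream;
import java.util.Arrays;
import java.util.List;

// Illustrative sketch of revlog delta application: each fragment replaces base[start, end) with its data.
class DeltaSketch {
    static final class Frag { // mirrors PatchRecord: start, end, replacement bytes
        final int start, end;
        final byte[] data;
        Frag(int start, int end, byte[] data) { this.start = start; this.end = end; this.data = data; }
    }

    static byte[] apply(byte[] base, List<Frag> patch) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int last = 0;
        for (Frag f : patch) {
            out.write(base, last, f.start - last); // copy untouched span of the base
            out.write(f.data, 0, f.data.length);   // then the replacement bytes
            last = f.end;                          // skip the replaced span
        }
        out.write(base, last, base.length - last); // tail of the base revision
        return out.toByteArray();
    }

    public static void main(String[] args) {
        byte[] base = "hello world".getBytes();
        // replace bytes [6, 11) ("world") with "revlog"
        byte[] result = apply(base, Arrays.asList(new Frag(6, 11, "revlog".getBytes())));
        System.out.println(new String(result)); // prints: hello revlog
    }
}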
--- a/src/com/tmate/hgkit/ll/package.html Sun Jan 16 01:40:38 2011 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ -<html> -<boody> -Low-level API operations -</bidy> -</html> \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgBadStateException.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +/** + * hg4j's own internal error or unexpected state. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +@SuppressWarnings("serial") +public class HgBadStateException extends RuntimeException { + + // FIXME quick-n-dirty fix, don't allow exceptions without a cause + public HgBadStateException() { + super("Internal error"); + } + + public HgBadStateException(String message) { + super(message); + } + + public HgBadStateException(Throwable cause) { + super(cause); + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgCatCommand.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import static org.tmatesoft.hg.repo.HgInternals.wrongLocalRevision; +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.io.FileNotFoundException; +import java.io.IOException; + +import org.tmatesoft.hg.repo.HgDataFile; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.ByteChannel; +import org.tmatesoft.hg.util.CancelledException; +import org.tmatesoft.hg.util.Path; + +/** + * Command to obtain content of a file, 'hg cat' counterpart. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgCatCommand { + + private final HgRepository repo; + private Path file; + private int localRevision = TIP; + private Nodeid revision; + + public HgCatCommand(HgRepository hgRepo) { + repo = hgRepo; + } + + /** + * File to read, required parameter + * @param fname path to a repository file, can't be <code>null</code> + * @return <code>this</code> for convenience + * @throws IllegalArgumentException if supplied fname is null or points to directory + */ + public HgCatCommand file(Path fname) { + if (fname == null || fname.isDirectory()) { + throw new IllegalArgumentException(String.valueOf(fname)); + } + file = fname; + return this; + } + + /** + * Invocation of this method clears revision set with {@link #revision(Nodeid)} or {@link #revision(int)} earlier. + * XXX rev can't be WORKING_COPY (if allowed, need to implement in #execute()) + * @param rev local revision number, non-negative, or one of predefined constants. Note, use of {@link HgRepository#BAD_REVISION}, + * although possible, makes little sense (command would fail if executed). + * @return <code>this</code> for convenience + */ + public HgCatCommand revision(int rev) { + if (wrongLocalRevision(rev)) { + throw new IllegalArgumentException(String.valueOf(rev)); + } + localRevision = rev; + revision = null; + return this; + } + + /** + * Select revision to read. Invocation of this method clears revision set with {@link #revision(int)} or {@link #revision(Nodeid)} earlier. + * + * @param nodeid - unique revision identifier, Note, use of <code>null</code> or {@link Nodeid#NULL} is senseless + * @return <code>this</code> for convenience + */ + public HgCatCommand revision(Nodeid nodeid) { + if (nodeid != null && nodeid.isNull()) { + nodeid = null; + } + revision = nodeid; + localRevision = BAD_REVISION; + return this; + } + + /** + * Runs the command with current set of parameters and pipes data to provided sink. + * + * @param sink output channel to write data to. 
+ * @throws HgDataStreamException + * @throws IllegalArgumentException when command arguments are incomplete or wrong + */ + public void execute(ByteChannel sink) throws HgDataStreamException, IOException, CancelledException { + if (localRevision == BAD_REVISION && revision == null) { + throw new IllegalArgumentException("Either local file revision number or nodeid shall be specified"); + } + if (file == null) { + throw new IllegalArgumentException("Name of the file is missing"); + } + if (sink == null) { + throw new IllegalArgumentException("Need an output channel"); + } + HgDataFile dataFile = repo.getFileNode(file); + if (!dataFile.exists()) { + throw new HgDataStreamException(file.toString(), new FileNotFoundException(file.toString())); + } + int revToExtract; + if (revision != null) { + revToExtract = dataFile.getLocalRevision(revision); + } else { + revToExtract = localRevision; + } + dataFile.contentWithFilters(revToExtract, sink); + } +}
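A minimal usage sketch for the command above, pairing it with the ByteArrayChannel sink added elsewhere in this changeset; the repository location and the class name CatExample are illustrative, not taken from the library:

import java.io.File;

import org.tmatesoft.hg.core.HgCatCommand;
import org.tmatesoft.hg.core.HgRepoFacade;
import org.tmatesoft.hg.internal.ByteArrayChannel;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.Path;

public class CatExample {
    public static void main(String[] args) throws Exception {
        HgRepoFacade facade = new HgRepoFacade();
        if (!facade.initFrom(new File("/path/to/repo"))) { // hypothetical location
            throw new IllegalStateException("Can't find repository");
        }
        ByteArrayChannel sink = new ByteArrayChannel(); // collects everything the command writes
        HgCatCommand cmd = facade.createCatCommand();
        cmd.file(Path.create("src/org/tmatesoft/hg/core/Nodeid.java")) // path relative to repo root
           .revision(HgRepository.TIP)
           .execute(sink);
        System.out.println(new String(sink.toArray()));
    }
}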
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgChangeset.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,182 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.tmatesoft.hg.core.HgLogCommand.FileRevision; +import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.HgStatusCollector; +import org.tmatesoft.hg.util.Path; + + +/** + * Record in the Mercurial changelog, describing single commit. + * + * Not thread-safe, don't try to read from different threads + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgChangeset implements Cloneable { + private final HgStatusCollector statusHelper; + private final Path.Source pathHelper; + + // + private RawChangeset changeset; + private Nodeid nodeid; + + // + private List<FileRevision> modifiedFiles, addedFiles; + private List<Path> deletedFiles; + private int revNumber; + + // XXX consider CommandContext with StatusCollector, PathPool etc. Commands optionally get CC through a cons or create new + // and pass it around + /*package-local*/HgChangeset(HgStatusCollector statusCollector, Path.Source pathFactory) { + statusHelper = statusCollector; + pathHelper = pathFactory; + } + + /*package-local*/ + void init(int localRevNumber, Nodeid nid, RawChangeset rawChangeset) { + revNumber = localRevNumber; + nodeid = nid; + changeset = rawChangeset; + modifiedFiles = addedFiles = null; + deletedFiles = null; + } + public int getRevision() { + return revNumber; + } + public Nodeid getNodeid() { + return nodeid; + } + public String getUser() { + return changeset.user(); + } + public String getComment() { + return changeset.comment(); + } + public String getBranch() { + return changeset.branch(); + } + public String getDate() { + return changeset.dateString(); + } + public Nodeid getManifestRevision() { + return changeset.manifest(); + } + + public List<Path> getAffectedFiles() { + // reports files as recorded in changelog. Note, merge revisions may have no + // files listed, and thus this method would return empty list, while + // #getModifiedFiles() would return list with merged file(s) (because it uses status to get 'em, not + // what #files() gives). 
+ ArrayList<Path> rv = new ArrayList<Path>(changeset.files().size()); + for (String name : changeset.files()) { + rv.add(pathHelper.path(name)); + } + return rv; + } + + public List<FileRevision> getModifiedFiles() { + if (modifiedFiles == null) { + initFileChanges(); + } + return modifiedFiles; + } + + public List<FileRevision> getAddedFiles() { + if (addedFiles == null) { + initFileChanges(); + } + return addedFiles; + } + + public List<Path> getRemovedFiles() { + if (deletedFiles == null) { + initFileChanges(); + } + return deletedFiles; + } + + public boolean isMerge() { + return !Nodeid.NULL.equals(getSecondParentRevision()); + } + + public Nodeid getFirstParentRevision() { + // XXX may read once for both p1 and p2 + // or use ParentWalker to minimize reads even more. + byte[] p1 = new byte[20]; + statusHelper.getRepo().getChangelog().parents(revNumber, new int[2], p1, null); + return Nodeid.fromBinary(p1, 0); + } + + public Nodeid getSecondParentRevision() { + byte[] p2 = new byte[20]; + statusHelper.getRepo().getChangelog().parents(revNumber, new int[2], null, p2); + return Nodeid.fromBinary(p2, 0); + } + + @Override + public HgChangeset clone() { + try { + HgChangeset copy = (HgChangeset) super.clone(); + copy.changeset = changeset.clone(); + return copy; + } catch (CloneNotSupportedException ex) { + throw new InternalError(ex.toString()); + } + } + + private /*synchronized*/ void initFileChanges() { + ArrayList<Path> deleted = new ArrayList<Path>(); + ArrayList<FileRevision> modified = new ArrayList<FileRevision>(); + ArrayList<FileRevision> added = new ArrayList<FileRevision>(); + HgStatusCollector.Record r = new HgStatusCollector.Record(); + statusHelper.change(revNumber, r); + final HgRepository repo = statusHelper.getRepo(); + for (Path s : r.getModified()) { + Nodeid nid = r.nodeidAfterChange(s); + if (nid == null) { + throw new HgBadStateException(); + } + modified.add(new FileRevision(repo, nid, s)); + } + for (Path s : r.getAdded()) { + Nodeid nid = r.nodeidAfterChange(s); + if (nid == null) { + throw new HgBadStateException(); + } + added.add(new FileRevision(repo, nid, s)); + } + for (Path s : r.getRemoved()) { + // with Path from getRemoved, may just copy + deleted.add(s); + } + modified.trimToSize(); + added.trimToSize(); + deleted.trimToSize(); + modifiedFiles = Collections.unmodifiableList(modified); + addedFiles = Collections.unmodifiableList(added); + deletedFiles = Collections.unmodifiableList(deleted); + } +} \ No newline at end of file
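Since HgChangeset only exposes getters, the typical consumer is an HgLogCommand.Handler (see HgLogCommand below) that reads the fields shown above; per the Handler documentation it should clone() the changeset if it needs to keep it beyond the callback. A small sketch of such a handler, with an illustrative class name:

import java.util.List;

import org.tmatesoft.hg.core.HgChangeset;
import org.tmatesoft.hg.core.HgLogCommand;
import org.tmatesoft.hg.util.Path;

// Prints the per-changeset data exposed by HgChangeset as it is reported.
public class ChangesetPrinter implements HgLogCommand.Handler {
    public void next(HgChangeset changeset) {
        System.out.printf("%d:%s %s%n", changeset.getRevision(),
                changeset.getNodeid().shortNotation(), changeset.getUser());
        if (changeset.isMerge()) {
            System.out.println("  merge, second parent: "
                    + changeset.getSecondParentRevision().shortNotation());
        }
        List<Path> files = changeset.getAffectedFiles(); // as recorded in the changelog entry
        for (Path p : files) {
            System.out.println("  " + p);
        }
    }
}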
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgDataStreamException.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import org.tmatesoft.hg.repo.HgDataFile; + +/** + * Any erroneous state with @link {@link HgDataFile} input/output, read/write operations + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +@SuppressWarnings("serial") +public class HgDataStreamException extends HgException { + + public HgDataStreamException(String message, Throwable cause) { + super(message, cause); + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgException.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +/** + * Root class for all hg4j exceptions. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +@SuppressWarnings("serial") +public class HgException extends Exception { + + public HgException(String reason) { + super(reason); + } + + public HgException(String reason, Throwable cause) { + super(reason, cause); + } + + public HgException(Throwable cause) { + super(cause); + } + +// /* XXX CONSIDER capability to pass extra information about errors */ +// public static class Status { +// public Status(String message, Throwable cause, int errorCode, Object extraData) { +// } +// } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgLogCommand.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,321 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.io.IOException; +import java.util.Calendar; +import java.util.Collections; +import java.util.ConcurrentModificationException; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; + +import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; +import org.tmatesoft.hg.repo.HgChangelog; +import org.tmatesoft.hg.repo.HgDataFile; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.HgStatusCollector; +import org.tmatesoft.hg.util.ByteChannel; +import org.tmatesoft.hg.util.CancelledException; +import org.tmatesoft.hg.util.Path; +import org.tmatesoft.hg.util.PathPool; +import org.tmatesoft.hg.util.PathRewrite; + + +/** + * Access to changelog, 'hg log' command counterpart. + * + * <pre> + * Usage: + * new LogCommand().limit(20).branch("maintenance-2.1").user("me").execute(new MyHandler()); + * </pre> + * Not thread-safe (each thread has to use own {@link HgLogCommand} instance). + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgLogCommand implements HgChangelog.Inspector { + + private final HgRepository repo; + private Set<String> users; + private Set<String> branches; + private int limit = 0, count = 0; + private int startRev = 0, endRev = TIP; + private Handler delegate; + private Calendar date; + private Path file; + private boolean followHistory; // makes sense only when file != null + private HgChangeset changeset; + + public HgLogCommand(HgRepository hgRepo) { + repo = hgRepo; + } + + /** + * Limit search to specified user. Multiple user names may be specified. Once set, user names can't be + * cleared, use new command instance in such cases. + * @param user - full or partial name of the user, case-insensitive, non-null. + * @return <code>this</code> instance for convenience + * @throws IllegalArgumentException when argument is null + */ + public HgLogCommand user(String user) { + if (user == null) { + throw new IllegalArgumentException(); + } + if (users == null) { + users = new TreeSet<String>(); + } + users.add(user.toLowerCase()); + return this; + } + + /** + * Limit search to specified branch. Multiple branch specification possible (changeset from any of these + * would be included in result). If unspecified, all branches are considered. There's no way to clean branch selection + * once set, create fresh new command instead. + * @param branch - branch name, case-sensitive, non-null. 
+ * @return <code>this</code> instance for convenience + * @throws IllegalArgumentException when branch argument is null + */ + public HgLogCommand branch(String branch) { + if (branch == null) { + throw new IllegalArgumentException(); + } + if (branches == null) { + branches = new TreeSet<String>(); + } + branches.add(branch); + return this; + } + + // limit search to specific date + // multiple? + public HgLogCommand date(Calendar date) { + this.date = date; + // FIXME implement + // isSet(field) - false => don't use in detection of 'same date' + throw HgRepository.notImplemented(); + } + + /** + * + * @param num - number of changeset to produce. Pass 0 to clear the limit. + * @return <code>this</code> instance for convenience + */ + public HgLogCommand limit(int num) { + limit = num; + return this; + } + + /** + * Limit to specified subset of Changelog, [min(rev1,rev2), max(rev1,rev2)], inclusive. + * Revision may be specified with {@link HgRepository#TIP} + * @param rev1 - local revision number + * @param rev2 - local revision number + * @return <code>this</code> instance for convenience + */ + public HgLogCommand range(int rev1, int rev2) { + if (rev1 != TIP && rev2 != TIP) { + startRev = rev2 < rev1 ? rev2 : rev1; + endRev = startRev == rev2 ? rev1 : rev2; + } else if (rev1 == TIP && rev2 != TIP) { + startRev = rev2; + endRev = rev1; + } else { + startRev = rev1; + endRev = rev2; + } + return this; + } + + /** + * Visit history of a given file only. + * @param file path relative to repository root. Pass <code>null</code> to reset. + * @param followCopyRename true to report changesets of the original file(-s), if copy/rename ever occured to the file. + */ + public HgLogCommand file(Path file, boolean followCopyRename) { + // multiple? Bad idea, would need to include extra method into Handler to tell start of next file + this.file = file; + followHistory = followCopyRename; + return this; + } + + /** + * Handy analog of {@link #file(Path, boolean)} when clients' paths come from filesystem and need conversion to repository's + */ + public HgLogCommand file(String file, boolean followCopyRename) { + return file(Path.create(repo.getToRepoPathHelper().rewrite(file)), followCopyRename); + } + + /** + * Similar to {@link #execute(org.tmatesoft.hg.repo.RawChangeset.Inspector)}, collects and return result as a list. + */ + public List<HgChangeset> execute() throws HgException { + CollectHandler collector = new CollectHandler(); + execute(collector); + return collector.getChanges(); + } + + /** + * + * @param inspector + * @throws IllegalArgumentException when inspector argument is null + * @throws ConcurrentModificationException if this log command instance is already running + */ + public void execute(Handler handler) throws HgException { + if (handler == null) { + throw new IllegalArgumentException(); + } + if (delegate != null) { + throw new ConcurrentModificationException(); + } + try { + delegate = handler; + count = 0; + HgStatusCollector statusCollector = new HgStatusCollector(repo); + // files listed in a changeset don't need their names to be rewritten (they are normalized already) + PathPool pp = new PathPool(new PathRewrite.Empty()); + // #file(String, boolean) above may utilize PathPool as well. CommandContext? 
+ statusCollector.setPathPool(pp); + changeset = new HgChangeset(statusCollector, pp); + if (file == null) { + repo.getChangelog().range(startRev, endRev, this); + } else { + HgDataFile fileNode = repo.getFileNode(file); + fileNode.history(startRev, endRev, this); + if (fileNode.isCopy()) { + // even if we do not follow history, report file rename + do { + if (handler instanceof FileHistoryHandler) { + FileRevision src = new FileRevision(repo, fileNode.getCopySourceRevision(), fileNode.getCopySourceName()); + FileRevision dst = new FileRevision(repo, fileNode.getRevision(0), fileNode.getPath()); + ((FileHistoryHandler) handler).copy(src, dst); + } + if (limit > 0 && count >= limit) { + // if limit reach, follow is useless. + break; + } + if (followHistory) { + fileNode = repo.getFileNode(fileNode.getCopySourceName()); + fileNode.history(this); + } + } while (followHistory && fileNode.isCopy()); + } + } + } finally { + delegate = null; + changeset = null; + } + } + + // + + public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) { + if (limit > 0 && count >= limit) { + return; + } + if (branches != null && !branches.contains(cset.branch())) { + return; + } + if (users != null) { + String csetUser = cset.user().toLowerCase(); + boolean found = false; + for (String u : users) { + if (csetUser.indexOf(u) != -1) { + found = true; + break; + } + } + if (!found) { + return; + } + } + if (date != null) { + // FIXME + } + count++; + changeset.init(revisionNumber, nodeid, cset); + delegate.next(changeset); + } + + public interface Handler { + /** + * @param changeset not necessarily a distinct instance each time, {@link HgChangeset#clone() clone()} if need a copy. + */ + void next(HgChangeset changeset); + } + + /** + * When {@link HgLogCommand} is executed against file, handler passed to {@link HgLogCommand#execute(Handler)} may optionally + * implement this interface to get information about file renames. Method {@link #copy(FileRevision, FileRevision)} would + * get invoked prior any changeset of the original file (if file history being followed) is reported via {@link #next(HgChangeset)}. + * + * For {@link HgLogCommand#file(Path, boolean)} with renamed file path and follow argument set to false, + * {@link #copy(FileRevision, FileRevision)} would be invoked for the first copy/rename in the history of the file, but not + * followed by any changesets. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ + public interface FileHistoryHandler extends Handler { + // XXX perhaps, should distinguish copy from rename? And what about merged revisions and following them? 
+ void copy(FileRevision from, FileRevision to); + } + + public static class CollectHandler implements Handler { + private final List<HgChangeset> result = new LinkedList<HgChangeset>(); + + public List<HgChangeset> getChanges() { + return Collections.unmodifiableList(result); + } + + public void next(HgChangeset changeset) { + result.add(changeset.clone()); + } + } + + public static final class FileRevision { + private final HgRepository repo; + private final Nodeid revision; + private final Path path; + + /*package-local*/FileRevision(HgRepository hgRepo, Nodeid rev, Path p) { + if (hgRepo == null || rev == null || p == null) { + // since it's package local, it is our code to blame for non validated arguments + throw new HgBadStateException(); + } + repo = hgRepo; + revision = rev; + path = p; + } + + public Path getPath() { + return path; + } + public Nodeid getRevision() { + return revision; + } + public void putContentTo(ByteChannel sink) throws HgDataStreamException, IOException, CancelledException { + HgDataFile fn = repo.getFileNode(path); + int localRevision = fn.getLocalRevision(revision); + fn.contentWithFilters(localRevision, sink); + } + } +}
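A usage sketch for the command above, reusing the ChangesetPrinter handler from the HgChangeset sketch; the filters, the limit and the repository location are arbitrary example values:

import java.io.File;
import java.util.List;

import org.tmatesoft.hg.core.HgChangeset;
import org.tmatesoft.hg.core.HgLogCommand;
import org.tmatesoft.hg.core.HgRepoFacade;

public class LogExample {
    public static void main(String[] args) throws Exception {
        HgRepoFacade facade = new HgRepoFacade();
        if (!facade.initFrom(new File("/path/to/repo"))) { // hypothetical location
            throw new IllegalStateException("Can't find repository");
        }
        // collect at most 20 changesets of user "artem" on branch "default"
        List<HgChangeset> changes = facade.createLogCommand()
                .user("artem").branch("default").limit(20).execute();
        for (HgChangeset cs : changes) {
            System.out.println(cs.getRevision() + ": " + cs.getComment());
        }
        // push results for a single file, following it across copy/rename
        facade.createLogCommand()
                .file("src/org/tmatesoft/hg/core/Nodeid.java", true)
                .execute(new ChangesetPrinter()); // handler from the HgChangeset sketch
    }
}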
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgManifestCommand.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,196 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import static org.tmatesoft.hg.repo.HgRepository.*; +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.util.ConcurrentModificationException; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.core.HgLogCommand.FileRevision; +import org.tmatesoft.hg.repo.HgManifest; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.Path; +import org.tmatesoft.hg.util.PathPool; +import org.tmatesoft.hg.util.PathRewrite; + + +/** + * Gives access to list of files in each revision (Mercurial manifest information), 'hg manifest' counterpart. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgManifestCommand { + + private final HgRepository repo; + private Path.Matcher matcher; + private int startRev = 0, endRev = TIP; + private Handler visitor; + private boolean needDirs = false; + + private final Mediator mediator = new Mediator(); + + public HgManifestCommand(HgRepository hgRepo) { + repo = hgRepo; + } + + /** + * Parameterize command to visit revisions <code>[rev1..rev2]</code>. + * @param rev1 - local revision number to start from. Non-negative. May be {@link HgRepository#TIP} (rev2 argument shall be {@link HgRepository#TIP} as well, then) + * @param rev2 - local revision number to end with, inclusive. Non-negative, greater or equal to rev1. May be {@link HgRepository#TIP}. + * @return <code>this</code> for convenience. + * @throws IllegalArgumentException if revision arguments are incorrect (see above). + */ + public HgManifestCommand range(int rev1, int rev2) { + // XXX if manifest range is different from that of changelog, need conversion utils (external?) + boolean badArgs = rev1 == BAD_REVISION || rev2 == BAD_REVISION || rev1 == WORKING_COPY || rev2 == WORKING_COPY; + badArgs |= rev2 != TIP && rev2 < rev1; // range(3, 1); + badArgs |= rev1 == TIP && rev2 != TIP; // range(TIP, 2), although this may be legitimate when TIP points to 2 + if (badArgs) { + throw new IllegalArgumentException(String.format("Bad range: [%d, %d]", rev1, rev2)); + } + startRev = rev1; + endRev = rev2; + return this; + } + + public HgManifestCommand revision(int rev) { + startRev = endRev = rev; + return this; + } + + public HgManifestCommand dirs(boolean include) { + // XXX whether directories with directories only are include or not + // now lists only directories with files + needDirs = include; + return this; + } + + /** + * Limit manifest walk to a subset of files. + * @param pathMatcher - filter, pass <code>null</code> to clear. 
+ * @return <code>this</code> instance for convenience + */ + public HgManifestCommand match(Path.Matcher pathMatcher) { + matcher = pathMatcher; + return this; + } + + /** + * Runs the command. + * @param handler - callback to get the outcome + * @throws IllegalArgumentException if handler is <code>null</code> + * @throws ConcurrentModificationException if this command is already in use (running) + */ + public void execute(Handler handler) { + if (handler == null) { + throw new IllegalArgumentException(); + } + if (visitor != null) { + throw new ConcurrentModificationException(); + } + try { + visitor = handler; + mediator.start(); + repo.getManifest().walk(startRev, endRev, mediator); + } finally { + mediator.done(); + visitor = null; + } + } + + /** + * Callback to walk file/directory tree of a revision + */ + public interface Handler { + void begin(Nodeid manifestRevision); + void dir(Path p); // optionally invoked (if walker was configured to spit out directories) prior to any files from this dir and subdirs + void file(FileRevision fileRevision); // XXX allow to check p is invalid (df.exists()) + void end(Nodeid manifestRevision); + } + + // I'd rather let HgManifestCommand implement HgManifest.Inspector directly, but this pollutes API alot + private class Mediator implements HgManifest.Inspector { + // file names are likely to repeat in each revision, hence caching of Paths. + // However, once HgManifest.Inspector switches to Path objects, perhaps global Path pool + // might be more effective? + private PathPool pathPool; + private List<FileRevision> manifestContent; + private Nodeid manifestNodeid; + + public void start() { + // Manifest keeps normalized paths + pathPool = new PathPool(new PathRewrite.Empty()); + } + + public void done() { + manifestContent = null; + pathPool = null; + } + + public boolean begin(int revision, Nodeid nid) { + if (needDirs && manifestContent == null) { + manifestContent = new LinkedList<FileRevision>(); + } + visitor.begin(manifestNodeid = nid); + return true; + } + public boolean end(int revision) { + if (needDirs) { + LinkedHashMap<Path, LinkedList<FileRevision>> breakDown = new LinkedHashMap<Path, LinkedList<FileRevision>>(); + for (FileRevision fr : manifestContent) { + Path filePath = fr.getPath(); + Path dirPath = pathPool.parent(filePath); + LinkedList<FileRevision> revs = breakDown.get(dirPath); + if (revs == null) { + revs = new LinkedList<FileRevision>(); + breakDown.put(dirPath, revs); + } + revs.addLast(fr); + } + for (Path dir : breakDown.keySet()) { + visitor.dir(dir); + for (FileRevision fr : breakDown.get(dir)) { + visitor.file(fr); + } + } + manifestContent.clear(); + } + visitor.end(manifestNodeid); + manifestNodeid = null; + return true; + } + public boolean next(Nodeid nid, String fname, String flags) { + Path p = pathPool.path(fname); + if (matcher != null && !matcher.accept(p)) { + return true; + } + FileRevision fr = new FileRevision(repo, nid, p); + if (needDirs) { + manifestContent.add(fr); + } else { + visitor.file(fr); + } + return true; + } + } +}
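A sketch of walking the manifest through the Handler interface above, printing directories and file revisions of the tip revision; the class name and repository location are illustrative, and TIP is assumed to be acceptable to the underlying manifest walk:

import java.io.File;

import org.tmatesoft.hg.core.HgLogCommand.FileRevision;
import org.tmatesoft.hg.core.HgManifestCommand;
import org.tmatesoft.hg.core.HgRepoFacade;
import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.Path;

public class ManifestExample implements HgManifestCommand.Handler {
    public void begin(Nodeid manifestRevision) {
        System.out.println("manifest " + manifestRevision.shortNotation());
    }
    public void dir(Path p) {
        System.out.println("  " + p);
    }
    public void file(FileRevision fileRevision) {
        System.out.println("    " + fileRevision.getPath() + " " + fileRevision.getRevision().shortNotation());
    }
    public void end(Nodeid manifestRevision) {
        System.out.println();
    }

    public static void main(String[] args) throws Exception {
        HgRepoFacade facade = new HgRepoFacade();
        if (!facade.initFrom(new File("/path/to/repo"))) { // hypothetical location
            throw new IllegalStateException("Can't find repository");
        }
        facade.createManifestCommand()
                .revision(HgRepository.TIP)
                .dirs(true) // also report directories that contain the matched files
                .execute(new ManifestExample());
    }
}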
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgRepoFacade.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import java.io.File; + +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.HgLookup; + +/** + * Starting point for the library. + * <p>Sample use: + * <pre> + * HgRepoFacade f = new HgRepoFacade(); + * f.initFrom(System.getenv("whatever.repo.location")); + * HgStatusCommand statusCmd = f.createStatusCommand(); + * HgStatusCommand.Handler handler = ...; + * statusCmd.execute(handler); + * </pre> + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgRepoFacade { + private HgRepository repo; + + public HgRepoFacade() { + } + + /** + * @param hgRepo + * @return true on successful initialization + * @throws IllegalArgumentException when argument is null + */ + public boolean init(HgRepository hgRepo) { + if (hgRepo == null) { + throw new IllegalArgumentException(); + } + repo = hgRepo; + return !repo.isInvalid(); + } + + /** + * Tries to find repository starting from the current working directory. + * @return <code>true</code> if found valid repository + * @throws HgException in case of errors during repository initialization + */ + public boolean init() throws HgException { + repo = new HgLookup().detectFromWorkingDir(); + return repo != null && !repo.isInvalid(); + } + + /** + * Looks up Mercurial repository starting from specified location and up to filesystem root. + * + * @param repoLocation path to any folder within structure of a Mercurial repository. + * @return <code>true</code> if found valid repository + * @throws HgException if there are errors accessing specified location + * @throws IllegalArgumentException if argument is <code>null</code> + */ + public boolean initFrom(File repoLocation) throws HgException { + if (repoLocation == null) { + throw new IllegalArgumentException(); + } + repo = new HgLookup().detect(repoLocation); + return repo != null && !repo.isInvalid(); + } + + public HgRepository getRepository() { + if (repo == null) { + throw new IllegalStateException("Call any of #init*() methods first first"); + } + return repo; + } + + public HgLogCommand createLogCommand() { + return new HgLogCommand(repo/*, getCommandContext()*/); + } + + public HgStatusCommand createStatusCommand() { + return new HgStatusCommand(repo/*, getCommandContext()*/); + } + + public HgCatCommand createCatCommand() { + return new HgCatCommand(repo); + } + + public HgManifestCommand createManifestCommand() { + return new HgManifestCommand(repo); + } +}
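The facade above is the intended entry point for clients; a short sketch of bootstrapping it, with the fallback location purely illustrative:

import java.io.File;

import org.tmatesoft.hg.core.HgRepoFacade;
import org.tmatesoft.hg.repo.HgRepository;

public class FacadeBootstrap {
    public static void main(String[] args) throws Exception {
        HgRepoFacade facade = new HgRepoFacade();
        // try the current working directory first ...
        boolean found = facade.init();
        if (!found) {
            // ... then fall back to an explicit location (hypothetical path)
            found = facade.initFrom(new File("/path/to/repo"));
        }
        if (!found) {
            System.err.println("No Mercurial repository found");
            return;
        }
        HgRepository repo = facade.getRepository();
        System.out.println("repository is valid: " + !repo.isInvalid());
        // commands created from the same facade share its repository instance
        facade.createLogCommand();
        facade.createStatusCommand();
        facade.createCatCommand();
        facade.createManifestCommand();
    }
}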
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgStatus.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import java.util.Date; + +import org.tmatesoft.hg.internal.ChangelogHelper; +import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; +import org.tmatesoft.hg.util.Path; + +/** + * Repository file status and extra handy information. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgStatus { + + public enum Kind { + Modified, Added, Removed, Missing, Unknown, Clean, Ignored + // I'd refrain from changing order of these constants, just in case someone (erroneously, of course ;), uses .ordinal() + }; + + private final HgStatus.Kind kind; + private final Path path; + private final Path origin; + private final ChangelogHelper logHelper; + + HgStatus(HgStatus.Kind kind, Path path, ChangelogHelper changelogHelper) { + this(kind, path, null, changelogHelper); + } + + HgStatus(HgStatus.Kind kind, Path path, Path copyOrigin, ChangelogHelper changelogHelper) { + this.kind = kind; + this.path = path; + origin = copyOrigin; + logHelper = changelogHelper; + } + + public HgStatus.Kind getKind() { + return kind; + } + + public Path getPath() { + return path; + } + + public Path getOriginalPath() { + return origin; + } + + public boolean isCopy() { + return origin != null; + } + + /** + * @return <code>null</code> if author for the change can't be deduced (e.g. for clean files it's senseless) + */ + public String getModificationAuthor() { + RawChangeset cset = logHelper.findLatestChangeWith(path); + if (cset == null) { + if (kind == Kind.Modified || kind == Kind.Added || kind == Kind.Removed /*&& RightBoundary is TIP*/) { + // perhaps, also for Kind.Missing? + return logHelper.getNextCommitUsername(); + } + } else { + return cset.user(); + } + return null; + } + + public Date getModificationDate() { + RawChangeset cset = logHelper.findLatestChangeWith(path); + if (cset == null) { + // FIXME check dirstate and/or local file for tstamp + return new Date(); // what's correct + } else { + return cset.date(); + } + } +} \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/HgStatusCommand.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,284 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import static org.tmatesoft.hg.core.HgStatus.Kind.*; +import static org.tmatesoft.hg.repo.HgInternals.wrongLocalRevision; +import static org.tmatesoft.hg.repo.HgRepository.*; + +import java.util.ConcurrentModificationException; + +import org.tmatesoft.hg.internal.ChangelogHelper; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.HgStatusCollector; +import org.tmatesoft.hg.repo.HgStatusInspector; +import org.tmatesoft.hg.repo.HgWorkingCopyStatusCollector; +import org.tmatesoft.hg.util.Path; +import org.tmatesoft.hg.util.Path.Matcher; + +/** + * Command to obtain file status information, 'hg status' counterpart. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgStatusCommand { + private final HgRepository repo; + + private int startRevision = TIP; + private int endRevision = WORKING_COPY; + + private final Mediator mediator = new Mediator(); + + public HgStatusCommand(HgRepository hgRepo) { + repo = hgRepo; + defaults(); + } + + public HgStatusCommand defaults() { + final Mediator m = mediator; + m.needModified = m.needAdded = m.needRemoved = m.needUnknown = m.needMissing = true; + m.needCopies = m.needClean = m.needIgnored = false; + return this; + } + public HgStatusCommand all() { + final Mediator m = mediator; + m.needModified = m.needAdded = m.needRemoved = m.needUnknown = m.needMissing = true; + m.needCopies = m.needClean = m.needIgnored = true; + return this; + } + + + public HgStatusCommand modified(boolean include) { + mediator.needModified = include; + return this; + } + public HgStatusCommand added(boolean include) { + mediator.needAdded = include; + return this; + } + public HgStatusCommand removed(boolean include) { + mediator.needRemoved = include; + return this; + } + public HgStatusCommand deleted(boolean include) { + mediator.needMissing = include; + return this; + } + public HgStatusCommand unknown(boolean include) { + mediator.needUnknown = include; + return this; + } + public HgStatusCommand clean(boolean include) { + mediator.needClean = include; + return this; + } + public HgStatusCommand ignored(boolean include) { + mediator.needIgnored = include; + return this; + } + + /** + * If set, either base:revision or base:workingdir + * to unset, pass {@link HgRepository#TIP} or {@link HgRepository#BAD_REVISION} + * @param revision - local revision number to base status from + * @return <code>this</code> for convenience + * @throws IllegalArgumentException when revision is negative or {@link HgRepository#WORKING_COPY} + */ + public HgStatusCommand base(int revision) { + if (revision == WORKING_COPY || wrongLocalRevision(revision)) { + throw new 
IllegalArgumentException(String.valueOf(revision)); + } + if (revision == BAD_REVISION) { + revision = TIP; + } + startRevision = revision; + return this; + } + + /** + * Revision without base == --change + * Pass {@link HgRepository#WORKING_COPY} or {@link HgRepository#BAD_REVISION} to reset + * @param revision - non-negative local revision number, or any of {@link HgRepository#BAD_REVISION}, {@link HgRepository#WORKING_COPY} or {@link HgRepository#TIP} + * @return <code>this</code> for convenience + * @throws IllegalArgumentException if local revision number doesn't specify legitimate revision. + */ + public HgStatusCommand revision(int revision) { + if (revision == BAD_REVISION) { + revision = WORKING_COPY; + } + if (wrongLocalRevision(revision)) { + throw new IllegalArgumentException(String.valueOf(revision)); + } + endRevision = revision; + return this; + } + + /** + * Shorthand for {@link #base(int) cmd.base(BAD_REVISION)}{@link #change(int) .revision(revision)} + * + * @param revision compare given revision against its parent + * @return <code>this</code> for convenience + */ + public HgStatusCommand change(int revision) { + base(BAD_REVISION); + return revision(revision); + } + + /** + * Limit status operation to certain sub-tree. + * + * @param pathMatcher - matcher to use, pass <code>null/<code> to reset + * @return <code>this</code> for convenience + */ + public HgStatusCommand match(Path.Matcher pathMatcher) { + mediator.matcher = pathMatcher; + return this; + } + + public HgStatusCommand subrepo(boolean visit) { + throw HgRepository.notImplemented(); + } + + /** + * Perform status operation according to parameters set. + * + * @param handler callback to get status information + * @throws IllegalArgumentException if handler is <code>null</code> + * @throws ConcurrentModificationException if this command already runs (i.e. 
being used from another thread) + */ + public void execute(Handler statusHandler) { + if (statusHandler == null) { + throw new IllegalArgumentException(); + } + if (mediator.busy()) { + throw new ConcurrentModificationException(); + } + HgStatusCollector sc = new HgStatusCollector(repo); // TODO from CommandContext +// PathPool pathHelper = new PathPool(repo.getPathHelper()); // TODO from CommandContext + try { + // XXX if I need a rough estimation (for ProgressMonitor) of number of work units, + // I may use number of files in either rev1 or rev2 manifest edition + mediator.start(statusHandler, new ChangelogHelper(repo, startRevision)); + if (endRevision == WORKING_COPY) { + HgWorkingCopyStatusCollector wcsc = new HgWorkingCopyStatusCollector(repo); + wcsc.setBaseRevisionCollector(sc); + wcsc.walk(startRevision, mediator); + } else { + if (startRevision == TIP) { + sc.change(endRevision, mediator); + } else { + sc.walk(startRevision, endRevision, mediator); + } + } + } finally { + mediator.done(); + } + } + + public interface Handler { + void handleStatus(HgStatus s); + } + + private class Mediator implements HgStatusInspector { + boolean needModified; + boolean needAdded; + boolean needRemoved; + boolean needUnknown; + boolean needMissing; + boolean needClean; + boolean needIgnored; + boolean needCopies; + Matcher matcher; + Handler handler; + private ChangelogHelper logHelper; + + Mediator() { + } + + public void start(Handler h, ChangelogHelper changelogHelper) { + handler = h; + logHelper = changelogHelper; + } + + public void done() { + handler = null; + logHelper = null; + } + + public boolean busy() { + return handler != null; + } + + public void modified(Path fname) { + if (needModified) { + if (matcher == null || matcher.accept(fname)) { + handler.handleStatus(new HgStatus(Modified, fname, logHelper)); + } + } + } + public void added(Path fname) { + if (needAdded) { + if (matcher == null || matcher.accept(fname)) { + handler.handleStatus(new HgStatus(Added, fname, logHelper)); + } + } + } + public void removed(Path fname) { + if (needRemoved) { + if (matcher == null || matcher.accept(fname)) { + handler.handleStatus(new HgStatus(Removed, fname, logHelper)); + } + } + } + public void copied(Path fnameOrigin, Path fnameAdded) { + if (needCopies) { + if (matcher == null || matcher.accept(fnameAdded)) { + handler.handleStatus(new HgStatus(Added, fnameAdded, fnameOrigin, logHelper)); + } + } + } + public void missing(Path fname) { + if (needMissing) { + if (matcher == null || matcher.accept(fname)) { + handler.handleStatus(new HgStatus(Missing, fname, logHelper)); + } + } + } + public void unknown(Path fname) { + if (needUnknown) { + if (matcher == null || matcher.accept(fname)) { + handler.handleStatus(new HgStatus(Unknown, fname, logHelper)); + } + } + } + public void clean(Path fname) { + if (needClean) { + if (matcher == null || matcher.accept(fname)) { + handler.handleStatus(new HgStatus(Clean, fname, logHelper)); + } + } + } + public void ignored(Path fname) { + if (needIgnored) { + if (matcher == null || matcher.accept(fname)) { + handler.handleStatus(new HgStatus(Ignored, fname, logHelper)); + } + } + } + } +}
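A sketch that reports working-directory status against the tip revision, narrowed down to modified/added/removed entries, via the Handler interface above; the class name and repository location are illustrative:

import java.io.File;

import org.tmatesoft.hg.core.HgRepoFacade;
import org.tmatesoft.hg.core.HgStatus;
import org.tmatesoft.hg.core.HgStatusCommand;

public class StatusExample implements HgStatusCommand.Handler {
    public void handleStatus(HgStatus s) {
        System.out.println(s.getKind() + " " + s.getPath()
                + (s.isCopy() ? " (copied from " + s.getOriginalPath() + ")" : ""));
    }

    public static void main(String[] args) throws Exception {
        HgRepoFacade facade = new HgRepoFacade();
        if (!facade.initFrom(new File("/path/to/repo"))) { // hypothetical location
            throw new IllegalStateException("Can't find repository");
        }
        HgStatusCommand cmd = facade.createStatusCommand(); // defaults: TIP vs. working copy, M/A/R/?/! reported
        cmd.unknown(false).deleted(false); // keep only modified, added and removed
        cmd.execute(new StatusExample());
    }
}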
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/Nodeid.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,163 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.core; + +import static org.tmatesoft.hg.internal.DigestHelper.toHexString; + +import java.util.Arrays; + + + +/** + * A 20-bytes (40 characters) long hash value to identify a revision. + * @see http://mercurial.selenic.com/wiki/Nodeid + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + * + */ +public final class Nodeid { + + /** + * <b>nullid</b>, empty root revision. + */ + public static final Nodeid NULL = new Nodeid(new byte[20], false); + + private final byte[] binaryData; + + /** + * @param binaryRepresentation - array of exactly 20 bytes + * @param shallClone - true if array is subject to future modification and shall be copied, not referenced + * @throws IllegalArgumentException if supplied binary representation doesn't correspond to 20 bytes of sha1 digest + */ + public Nodeid(byte[] binaryRepresentation, boolean shallClone) { + // 5 int fields => 32 bytes + // byte[20] => 48 bytes + if (binaryRepresentation == null || binaryRepresentation.length != 20) { + throw new IllegalArgumentException(); + } + this.binaryData = shallClone ? binaryRepresentation.clone() : binaryRepresentation; + } + + @Override + public int hashCode() { + // digest (part thereof) seems to be nice candidate for the hashCode + byte[] b = binaryData; + return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); + } + + @Override + public boolean equals(Object o) { + if (o instanceof Nodeid) { + return this == o || equalsTo(((Nodeid) o).binaryData); + } + return false; + } + + public boolean equalsTo(byte[] buf) { + return Arrays.equals(this.binaryData, buf); + } + + @Override + public String toString() { + // XXX may want to output just single 0 for the NULL id? + return toHexString(binaryData, 0, binaryData.length); + } + + public String shortNotation() { + return toHexString(binaryData, 0, 6); + } + + public boolean isNull() { + if (this == NULL) { + return true; + } + for (int i = 0; i < 20; i++) { + if (this.binaryData[i] != 0) { + return false; + } + } + return true; + } + + // copy + public byte[] toByteArray() { + return binaryData.clone(); + } + + /** + * Factory for {@link Nodeid Nodeids}. 
+ * Primary difference with cons is handling of NULL id (this method returns constant) and control over array + * duplication - this method always makes a copy of an array passed + * @param binaryRepresentation - byte array of a length at least offset + 20 + * @param offset - index in the array to start from + * @throws IllegalArgumentException when arguments don't select 20 bytes + */ + public static Nodeid fromBinary(byte[] binaryRepresentation, int offset) { + if (binaryRepresentation == null || binaryRepresentation.length - offset < 20) { + throw new IllegalArgumentException(); + } + int i = 0; + while (i < 20 && binaryRepresentation[offset+i] == 0) i++; + if (i == 20) { + return NULL; + } + if (offset == 0 && binaryRepresentation.length == 20) { + return new Nodeid(binaryRepresentation, true); + } + byte[] b = new byte[20]; // create new instance if no other reasonable guesses possible + System.arraycopy(binaryRepresentation, offset, b, 0, 20); + return new Nodeid(b, false); + } + + /** + * Parse encoded representation. + * + * @param asciiRepresentation - encoded form of the Nodeid. + * @return object representation + * @throws IllegalArgumentException when argument doesn't match encoded form of 20-bytes sha1 digest. + */ + public static Nodeid fromAscii(String asciiRepresentation) { + if (asciiRepresentation.length() != 40) { + throw new IllegalArgumentException(); + } + // XXX is better impl for String possible? + return fromAscii(asciiRepresentation.getBytes(), 0, 40); + } + + /** + * Parse encoded representation. Similar to {@link #fromAscii(String)}. + */ + public static Nodeid fromAscii(byte[] asciiRepresentation, int offset, int length) { + if (length != 40) { + throw new IllegalArgumentException(); + } + byte[] data = new byte[20]; + boolean zeroBytes = true; + for (int i = 0, j = offset; i < data.length; i++) { + int hiNibble = Character.digit(asciiRepresentation[j++], 16); + int lowNibble = Character.digit(asciiRepresentation[j++], 16); + byte b = (byte) (((hiNibble << 4) | lowNibble) & 0xFF); + data[i] = b; + zeroBytes = zeroBytes && b == 0; + } + if (zeroBytes) { + return NULL; + } + return new Nodeid(data, false); + } +}
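A small sketch of the factory methods and queries above; the 40-character value is arbitrary, chosen only for illustration:

import org.tmatesoft.hg.core.Nodeid;

public class NodeidExample {
    public static void main(String[] args) {
        // parse the 40-character hex form (arbitrary sample value)
        Nodeid n1 = Nodeid.fromAscii("000102030405060708090a0b0c0d0e0f10111213");
        // rebuild from the raw 20 bytes; fromBinary copies its input
        Nodeid n2 = Nodeid.fromBinary(n1.toByteArray(), 0);
        System.out.println(n1.equals(n2));      // true, value comparison
        System.out.println(n1.shortNotation()); // first 6 bytes, i.e. 12 hex characters
        // an all-zero array maps to the shared NULL constant
        System.out.println(Nodeid.fromBinary(new byte[20], 0) == Nodeid.NULL); // true
    }
}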
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/core/package.html Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,5 @@ +<html> +<body> +High-level API +</body> +</html> \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/ByteArrayChannel.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.nio.ByteBuffer; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.util.ByteChannel; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class ByteArrayChannel implements ByteChannel { + private final List<ByteBuffer> buffers; + private ByteBuffer target; + private byte[] result; + + public ByteArrayChannel() { + this(-1); + } + + public ByteArrayChannel(int size) { + if (size == -1) { + buffers = new LinkedList<ByteBuffer>(); + } else { + if (size < 0) { + throw new IllegalArgumentException(String.valueOf(size)); + } + buffers = null; + target = ByteBuffer.allocate(size); + } + } + + // TODO document what happens on write after toArray() in each case + public int write(ByteBuffer buffer) { + int rv = buffer.remaining(); + if (buffers == null) { + target.put(buffer); + } else { + ByteBuffer copy = ByteBuffer.allocate(rv); + copy.put(buffer); + buffers.add(copy); + } + return rv; + } + + public byte[] toArray() { + if (result != null) { + return result; + } + if (buffers == null) { + assert target.hasArray(); + // int total = target.position(); + // System.arraycopy(target.array(), new byte[total]); + // I don't want to duplicate byte[] for now + // although correct way of doing things is to make a copy and discard target + return target.array(); + } else { + int total = 0; + for (ByteBuffer bb : buffers) { + bb.flip(); + total += bb.limit(); + } + result = new byte[total]; + int off = 0; + for (ByteBuffer bb : buffers) { + bb.get(result, off, bb.limit()); + off += bb.limit(); + } + buffers.clear(); + return result; + } + } +}
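A short sketch of how ByteArrayChannel collects written buffers; the class name around it is illustrative only.

    import java.nio.ByteBuffer;
    import org.tmatesoft.hg.internal.ByteArrayChannel;

    public class ByteArrayChannelExample {
        public static void main(String[] args) {
            // growable variant: every write() is copied into its own buffer
            ByteArrayChannel sink = new ByteArrayChannel();
            sink.write(ByteBuffer.wrap("hello ".getBytes()));
            sink.write(ByteBuffer.wrap("world".getBytes()));
            System.out.println(new String(sink.toArray())); // hello world
            // fixed-size variant: one pre-allocated buffer, toArray() hands out its backing array
            ByteArrayChannel fixed = new ByteArrayChannel(11);
            fixed.write(ByteBuffer.wrap("hello world".getBytes()));
            System.out.println(fixed.toArray().length); // 11
        }
    }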
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/ByteArrayDataAccess.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.IOException; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class ByteArrayDataAccess extends DataAccess { + + private final byte[] data; + private final int offset; + private final int length; + private int pos; + + public ByteArrayDataAccess(byte[] data) { + this(data, 0, data.length); + } + + public ByteArrayDataAccess(byte[] data, int offset, int length) { + this.data = data; + this.offset = offset; + this.length = length; + pos = 0; + } + + @Override + public byte readByte() throws IOException { + if (pos >= length) { + throw new IOException(); + } + return data[offset + pos++]; + } + @Override + public void readBytes(byte[] buf, int off, int len) throws IOException { + if (len > (this.length - pos)) { + throw new IOException(); + } + System.arraycopy(data, pos, buf, off, len); + pos += len; + } + + @Override + public ByteArrayDataAccess reset() { + pos = 0; + return this; + } + @Override + public long length() { + return length; + } + @Override + public void seek(long offset) { + pos = (int) offset; + } + @Override + public void skip(int bytes) throws IOException { + seek(pos + bytes); + } + @Override + public boolean isEmpty() { + return pos >= length; + } + + // + + // when byte[] needed from DA, we may save few cycles and some memory giving this (otherwise unsafe) access to underlying data + @Override + public byte[] byteArray() { + return data; + } +}
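Positioning semantics of ByteArrayDataAccess in a small sketch. Note that readBytes() as committed copies from data[pos] rather than data[offset + pos], so the sketch sticks to the zero-offset constructor and readByte(), which does honor the offset.

    import org.tmatesoft.hg.internal.ByteArrayDataAccess;
    import org.tmatesoft.hg.internal.DataAccess;

    public class ByteArrayDataAccessExample {
        public static void main(String[] args) throws Exception {
            byte[] data = { 10, 20, 30, 40, 50 };
            DataAccess da = new ByteArrayDataAccess(data);
            System.out.println(da.readByte()); // 10
            da.skip(2);                        // relative move, now at index 3
            System.out.println(da.readByte()); // 40
            da.seek(1);                        // absolute move
            System.out.println(da.readByte()); // 20
            da.reset();                        // back to the initial state
            System.out.println(da.length());   // 5
            System.out.println(da.isEmpty());  // false
        }
    }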
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/ChangelogHelper.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.util.TreeMap; + +import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; +import org.tmatesoft.hg.repo.HgDataFile; +import org.tmatesoft.hg.repo.HgInternals; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.Path; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class ChangelogHelper { + private final int leftBoundary; + private final HgRepository repo; + private final TreeMap<Integer, RawChangeset> cache = new TreeMap<Integer, RawChangeset>(); + private String nextCommitAuthor; + + /** + * @param hgRepo + * @param leftBoundaryRevision walker never visits revisions with local numbers less than specified, + * IOW only revisions [leftBoundaryRevision..TIP] are considered. + */ + public ChangelogHelper(HgRepository hgRepo, int leftBoundaryRevision) { + repo = hgRepo; + leftBoundary = leftBoundaryRevision; + } + + /** + * @return the repo + */ + public HgRepository getRepo() { + return repo; + } + + /** + * Walks changelog in reverse order + * @param file + * @return changeset where specified file is mentioned among affected files, or + * <code>null</code> if none found up to leftBoundary + */ + public RawChangeset findLatestChangeWith(Path file) { + HgDataFile df = repo.getFileNode(file); + int changelogRev = df.getChangesetLocalRevision(HgRepository.TIP); + if (changelogRev >= leftBoundary) { + // the method is likely to be invoked for different files, + // while changesets might be the same. Cache 'em not to read too much. + RawChangeset cs = cache.get(changelogRev); + if (cs == null) { + cs = repo.getChangelog().range(changelogRev, changelogRev).get(0); + cache.put(changelogRev, cs); + } + return cs; + } + return null; + } + + public String getNextCommitUsername() { + if (nextCommitAuthor == null) { + nextCommitAuthor = new HgInternals(repo).getNextCommitUsername(); + } + return nextCommitAuthor; + } +}
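A sketch of how a caller might use ChangelogHelper; the repository and path are passed in because obtaining them is outside this class. Note that findLatestChangeWith() in this revision only checks the file's latest changeset against the left boundary rather than walking further back.

    import org.tmatesoft.hg.internal.ChangelogHelper;
    import org.tmatesoft.hg.repo.HgChangelog.RawChangeset;
    import org.tmatesoft.hg.repo.HgRepository;
    import org.tmatesoft.hg.util.Path;

    public class ChangelogHelperExample {
        static void printLastChange(HgRepository repo, Path file) {
            ChangelogHelper helper = new ChangelogHelper(repo, 0); // 0: consider the whole history
            RawChangeset cs = helper.findLatestChangeWith(file);
            if (cs == null) {
                System.out.printf("%s: no changeset found%n", file);
            } else {
                System.out.printf("%s last touched by %s on %s%n", file, cs.user(), cs.date());
            }
        }
    }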
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/ConfigFile.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class ConfigFile { + + private List<String> sections; + private List<Map<String,String>> content; + + ConfigFile() { + } + + public void addLocation(File path) { + read(path); + } + + public boolean hasSection(String sectionName) { + return sections == null ? false : sections.indexOf(sectionName) == -1; + } + + // XXX perhaps, should be moved to subclass HgRepoConfig, as it is not common operation for any config file + public boolean hasEnabledExtension(String extensionName) { + int x = sections != null ? sections.indexOf("extensions") : -1; + if (x == -1) { + return false; + } + String value = content.get(x).get(extensionName); + return value != null && !"!".equals(value); + } + + public List<String> getSectionNames() { + return sections == null ? Collections.<String>emptyList() : Collections.unmodifiableList(sections); + } + + public Map<String,String> getSection(String sectionName) { + if (sections == null) { + return Collections.emptyMap(); + } + int x = sections.indexOf(sectionName); + if (x == -1) { + return Collections.emptyMap(); + } + return Collections.unmodifiableMap(content.get(x)); + } + + public boolean getBoolean(String sectionName, String key, boolean defaultValue) { + String value = getSection(sectionName).get(key); + if (value == null) { + return defaultValue; + } + for (String s : new String[] { "true", "yes", "on", "1" }) { + if (s.equalsIgnoreCase(value)) { + return true; + } + } + return false; + } + + public String getString(String sectionName, String key, String defaultValue) { + String value = getSection(sectionName).get(key); + return value == null ? 
defaultValue : value; + } + + // TODO handle %include and %unset directives + // TODO "" and lists + private void read(File f) { + if (f == null || !f.canRead()) { + return; + } + if (sections == null) { + sections = new ArrayList<String>(); + content = new ArrayList<Map<String,String>>(); + } + try { + BufferedReader br = new BufferedReader(new FileReader(f)); + String line; + String sectionName = ""; + Map<String,String> section = new LinkedHashMap<String, String>(); + while ((line = br.readLine()) != null) { + line = line.trim(); + int x; + if ((x = line.indexOf('#')) != -1) { + // do not keep comments in memory, get new, shorter string + line = new String(line.substring(0, x).trim()); + } + if (line.length() <= 2) { // a=b or [a] are at least of length 3 + continue; + } + if (line.charAt(0) == '[' && line.charAt(line.length() - 1) == ']') { + sectionName = line.substring(1, line.length() - 1); + if (sections.indexOf(sectionName) == -1) { + sections.add(sectionName); + content.add(section = new LinkedHashMap<String, String>()); + } else { + section = null; // drop cached value + } + } else if ((x = line.indexOf('=')) != -1) { + // share char[] of the original string + String key = line.substring(0, x).trim(); + String value = line.substring(x+1).trim(); + if (section == null) { + int i = sections.indexOf(sectionName); + assert i >= 0; + section = content.get(i); + } + if (sectionName.length() == 0) { + // add fake section only if there are any values + sections.add(sectionName); + content.add(section); + } + section.put(key, value); + } + } + br.close(); + } catch (IOException ex) { + ex.printStackTrace(); // XXX shall outer world care? + } + ((ArrayList<?>) sections).trimToSize(); + ((ArrayList<?>) content).trimToSize(); + assert sections.size() == content.size(); + } +}
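A usage sketch for ConfigFile. The constructor is package-private, so instances come from Internals.newConfigFile(); missing files and unknown keys degrade to empty collections or the supplied default. (hasSection() in this revision returns indexOf(..) == -1, i.e. the opposite of what its name suggests, so the sketch relies on getSection()/getString() instead.)

    import java.io.File;
    import org.tmatesoft.hg.internal.ConfigFile;
    import org.tmatesoft.hg.internal.Internals;

    public class ConfigFileExample {
        public static void main(String[] args) {
            ConfigFile cfg = new Internals().newConfigFile();
            cfg.addLocation(new File(System.getProperty("user.home"), ".hgrc"));
            String username = cfg.getString("ui", "username", "<not set>");
            System.out.println("ui.username = " + username);
            System.out.println("eol extension enabled: " + cfg.hasEnabledExtension("eol"));
            System.out.println("sections: " + cfg.getSectionNames());
        }
    }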
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/DataAccess.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * relevant parts of DataInput, non-stream nature (seek operation), explicit check for end of data. + * convenient skip (+/- bytes) + * Primary goal - effective file read, so that clients don't need to care whether to call few + * distinct getInt() or readBytes(totalForFewInts) and parse themselves instead in an attempt to optimize. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class DataAccess { + public boolean isEmpty() { + return true; + } + public long length() { + return 0; + } + /** + * get this instance into initial state + * @throws IOException + * @return <code>this</code> for convenience + */ + public DataAccess reset() throws IOException { + // nop, empty instance is always in the initial state + return this; + } + // absolute positioning + public void seek(long offset) throws IOException { + throw new UnsupportedOperationException(); + } + // relative positioning + public void skip(int bytes) throws IOException { + throw new UnsupportedOperationException(); + } + // shall be called once this object no longer needed + public void done() { + // no-op in this empty implementation + } + public int readInt() throws IOException { + byte[] b = new byte[4]; + readBytes(b, 0, 4); + return b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); + } + public long readLong() throws IOException { + byte[] b = new byte[8]; + readBytes(b, 0, 8); + int i1 = b[0] << 24 | (b[1] & 0xFF) << 16 | (b[2] & 0xFF) << 8 | (b[3] & 0xFF); + int i2 = b[4] << 24 | (b[5] & 0xFF) << 16 | (b[6] & 0xFF) << 8 | (b[7] & 0xFF); + return ((long) i1) << 32 | ((long) i2 & 0xFFFFFFFFl); + } + public void readBytes(byte[] buf, int offset, int length) throws IOException { + throw new UnsupportedOperationException(); + } + // reads bytes into ByteBuffer, up to its limit or total data length, whichever smaller + // FIXME perhaps, in DataAccess paradigm (when we read known number of bytes, we shall pass specific byte count to read) + public void readBytes(ByteBuffer buf) throws IOException { +// int toRead = Math.min(buf.remaining(), (int) length()); +// if (buf.hasArray()) { +// readBytes(buf.array(), buf.arrayOffset(), toRead); +// } else { +// byte[] bb = new byte[toRead]; +// readBytes(bb, 0, bb.length); +// buf.put(bb); +// } + // FIXME optimize to read as much as possible at once + while (!isEmpty() && buf.hasRemaining()) { + buf.put(readByte()); + } + } + public byte readByte() throws IOException { + throw new UnsupportedOperationException(); + } + + // XXX decide whether may or may not change position in the DataAccess + // FIXME 
exception handling is not right, just for the sake of quick test + public byte[] byteArray() throws IOException { + reset(); + byte[] rv = new byte[(int) length()]; + readBytes(rv, 0, rv.length); + return rv; + } +}
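How the base DataAccess assembles multi-byte values, sketched with ByteArrayDataAccess as the concrete implementation: readInt()/readLong() are big-endian compositions of readBytes(), and readBytes(ByteBuffer) currently drains one byte at a time.

    import java.nio.ByteBuffer;
    import org.tmatesoft.hg.internal.ByteArrayDataAccess;
    import org.tmatesoft.hg.internal.DataAccess;

    public class DataAccessExample {
        public static void main(String[] args) throws Exception {
            byte[] raw = { 0, 0, 0, 42,  0, 0, 0, 1,  0, 0, 0, 2 };
            DataAccess da = new ByteArrayDataAccess(raw);
            System.out.println(da.readInt());  // 42: four bytes, big-endian
            System.out.println(da.readLong()); // (1L << 32) | 2 == 4294967298
            da.reset();
            ByteBuffer buf = ByteBuffer.allocate(4);
            da.readBytes(buf);                 // fills the buffer up to its limit
            buf.flip();
            System.out.println(buf.getInt());  // 42 again
        }
    }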
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/DataAccessProvider.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,301 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.MappedByteBuffer; +import java.nio.channels.FileChannel; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class DataAccessProvider { + + private final int mapioMagicBoundary; + private final int bufferSize; + + public DataAccessProvider() { + this(100 * 1024, 8 * 1024); + } + + public DataAccessProvider(int mapioBoundary, int regularBufferSize) { + mapioMagicBoundary = mapioBoundary; + bufferSize = regularBufferSize; + } + + public DataAccess create(File f) { + if (!f.exists()) { + return new DataAccess(); + } + try { + FileChannel fc = new FileInputStream(f).getChannel(); + if (fc.size() > mapioMagicBoundary) { + // TESTS: bufLen of 1024 was used to test MemMapFileAccess + return new MemoryMapFileAccess(fc, fc.size(), mapioMagicBoundary); + } else { + // XXX once implementation is more or less stable, + // may want to try ByteBuffer.allocateDirect() to see + // if there's any performance gain. + boolean useDirectBuffer = false; + // TESTS: bufferSize of 100 was used to check buffer underflow states when readBytes reads chunks bigger than bufSize + return new FileAccess(fc, fc.size(), bufferSize, useDirectBuffer); + } + } catch (IOException ex) { + // unlikely to happen, we've made sure file exists. + ex.printStackTrace(); // FIXME log error + } + return new DataAccess(); // non-null, empty. + } + + // DOESN'T WORK YET + private static class MemoryMapFileAccess extends DataAccess { + private FileChannel fileChannel; + private final long size; + private long position = 0; // always points to buffer's absolute position in the file + private final int memBufferSize; + private MappedByteBuffer buffer; + + public MemoryMapFileAccess(FileChannel fc, long channelSize, int /*long?*/ bufferSize) { + fileChannel = fc; + size = channelSize; + memBufferSize = bufferSize; + } + + @Override + public boolean isEmpty() { + return position + (buffer == null ? 0 : buffer.position()) >= size; + } + + @Override + public long length() { + return size; + } + + @Override + public DataAccess reset() throws IOException { + seek(0); + return this; + } + + @Override + public void seek(long offset) { + assert offset >= 0; + // offset may not necessarily be further than current position in the file (e.g. 
rewind) + if (buffer != null && /*offset is within buffer*/ offset >= position && (offset - position) < buffer.limit()) { + buffer.position((int) (offset - position)); + } else { + position = offset; + buffer = null; + } + } + + @Override + public void skip(int bytes) throws IOException { + assert bytes >= 0; + if (buffer == null) { + position += bytes; + return; + } + if (buffer.remaining() > bytes) { + buffer.position(buffer.position() + bytes); + } else { + position += buffer.position() + bytes; + buffer = null; + } + } + + private void fill() throws IOException { + if (buffer != null) { + position += buffer.position(); + } + long left = size - position; + buffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, position, left < memBufferSize ? left : memBufferSize); + } + + @Override + public void readBytes(byte[] buf, int offset, int length) throws IOException { + if (buffer == null || !buffer.hasRemaining()) { + fill(); + } + // XXX in fact, we may try to create a MappedByteBuffer of exactly length size here, and read right away + while (length > 0) { + int tail = buffer.remaining(); + if (tail == 0) { + throw new IOException(); + } + if (tail >= length) { + buffer.get(buf, offset, length); + } else { + buffer.get(buf, offset, tail); + fill(); + } + offset += tail; + length -= tail; + } + } + + @Override + public byte readByte() throws IOException { + if (buffer == null || !buffer.hasRemaining()) { + fill(); + } + if (buffer.hasRemaining()) { + return buffer.get(); + } + throw new IOException(); + } + + @Override + public void done() { + buffer = null; + if (fileChannel != null) { + try { + fileChannel.close(); + } catch (IOException ex) { + ex.printStackTrace(); // log debug + } + fileChannel = null; + } + } + } + + // (almost) regular file access - FileChannel and buffers. + private static class FileAccess extends DataAccess { + private FileChannel fileChannel; + private final long size; + private ByteBuffer buffer; + private long bufferStartInFile = 0; // offset of this.buffer in the file. + + public FileAccess(FileChannel fc, long channelSize, int bufferSizeHint, boolean useDirect) { + fileChannel = fc; + size = channelSize; + final int capacity = size < bufferSizeHint ? (int) size : bufferSizeHint; + buffer = useDirect ? ByteBuffer.allocateDirect(capacity) : ByteBuffer.allocate(capacity); + buffer.flip(); // or .limit(0) to indicate it's empty + } + + @Override + public boolean isEmpty() { + return bufferStartInFile + buffer.position() >= size; + } + + @Override + public long length() { + return size; + } + + @Override + public DataAccess reset() throws IOException { + seek(0); + return this; + } + + @Override + public void seek(long offset) throws IOException { + if (offset > size) { + throw new IllegalArgumentException(); + } + if (offset < bufferStartInFile + buffer.limit() && offset >= bufferStartInFile) { + buffer.position((int) (offset - bufferStartInFile)); + } else { + // out of current buffer, invalidate it (force re-read) + // XXX or ever re-read it right away? 
+ bufferStartInFile = offset; + buffer.clear(); + buffer.limit(0); // or .flip() to indicate we switch to reading + fileChannel.position(offset); + } + } + + @Override + public void skip(int bytes) throws IOException { + final int newPos = buffer.position() + bytes; + if (newPos >= 0 && newPos < buffer.limit()) { + // no need to move file pointer, just rewind/seek buffer + buffer.position(newPos); + } else { + // + seek(bufferStartInFile + newPos); + } + } + + private boolean fill() throws IOException { + if (!buffer.hasRemaining()) { + bufferStartInFile += buffer.limit(); + buffer.clear(); + if (bufferStartInFile < size) { // just in case there'd be any exception on EOF, not -1 + fileChannel.read(buffer); + // may return -1 when EOF, but empty will reflect this, hence no explicit support here + } + buffer.flip(); + } + return buffer.hasRemaining(); + } + + @Override + public void readBytes(byte[] buf, int offset, int length) throws IOException { + if (!buffer.hasRemaining()) { + fill(); + } + while (length > 0) { + int tail = buffer.remaining(); + if (tail == 0) { + throw new IOException(); // shall not happen provided stream contains expected data and no attempts to read past isEmpty() == true are made. + } + if (tail >= length) { + buffer.get(buf, offset, length); + } else { + buffer.get(buf, offset, tail); + fill(); + } + offset += tail; + length -= tail; + } + } + + @Override + public byte readByte() throws IOException { + if (buffer.hasRemaining()) { + return buffer.get(); + } + if (fill()) { + return buffer.get(); + } + throw new IOException(); + } + + @Override + public void done() { + if (buffer != null) { + buffer = null; + } + if (fileChannel != null) { + try { + fileChannel.close(); + } catch (IOException ex) { + ex.printStackTrace(); // log debug + } + fileChannel = null; + } + } + } +}
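A usage sketch for DataAccessProvider. The mapio boundary is pushed to Integer.MAX_VALUE so the (still unfinished) memory-mapped branch is never taken and every file goes through the buffered FileAccess; the file name is whatever the caller supplies.

    import java.io.File;
    import org.tmatesoft.hg.internal.DataAccess;
    import org.tmatesoft.hg.internal.DataAccessProvider;

    public class DataAccessProviderExample {
        public static void main(String[] args) throws Exception {
            File f = new File(args[0]); // e.g. some file under .hg/store
            DataAccessProvider provider = new DataAccessProvider(Integer.MAX_VALUE, 8 * 1024);
            DataAccess da = provider.create(f); // empty DataAccess if the file does not exist
            try {
                System.out.println("length: " + da.length());
                int nonZero = 0;
                while (!da.isEmpty()) {
                    if (da.readByte() != 0) {
                        nonZero++;
                    }
                }
                System.out.println("non-zero bytes: " + nonZero);
            } finally {
                da.done(); // releases the underlying FileChannel
            }
        }
    }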
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/DigestHelper.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.IOException; +import java.io.InputStream; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +import org.tmatesoft.hg.core.Nodeid; + + +/** + * <pre> + * DigestHelper dh; + * dh.sha1(...).asHexString(); + * or + * dh = dh.sha1(...); + * nodeid.equalsTo(dh.asBinary()); + * </pre> + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class DigestHelper { + private MessageDigest sha1; + private byte[] digest; + + public DigestHelper() { + } + + private MessageDigest getSHA1() { + if (sha1 == null) { + try { + sha1 = MessageDigest.getInstance("SHA-1"); + } catch (NoSuchAlgorithmException ex) { + // could hardly happen, JDK from Sun always has sha1. + ex.printStackTrace(); // FIXME log error + } + } + return sha1; + } + + + public DigestHelper sha1(Nodeid nodeid1, Nodeid nodeid2, byte[] data) { + return sha1(nodeid1.toByteArray(), nodeid2.toByteArray(), data); + } + + // sha1_digest(min(p1,p2) ++ max(p1,p2) ++ final_text) + public DigestHelper sha1(byte[] nodeidParent1, byte[] nodeidParent2, byte[] data) { + MessageDigest alg = getSHA1(); + if ((nodeidParent1[0] & 0x00FF) < (nodeidParent2[0] & 0x00FF)) { + alg.update(nodeidParent1); + alg.update(nodeidParent2); + } else { + alg.update(nodeidParent2); + alg.update(nodeidParent1); + } + digest = alg.digest(data); + assert digest.length == 20; + return this; + } + + public String asHexString() { + if (digest == null) { + throw new IllegalStateException("Shall init with sha1() call first"); + } + return toHexString(digest, 0, digest.length); + } + + // by reference, be careful not to modify (or #clone() if needed) + public byte[] asBinary() { + if (digest == null) { + throw new IllegalStateException("Shall init with sha1() call first"); + } + return digest; + } + + // XXX perhaps, digest functions should throw an exception, as it's caller responsibility to deal with eof, etc + public DigestHelper sha1(InputStream is /*ByteBuffer*/) throws IOException { + MessageDigest alg = getSHA1(); + byte[] buf = new byte[1024]; + int c; + while ((c = is.read(buf)) != -1) { + alg.update(buf, 0, c); + } + digest = alg.digest(); + return this; + } + + public DigestHelper sha1(CharSequence... 
seq) { + MessageDigest alg = getSHA1(); + for (CharSequence s : seq) { + byte[] b = s.toString().getBytes(); + alg.update(b); + } + digest = alg.digest(); + return this; + } + + public static String toHexString(byte[] data, final int offset, final int count) { + char[] result = new char[count << 1]; + final String hexDigits = "0123456789abcdef"; + final int end = offset+count; + for (int i = offset, j = 0; i < end; i++) { + result[j++] = hexDigits.charAt((data[i] >>> 4) & 0x0F); + result[j++] = hexDigits.charAt(data[i] & 0x0F); + } + return new String(result); + } +}
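A sketch of the two DigestHelper entry points: a plain SHA-1 over character data, and the Mercurial revision hash sha1(min(p1,p2) ++ max(p1,p2) ++ content), here with both parents being nullid as for a root revision.

    import org.tmatesoft.hg.core.Nodeid;
    import org.tmatesoft.hg.internal.DigestHelper;

    public class DigestHelperExample {
        public static void main(String[] args) {
            DigestHelper dh = new DigestHelper();
            System.out.println(dh.sha1("hello").asHexString()); // plain SHA-1 of the characters
            byte[] content = "file content".getBytes();
            String revisionHash = dh.sha1(Nodeid.NULL, Nodeid.NULL, content).asHexString();
            System.out.println(revisionHash);
            // asBinary() exposes the same 20 bytes, e.g. to compare against a Nodeid
            Nodeid computed = Nodeid.fromBinary(dh.asBinary(), 0);
            System.out.println(computed.equalsTo(dh.asBinary())); // true
        }
    }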
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/Filter.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.nio.ByteBuffer; + +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.Path; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface Filter { + + // returns a buffer ready to be read. may return original buffer. + // original buffer may not be fully consumed, #compact() might be operation to perform + ByteBuffer filter(ByteBuffer src); + + interface Factory { + void initialize(HgRepository hgRepo, ConfigFile cfg); + // may return null if for a given path and/or options this filter doesn't make any sense + Filter create(Path path, Options opts); + } + + enum Direction { + FromRepo, ToRepo + } + + public class Options { + + private final Direction direction; + public Options(Direction dir) { + direction = dir; + } + + Direction getDirection() { + return direction; + } + + } +}
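A toy Filter implementation to illustrate the contract spelled out above (consume from src, return a buffer ready to be read, possibly the original one); it is illustrative only and not part of this changeset.

    import java.nio.ByteBuffer;
    import org.tmatesoft.hg.internal.Filter;

    public class UppercaseFilter implements Filter {
        public ByteBuffer filter(ByteBuffer src) {
            ByteBuffer dst = ByteBuffer.allocate(src.remaining());
            while (src.hasRemaining()) {
                byte b = src.get(); // reading from src advances its position, i.e. consumes it
                dst.put((byte) Character.toUpperCase((char) b));
            }
            dst.flip(); // ready to be read, as the interface requires
            return dst;
        }
    }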
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/FilterByteChannel.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Collection; + +import org.tmatesoft.hg.util.Adaptable; +import org.tmatesoft.hg.util.ByteChannel; +import org.tmatesoft.hg.util.CancelledException; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class FilterByteChannel implements ByteChannel, Adaptable { + private final Filter[] filters; + private final ByteChannel delegate; + + public FilterByteChannel(ByteChannel delegateChannel, Collection<Filter> filtersToApply) { + if (delegateChannel == null || filtersToApply == null) { + throw new IllegalArgumentException(); + } + delegate = delegateChannel; + filters = filtersToApply.toArray(new Filter[filtersToApply.size()]); + } + + public int write(ByteBuffer buffer) throws IOException, CancelledException { + final int srcPos = buffer.position(); + ByteBuffer processed = buffer; + for (Filter f : filters) { + // each next filter consumes not more than previous + // hence total consumed equals position shift in the original buffer + processed = f.filter(processed); + } + delegate.write(processed); + return buffer.position() - srcPos; // consumed as much from original buffer + } + + // adapters or implemented interfaces of the original class shall not be obfuscated by filter + public <T> T getAdapter(Class<T> adapterClass) { + if (delegate instanceof Adaptable) { + return ((Adaptable) delegate).getAdapter(adapterClass); + } + if (adapterClass != null && adapterClass.isInstance(delegate)) { + return adapterClass.cast(delegate); + } + return null; + } +}
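FilterByteChannel in action with an inline filter and a ByteArrayChannel sink; the returned count is derived from how far the filter chain and the delegate advanced the original buffer (here it is consumed fully).

    import java.nio.ByteBuffer;
    import java.util.Collections;
    import org.tmatesoft.hg.internal.ByteArrayChannel;
    import org.tmatesoft.hg.internal.Filter;
    import org.tmatesoft.hg.internal.FilterByteChannel;

    public class FilterByteChannelExample {
        public static void main(String[] args) throws Exception {
            ByteArrayChannel sink = new ByteArrayChannel();
            Filter lowercase = new Filter() { // trivial in-place filter
                public ByteBuffer filter(ByteBuffer src) {
                    for (int i = src.position(); i < src.limit(); i++) {
                        src.put(i, (byte) Character.toLowerCase((char) src.get(i)));
                    }
                    return src; // returning the original buffer is explicitly allowed
                }
            };
            FilterByteChannel ch = new FilterByteChannel(sink, Collections.singletonList(lowercase));
            int consumed = ch.write(ByteBuffer.wrap("Hello WORLD".getBytes()));
            System.out.println(consumed);                    // 11
            System.out.println(new String(sink.toArray()));  // hello world
        }
    }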
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/FilterDataAccess.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.IOException; + + +/** + * XXX Perhaps, DataAccessSlice? Unlike FilterInputStream, we limit amount of data read from DataAccess being filtered. + * + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class FilterDataAccess extends DataAccess { + private final DataAccess dataAccess; + private final long offset; + private final int length; + private int count; + + public FilterDataAccess(DataAccess dataAccess, long offset, int length) { + this.dataAccess = dataAccess; + this.offset = offset; + this.length = length; + count = length; + } + + protected int available() { + return count; + } + + @Override + public FilterDataAccess reset() throws IOException { + count = length; + return this; + } + + @Override + public boolean isEmpty() { + return count <= 0; + } + + @Override + public long length() { + return length; + } + + @Override + public void seek(long localOffset) throws IOException { + if (localOffset < 0 || localOffset > length) { + throw new IllegalArgumentException(); + } + dataAccess.seek(offset + localOffset); + count = (int) (length - localOffset); + } + + @Override + public void skip(int bytes) throws IOException { + int newCount = count - bytes; + if (newCount < 0 || newCount > length) { + throw new IllegalArgumentException(); + } + seek(length - newCount); + /* + can't use next code because don't want to rewind backing DataAccess on reset() + i.e. this.reset() modifies state of this instance only, while filtered DA may go further. + Only actual this.skip/seek/read would rewind it to desired position + dataAccess.skip(bytes); + count = newCount; + */ + + } + + @Override + public byte readByte() throws IOException { + if (count <= 0) { + throw new IllegalArgumentException("Underflow"); // XXX be descriptive + } + if (count == length) { + dataAccess.seek(offset); + } + count--; + return dataAccess.readByte(); + } + + @Override + public void readBytes(byte[] b, int off, int len) throws IOException { + if (count <= 0 || len > count) { + throw new IllegalArgumentException("Underflow"); // XXX be descriptive + } + if (count == length) { + dataAccess.seek(offset); + } + dataAccess.readBytes(b, off, len); + count -= len; + } + + // done shall be no-op, as we have no idea what's going on with DataAccess we filter +}
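FilterDataAccess as a slice over another DataAccess, sketched with an in-memory source; note that reset() rewinds only the slice, and the underlying access is repositioned lazily on the next read.

    import org.tmatesoft.hg.internal.ByteArrayDataAccess;
    import org.tmatesoft.hg.internal.DataAccess;
    import org.tmatesoft.hg.internal.FilterDataAccess;

    public class FilterDataAccessExample {
        public static void main(String[] args) throws Exception {
            byte[] whole = { 1, 2, 3, 4, 5, 6, 7, 8 };
            DataAccess underlying = new ByteArrayDataAccess(whole);
            DataAccess slice = new FilterDataAccess(underlying, 2, 4); // exposes bytes [2..6)
            System.out.println(slice.length());   // 4
            System.out.println(slice.readByte()); // 3
            System.out.println(slice.readByte()); // 4
            slice.reset();                        // rewinds the slice only
            System.out.println(slice.readByte()); // 3
        }
    }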
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/InflaterDataAccess.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,164 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.EOFException; +import java.io.IOException; +import java.util.zip.DataFormatException; +import java.util.zip.Inflater; +import java.util.zip.ZipException; + + +/** + * DataAccess counterpart for InflaterInputStream. + * XXX is it really needed to be subclass of FilterDataAccess? + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class InflaterDataAccess extends FilterDataAccess { + + private final Inflater inflater; + private final byte[] buffer; + private final byte[] singleByte = new byte[1]; + private int decompressedPos = 0; + private int decompressedLength = -1; + + public InflaterDataAccess(DataAccess dataAccess, long offset, int length) { + this(dataAccess, offset, length, new Inflater(), 512); + } + + public InflaterDataAccess(DataAccess dataAccess, long offset, int length, Inflater inflater, int bufSize) { + super(dataAccess, offset, length); + this.inflater = inflater; + buffer = new byte[bufSize]; + } + + @Override + public InflaterDataAccess reset() throws IOException { + super.reset(); + inflater.reset(); + decompressedPos = 0; + return this; + } + + @Override + protected int available() { + throw new IllegalStateException("Can't tell how much uncompressed data left"); + } + + @Override + public boolean isEmpty() { + return super.available() <= 0 && inflater.finished(); // and/or inflater.getRemaining() <= 0 ? + } + + @Override + public long length() { + if (decompressedLength != -1) { + return decompressedLength; + } + int c = 0; + try { + int oldPos = decompressedPos; + while (!isEmpty()) { + readByte(); + c++; + } + decompressedLength = c + oldPos; + reset(); + seek(oldPos); + return decompressedLength; + } catch (IOException ex) { + ex.printStackTrace(); // FIXME log error + decompressedLength = -1; // better luck next time? + return 0; + } + } + + @Override + public void seek(long localOffset) throws IOException { + if (localOffset < 0 /* || localOffset >= length() */) { + throw new IllegalArgumentException(); + } + if (localOffset >= decompressedPos) { + skip((int) (localOffset - decompressedPos)); + } else { + reset(); + skip((int) localOffset); + } + } + + @Override + public void skip(int bytes) throws IOException { + if (bytes < 0) { + bytes += decompressedPos; + if (bytes < 0) { + throw new IOException("Underflow. Rewind past start of the slice."); + } + reset(); + // fall-through + } + while (!isEmpty() && bytes > 0) { + readByte(); + bytes--; + } + if (bytes != 0) { + throw new IOException("Underflow. 
Rewind past end of the slice"); + } + } + + @Override + public byte readByte() throws IOException { + readBytes(singleByte, 0, 1); + return singleByte[0]; + } + + @Override + public void readBytes(byte[] b, int off, int len) throws IOException { + try { + int n; + while (len > 0) { + while ((n = inflater.inflate(b, off, len)) == 0) { + // FIXME few last bytes (checksum?) may be ignored by inflater, thus inflate may return 0 in + // perfectly legal conditions (when all data already expanded, but there are still some bytes + // in the input stream + if (inflater.finished() || inflater.needsDictionary()) { + throw new EOFException(); + } + if (inflater.needsInput()) { + // fill: + int toRead = super.available(); + if (toRead > buffer.length) { + toRead = buffer.length; + } + super.readBytes(buffer, 0, toRead); + inflater.setInput(buffer, 0, toRead); + } + } + off += n; + len -= n; + decompressedPos += n; + if (len == 0) { + return; // filled + } + } + } catch (DataFormatException e) { + String s = e.getMessage(); + throw new ZipException(s != null ? s : "Invalid ZLIB data format"); + } + } +}
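A round-trip sketch for InflaterDataAccess: a zlib stream is produced with the JDK Deflater (the format revlog chunk data is stored in) and read back through the decompressing DataAccess.

    import java.util.zip.Deflater;
    import org.tmatesoft.hg.internal.ByteArrayDataAccess;
    import org.tmatesoft.hg.internal.DataAccess;
    import org.tmatesoft.hg.internal.InflaterDataAccess;

    public class InflaterDataAccessExample {
        public static void main(String[] args) throws Exception {
            byte[] original = "revlog chunks are zlib-compressed".getBytes();
            Deflater deflater = new Deflater();
            deflater.setInput(original);
            deflater.finish();
            byte[] compressed = new byte[original.length + 64];
            int compressedLen = deflater.deflate(compressed);
            deflater.end();

            DataAccess packed = new ByteArrayDataAccess(compressed, 0, compressedLen);
            DataAccess unpacked = new InflaterDataAccess(packed, 0, compressedLen);
            byte[] restored = new byte[original.length];
            unpacked.readBytes(restored, 0, restored.length);
            System.out.println(new String(restored)); // round-trips to the original text
        }
    }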
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/Internals.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import static org.tmatesoft.hg.internal.RequiresFile.*; + +import java.util.ArrayList; +import java.util.List; + +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.PathRewrite; + +/** + * Fields/members that shall not be visible + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Internals { + + private int requiresFlags = 0; + private List<Filter.Factory> filterFactories; + + + public Internals() { + } + + public/*for tests, otherwise pkg*/ void setStorageConfig(int version, int flags) { + requiresFlags = flags; + } + + // XXX perhaps, should keep both fields right here, not in the HgRepository + public PathRewrite buildDataFilesHelper() { + return new StoragePathHelper((requiresFlags & STORE) != 0, (requiresFlags & FNCACHE) != 0, (requiresFlags & DOTENCODE) != 0); + } + + public PathRewrite buildRepositoryFilesHelper() { + if ((requiresFlags & STORE) != 0) { + return new PathRewrite() { + public String rewrite(String path) { + return "store/" + path; + } + }; + } else { + return new PathRewrite() { + public String rewrite(String path) { + //no-op + return path; + } + }; + } + } + + public ConfigFile newConfigFile() { + return new ConfigFile(); + } + + public List<Filter.Factory> getFilters(HgRepository hgRepo, ConfigFile cfg) { + if (filterFactories == null) { + filterFactories = new ArrayList<Filter.Factory>(); + if (cfg.hasEnabledExtension("eol")) { + NewlineFilter.Factory ff = new NewlineFilter.Factory(); + ff.initialize(hgRepo, cfg); + filterFactories.add(ff); + } + if (cfg.hasEnabledExtension("keyword")) { + KeywordFilter.Factory ff = new KeywordFilter.Factory(); + ff.initialize(hgRepo, cfg); + filterFactories.add(ff); + } + } + return filterFactories; + } +}
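A small sketch of the PathRewrite helpers produced by Internals: with no requires flags set the repository-files helper is a no-op, while a repository with the store requirement would get the "store/" prefix from the other branch of buildRepositoryFilesHelper().

    import org.tmatesoft.hg.internal.Internals;
    import org.tmatesoft.hg.util.PathRewrite;

    public class InternalsExample {
        public static void main(String[] args) {
            Internals internals = new Internals(); // requires flags default to 0
            PathRewrite plain = internals.buildRepositoryFilesHelper();
            System.out.println(plain.rewrite("00changelog.i")); // 00changelog.i, unchanged
            // after setStorageConfig(..) with the STORE flag the same call
            // would yield "store/00changelog.i"
        }
    }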
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/KeywordFilter.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,323 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Map; +import java.util.TreeMap; + +import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.Path; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class KeywordFilter implements Filter { + // present implementation is stateless, however, filter use pattern shall not assume that. In fact, Factory may us that + private final HgRepository repo; + private final boolean isExpanding; + private final TreeMap<String,String> keywords; + private final int minBufferLen; + private final Path path; + private RawChangeset latestFileCset; + + /** + * + * @param hgRepo + * @param path + * @param expand <code>true</code> to expand keywords, <code>false</code> to shrink + */ + private KeywordFilter(HgRepository hgRepo, Path p, boolean expand) { + repo = hgRepo; + path = p; + isExpanding = expand; + keywords = new TreeMap<String,String>(); + keywords.put("Id", "Id"); + keywords.put("Revision", "Revision"); + keywords.put("Author", "Author"); + keywords.put("Date", "Date"); + keywords.put("LastChangedRevision", "LastChangedRevision"); + keywords.put("LastChangedBy", "LastChangedBy"); + keywords.put("LastChangedDate", "LastChangedDate"); + keywords.put("Source", "Source"); + keywords.put("Header", "Header"); + + int l = 0; + for (String s : keywords.keySet()) { + if (s.length() > l) { + l = s.length(); + } + } + // FIXME later may implement #filter() not to read full kw value (just "$kw:"). However, limit of maxLen + 2 would keep valid. + // for buffers less then minBufferLen, there are chances #filter() implementation would never end + // (i.e. for input "$LongestKey"$ + minBufferLen = l + 2 + (isExpanding ? 0 : 120 /*any reasonable constant for max possible kw value length*/); + } + + /** + * @param src buffer ready to be read + * @return buffer ready to be read and original buffer's position modified to reflect consumed bytes. 
IOW, if source buffer + * on return has remaining bytes, they are assumed not-read (not processed) and next chunk passed to filter is supposed to + * start with them + */ + public ByteBuffer filter(ByteBuffer src) { + if (src.capacity() < minBufferLen) { + throw new IllegalStateException(String.format("Need buffer of at least %d bytes to ensure filter won't hang", minBufferLen)); + } + ByteBuffer rv = null; + int keywordStart = -1; + int x = src.position(); + int copyFrom = x; // needs to be updated each time we copy a slice, but not each time we modify source index (x) + while (x < src.limit()) { + if (keywordStart == -1) { + int i = indexOf(src, '$', x, false); + if (i == -1) { + if (rv == null) { + return src; + } else { + copySlice(src, copyFrom, src.limit(), rv); + rv.flip(); + src.position(src.limit()); + return rv; + } + } + keywordStart = i; + // fall-through + } + if (keywordStart >= 0) { + int i = indexOf(src, '$', keywordStart+1, true); + if (i == -1) { + // end of buffer reached + if (rv == null) { + if (keywordStart == x) { + // FIXME in fact, x might be equal to keywordStart and to src.position() here ('$' is first character in the buffer, + // and there are no other '$' not eols till the end of the buffer). This would lead to deadlock (filter won't consume any + // bytes). To prevent this, either shall copy bytes [keywordStart..buffer.limit()) to local buffer and use it on the next invocation, + // or add lookup of the keywords right after first '$' is found (do not wait for closing '$'). For now, large enough src buffer would be sufficient + // not to run into such situation + throw new IllegalStateException("Try src buffer of a greater size"); + } + rv = ByteBuffer.allocate(keywordStart - copyFrom); + } + // copy all from source till latest possible kw start + copySlice(src, copyFrom, keywordStart, rv); + rv.flip(); + // and tell caller we've consumed only to the potential kw start + src.position(keywordStart); + return rv; + } else if (src.get(i) == '$') { + // end of keyword, or start of a new one. + String keyword; + if ((keyword = matchKeyword(src, keywordStart, i)) != null) { + if (rv == null) { + // src.remaining(), not .capacity because src is not read, and remaining represents + // actual bytes count, while capacity - potential. + // Factor of 4 is pure guess and a HACK, need to be fixed with re-expanding buffer on demand + rv = ByteBuffer.allocate(isExpanding ? src.remaining() * 4 : src.remaining()); + } + copySlice(src, copyFrom, keywordStart+1, rv); + rv.put(keyword.getBytes()); + if (isExpanding) { + rv.put((byte) ':'); + rv.put((byte) ' '); + expandKeywordValue(keyword, rv); + rv.put((byte) ' '); + } + rv.put((byte) '$'); + keywordStart = -1; + x = i+1; + copyFrom = x; + continue; + } else { + if (rv != null) { + // we've already did some substitution, thus need to copy bytes we've scanned. + copySlice(src, x, i, rv); + copyFrom = i; + } // no else in attempt to avoid rv creation if no real kw would be found + keywordStart = i; + x = i; // '$' at i wasn't consumed, hence x points to i, not i+1. 
This is to avoid problems with case: "sdfsd $ asdfs $Id$ sdf" + continue; + } + } else { + assert src.get(i) == '\n' || src.get(i) == '\r'; + // line break + if (rv != null) { + copySlice(src, x, i+1, rv); + copyFrom = i+1; + } + x = i+1; + keywordStart = -1; // Wasn't keyword, really + continue; // try once again + } + } + } + if (keywordStart != -1) { + if (rv == null) { + // no expansion happened yet, and we have potential kw start + rv = ByteBuffer.allocate(keywordStart - src.position()); + copySlice(src, src.position(), keywordStart, rv); + } + src.position(keywordStart); + } + if (rv != null) { + rv.flip(); + return rv; + } + return src; + } + + /** + * @param keyword + * @param rv + */ + private void expandKeywordValue(String keyword, ByteBuffer rv) { + if ("Id".equals(keyword)) { + rv.put(identityString().getBytes()); + } else if ("Revision".equals(keyword)) { + rv.put(revision().getBytes()); + } else if ("Author".equals(keyword)) { + rv.put(username().getBytes()); + } else if ("Date".equals(keyword)) { + rv.put(date().getBytes()); + } else { + throw new IllegalStateException(String.format("Keyword %s is not yet supported", keyword)); + } + } + + private String matchKeyword(ByteBuffer src, int kwStart, int kwEnd) { + assert kwEnd - kwStart - 1 > 0; + assert src.get(kwStart) == src.get(kwEnd) && src.get(kwEnd) == '$'; + char[] chars = new char[kwEnd - kwStart - 1]; + int i; + for (i = 0; i < chars.length; i++) { + char c = (char) src.get(kwStart + 1 + i); + if (c == ':') { + break; + } + chars[i] = c; + } + String kw = new String(chars, 0, i); +// XXX may use subMap to look up keywords based on few available characters (not waiting till closing $) +// System.out.println(keywords.subMap("I", "J")); +// System.out.println(keywords.subMap("A", "B")); +// System.out.println(keywords.subMap("Au", "B")); + return keywords.get(kw); + } + + // copies part of the src buffer, [from..to). 
doesn't modify src position + static void copySlice(ByteBuffer src, int from, int to, ByteBuffer dst) { + if (to > src.limit()) { + throw new IllegalArgumentException("Bad right boundary"); + } + if (dst.remaining() < to - from) { + throw new IllegalArgumentException("Not enough room in the destination buffer"); + } + for (int i = from; i < to; i++) { + dst.put(src.get(i)); + } + } + + private static int indexOf(ByteBuffer b, char ch, int from, boolean newlineBreaks) { + for (int i = from; i < b.limit(); i++) { + byte c = b.get(i); + if (ch == c) { + return i; + } + if (newlineBreaks && (c == '\n' || c == '\r')) { + return i; + } + } + return -1; + } + + private String identityString() { + return String.format("%s,v %s %s %s", path, revision(), date(), username()); + } + + private String revision() { + // FIXME add cset's nodeid into Changeset class + int csetRev = repo.getFileNode(path).getChangesetLocalRevision(HgRepository.TIP); + return repo.getChangelog().getRevision(csetRev).shortNotation(); + } + + private String username() { + return getChangeset().user(); + } + + private String date() { + return String.format("%tY/%<tm/%<td %<tH:%<tM:%<tS", getChangeset().date()); + } + + private RawChangeset getChangeset() { + if (latestFileCset == null) { + // XXX consider use of ChangelogHelper + int csetRev = repo.getFileNode(path).getChangesetLocalRevision(HgRepository.TIP); + latestFileCset = repo.getChangelog().range(csetRev, csetRev).get(0); + } + return latestFileCset; + } + + public static class Factory implements Filter.Factory { + + private HgRepository repo; + private Path.Matcher matcher; + + public void initialize(HgRepository hgRepo, ConfigFile cfg) { + repo = hgRepo; + ArrayList<String> patterns = new ArrayList<String>(); + for (Map.Entry<String,String> e : cfg.getSection("keyword").entrySet()) { + if (!"ignore".equalsIgnoreCase(e.getValue())) { + patterns.add(e.getKey()); + } + } + matcher = new PathGlobMatcher(patterns.toArray(new String[patterns.size()])); + // TODO read and respect keyword patterns from [keywordmaps] + } + + public Filter create(Path path, Options opts) { + if (matcher.accept(path)) { + return new KeywordFilter(repo, path, opts.getDirection() == Filter.Direction.FromRepo); + } + return null; + } + } + +// +// public static void main(String[] args) throws Exception { +// FileInputStream fis = new FileInputStream(new File("/temp/kwoutput.txt")); +// FileOutputStream fos = new FileOutputStream(new File("/temp/kwoutput2.txt")); +// ByteBuffer b = ByteBuffer.allocate(256); +// KeywordFilter kwFilter = new KeywordFilter(false); +// while (fis.getChannel().read(b) != -1) { +// b.flip(); // get ready to be read +// ByteBuffer f = kwFilter.filter(b); +// fos.getChannel().write(f); // XXX in fact, f may not be fully consumed +// if (b.hasRemaining()) { +// b.compact(); +// } else { +// b.clear(); +// } +// } +// fis.close(); +// fos.flush(); +// fos.close(); +// } +}
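The commented-out main() above hints at the intended read/filter/write loop; below is that loop as a reusable sketch. KeywordFilter instances come from KeywordFilter.Factory (wired up through Internals.getFilters), and the chunk size must be at least the filter's minimal buffer length or filter() refuses to run. Any leftover bytes after the final read are not flushed here, mirroring the original sketch.

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.ReadableByteChannel;
    import java.nio.channels.WritableByteChannel;
    import org.tmatesoft.hg.internal.Filter;

    public class FilterPump {
        static void pump(ReadableByteChannel in, Filter filter, WritableByteChannel out, int chunkSize) throws IOException {
            ByteBuffer b = ByteBuffer.allocate(chunkSize);
            while (in.read(b) != -1) {
                b.flip();                          // get ready to be read
                ByteBuffer filtered = filter.filter(b);
                while (filtered.hasRemaining()) {
                    out.write(filtered);           // drain whatever the filter produced
                }
                if (b.hasRemaining()) {
                    b.compact();                   // carry the unconsumed tail into the next chunk
                } else {
                    b.clear();
                }
            }
        }
    }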
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/NewlineFilter.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,269 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import static org.tmatesoft.hg.internal.Filter.Direction.FromRepo; +import static org.tmatesoft.hg.internal.Filter.Direction.ToRepo; +import static org.tmatesoft.hg.internal.KeywordFilter.copySlice; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Map; + +import org.tmatesoft.hg.repo.HgInternals; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.Path; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class NewlineFilter implements Filter { + + // if allowInconsistent is true, filter simply pass incorrect newline characters (single \r or \r\n on *nix and single \n on Windows) as is, + // i.e. doesn't try to convert them into appropriate newline characters. XXX revisit if Keyword extension behaves differently + private final boolean allowInconsistent; + private final boolean winToNix; + + private NewlineFilter(boolean failIfInconsistent, int transform) { + winToNix = transform == 0; + allowInconsistent = !failIfInconsistent; + } + + public ByteBuffer filter(ByteBuffer src) { + if (winToNix) { + return win2nix(src); + } else { + return nix2win(src); + } + } + + private ByteBuffer win2nix(ByteBuffer src) { + int x = src.position(); // source index + int lookupStart = x; + ByteBuffer dst = null; + while (x < src.limit()) { + // x, lookupStart, ir and in are absolute positions within src buffer, which is never read with modifying operations + int ir = indexOf('\r', src, lookupStart); + int in = indexOf('\n', src, lookupStart); + if (ir == -1) { + if (in == -1 || allowInconsistent) { + if (dst != null) { + copySlice(src, x, src.limit(), dst); + x = src.limit(); // consumed all + } + break; + } else { + fail(src, in); + } + } + // in == -1 while ir != -1 may be valid case if ir is the last char of the buffer, we check below for that + if (in != -1 && in != ir+1 && !allowInconsistent) { + fail(src, in); + } + if (dst == null) { + dst = ByteBuffer.allocate(src.remaining()); + } + copySlice(src, x, ir, dst); + if (ir+1 == src.limit()) { + // last char of the buffer - + // consume src till that char and let next iteration work on it + x = ir; + break; + } + if (in != ir + 1) { + x = ir+1; // generally in, but if allowInconsistent==true and \r is not followed by \n, then + // cases like "one \r two \r\n three" shall be processed correctly (second pair would be ignored if x==in) + lookupStart = ir+1; + } else { + x = in; + lookupStart = x+1; // skip \n for next lookup + } + } + src.position(x); // mark we've consumed up to x + return dst == null ? 
src : (ByteBuffer) dst.flip(); + } + + private ByteBuffer nix2win(ByteBuffer src) { + int x = src.position(); + ByteBuffer dst = null; + while (x < src.limit()) { + int in = indexOf('\n', src, x); + int ir = indexOf('\r', src, x, in == -1 ? src.limit() : in); + if (in == -1) { + if (ir == -1 || allowInconsistent) { + break; + } else { + fail(src, ir); + } + } else if (ir != -1 && !allowInconsistent) { + fail(src, ir); + } + + // x <= in < src.limit + // allowInconsistent && x <= ir < in || ir == -1 + if (dst == null) { + // buffer full of \n grows as much as twice in size + dst = ByteBuffer.allocate(src.remaining() * 2); + } + copySlice(src, x, in, dst); + if (ir == -1 || ir+1 != in) { + dst.put((byte) '\r'); + } // otherwise (ir!=-1 && ir+1==in) we found \r\n pair, don't convert to \r\r\n + // we may copy \n at src[in] on the next iteration, but would need extra lookupIndex variable then. + dst.put((byte) '\n'); + x = in+1; + } + src.position(x); + return dst == null ? src : (ByteBuffer) dst.flip(); + } + + + private void fail(ByteBuffer b, int pos) { + throw new RuntimeException(String.format("Inconsistent newline characters in the stream (char 0x%x, local index:%d)", b.get(pos), pos)); + } + + private static int indexOf(char ch, ByteBuffer b, int from) { + return indexOf(ch, b, from, b.limit()); + } + + // looks up in buf[from..to) + private static int indexOf(char ch, ByteBuffer b, int from, int to) { + for (int i = from; i < to; i++) { + byte c = b.get(i); + if (ch == c) { + return i; + } + } + return -1; + } + + public static class Factory implements Filter.Factory { + private boolean failIfInconsistent = true; + private Path.Matcher lfMatcher; + private Path.Matcher crlfMatcher; + private Path.Matcher binMatcher; + private Path.Matcher nativeMatcher; + private String nativeRepoFormat; + private String nativeOSFormat; + + public void initialize(HgRepository hgRepo, ConfigFile cfg) { + failIfInconsistent = cfg.getBoolean("eol", "only-consistent", true); + File cfgFile = new File(new HgInternals(hgRepo).getRepositoryDir().getParentFile(), ".hgeol"); + if (!cfgFile.canRead()) { + return; + } + // XXX if .hgeol is not checked out, we may get it from repository +// HgDataFile cfgFileNode = hgRepo.getFileNode(".hgeol"); +// if (!cfgFileNode.exists()) { +// return; +// } + // XXX perhaps, add HgDataFile.hasWorkingCopy and workingCopyContent()? + ConfigFile hgeol = new ConfigFile(); + hgeol.addLocation(cfgFile); + nativeRepoFormat = hgeol.getSection("repository").get("native"); + if (nativeRepoFormat == null) { + nativeRepoFormat = "LF"; + } + final String os = System.getProperty("os.name"); // XXX need centralized set of properties + nativeOSFormat = os.indexOf("Windows") != -1 ? 
"CRLF" : "LF"; + // I assume pattern ordering in .hgeol is not important + ArrayList<String> lfPatterns = new ArrayList<String>(); + ArrayList<String> crlfPatterns = new ArrayList<String>(); + ArrayList<String> nativePatterns = new ArrayList<String>(); + ArrayList<String> binPatterns = new ArrayList<String>(); + for (Map.Entry<String,String> e : hgeol.getSection("patterns").entrySet()) { + if ("CRLF".equals(e.getValue())) { + crlfPatterns.add(e.getKey()); + } else if ("LF".equals(e.getValue())) { + lfPatterns.add(e.getKey()); + } else if ("native".equals(e.getValue())) { + nativePatterns.add(e.getKey()); + } else if ("BIN".equals(e.getValue())) { + binPatterns.add(e.getKey()); + } else { + System.out.printf("Can't recognize .hgeol entry: %s for %s", e.getValue(), e.getKey()); // FIXME log warning + } + } + if (!crlfPatterns.isEmpty()) { + crlfMatcher = new PathGlobMatcher(crlfPatterns.toArray(new String[crlfPatterns.size()])); + } + if (!lfPatterns.isEmpty()) { + lfMatcher = new PathGlobMatcher(lfPatterns.toArray(new String[lfPatterns.size()])); + } + if (!binPatterns.isEmpty()) { + binMatcher = new PathGlobMatcher(binPatterns.toArray(new String[binPatterns.size()])); + } + if (!nativePatterns.isEmpty()) { + nativeMatcher = new PathGlobMatcher(nativePatterns.toArray(new String[nativePatterns.size()])); + } + } + + public Filter create(Path path, Options opts) { + if (binMatcher == null && crlfMatcher == null && lfMatcher == null && nativeMatcher == null) { + // not initialized - perhaps, no .hgeol found + return null; + } + if (binMatcher != null && binMatcher.accept(path)) { + return null; + } + if (crlfMatcher != null && crlfMatcher.accept(path)) { + return new NewlineFilter(failIfInconsistent, 1); + } else if (lfMatcher != null && lfMatcher.accept(path)) { + return new NewlineFilter(failIfInconsistent, 0); + } else if (nativeMatcher != null && nativeMatcher.accept(path)) { + if (nativeOSFormat.equals(nativeRepoFormat)) { + return null; + } + if (opts.getDirection() == FromRepo) { + int transform = "CRLF".equals(nativeOSFormat) ? 1 : 0; + return new NewlineFilter(failIfInconsistent, transform); + } else if (opts.getDirection() == ToRepo) { + int transform = "CRLF".equals(nativeOSFormat) ? 0 : 1; + return new NewlineFilter(failIfInconsistent, transform); + } + return null; + } + return null; + } + } + + public static void main(String[] args) throws Exception { + FileInputStream fis = new FileInputStream(new File("/temp/design.lf.txt")); + FileOutputStream fos = new FileOutputStream(new File("/temp/design.newline.out")); + ByteBuffer b = ByteBuffer.allocate(12); + NewlineFilter nlFilter = new NewlineFilter(true, 1); + while (fis.getChannel().read(b) != -1) { + b.flip(); // get ready to be read + ByteBuffer f = nlFilter.filter(b); + fos.getChannel().write(f); // XXX in fact, f may not be fully consumed + if (b.hasRemaining()) { + b.compact(); + } else { + b.clear(); + } + } + fis.close(); + fos.flush(); + fos.close(); + } + +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/PathGlobMatcher.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.util.regex.PatternSyntaxException; + +import org.tmatesoft.hg.util.Path; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class PathGlobMatcher implements Path.Matcher { + + private final PathRegexpMatcher delegate; + + /** + * + * @param globPatterns + * @throws NullPointerException if argument is null + * @throws IllegalArgumentException if any of the patterns is not valid + */ + public PathGlobMatcher(String... globPatterns) { + String[] regexp = new String[globPatterns.length]; //deliberately let fail with NPE + int i = 0; + for (String s : globPatterns) { + regexp[i] = glob2regexp(s); + } + try { + delegate = new PathRegexpMatcher(regexp); + } catch (PatternSyntaxException ex) { + ex.printStackTrace(); + throw new IllegalArgumentException(ex); + } + } + + + // HgIgnore.glob2regex is similar, but IsIgnore solves slightly different task + // (need to match partial paths, e.g. for glob 'bin' shall match not only 'bin' folder, but also any path below it, + // which is not generally the case + private static String glob2regexp(String glob) { + int end = glob.length() - 1; + boolean needLineEndMatch = glob.charAt(end) != '*'; + while (end > 0 && glob.charAt(end) == '*') end--; // remove trailing * that are useless for Pattern.find() + StringBuilder sb = new StringBuilder(end*2); + if (glob.charAt(0) != '*') { + sb.append('^'); + } + for (int i = 0; i <= end; i++) { + char ch = glob.charAt(i); + if (ch == '*') { + if (glob.charAt(i+1) == '*') { // i < end because we've stripped any trailing * earlier + // any char, including path segment separator + sb.append(".*?"); + i++; + } else { + // just path segments + sb.append("[^/]*?"); + } + continue; + } else if (ch == '?') { + sb.append("[^/]"); + continue; + } else if (ch == '.' || ch == '\\') { + sb.append('\\'); + } + sb.append(ch); + } + if (needLineEndMatch) { + sb.append('$'); + } + return sb.toString(); + } + + public boolean accept(Path path) { + return delegate.accept(path); + } + +}
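glob2regexp above maps '**' to ".*?", a single '*' to "[^/]*?", '?' to "[^/]", escapes '.' and '\', and adds '^'/'$' anchors only when the pattern does not start or end with '*'. A small sketch, assuming those rules, of how the resulting expressions behave with Pattern.find(); the literal regex strings below are hand-translated for illustration.

import java.util.regex.Pattern;

public class GlobTranslationSketch {
    public static void main(String[] args) {
        // "docs/**": leading non-'*' adds '^', trailing '*' characters are stripped (useless for find())
        Pattern docs = Pattern.compile("^docs/");
        // "*.txt": leading '*' means no '^', '*' becomes "[^/]*?", '.' is escaped, '$' is appended
        Pattern txt = Pattern.compile("[^/]*?\\.txt$");
        System.out.println(docs.matcher("docs/api/readme.html").find()); // true
        System.out.println(txt.matcher("notes.txt").find());             // true
        System.out.println(txt.matcher("image.png").find());             // false
    }
}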
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/PathRegexpMatcher.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import org.tmatesoft.hg.util.Path; +import org.tmatesoft.hg.util.Path.Matcher; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class PathRegexpMatcher implements Matcher { + private Pattern[] patterns; + + // disjunction, matches if any pattern found + // uses pattern.find(), not pattern.matches() + public PathRegexpMatcher(Pattern... p) { + if (p == null) { + throw new IllegalArgumentException(); + } + patterns = p; + } + + public PathRegexpMatcher(String... p) throws PatternSyntaxException { + this(compile(p)); + } + + private static Pattern[] compile(String[] p) throws PatternSyntaxException { + // deliberately do no check for null, let it fail + Pattern[] rv = new Pattern[p.length]; + int i = 0; + for (String s : p) { + rv[i++] = Pattern.compile(s); + } + return rv; + } + + public boolean accept(Path path) { + for (Pattern p : patterns) { + if (p.matcher(path).find()) { + return true; + } + } + return false; + } +}
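PathRegexpMatcher is a plain disjunction: accept() returns true as soon as any of its patterns is found within the path (Pattern.find(), not matches()). A hedged usage sketch, assuming Path.create(String) behaves as it is used elsewhere in this changeset:

import org.tmatesoft.hg.internal.PathRegexpMatcher;
import org.tmatesoft.hg.util.Path;

public class PathRegexpMatcherDemo {
    public static void main(String[] args) {
        // Matches if ANY of the expressions is found somewhere in the path.
        PathRegexpMatcher m = new PathRegexpMatcher("^src/", "\\.java$");
        System.out.println(m.accept(Path.create("src/org/tmatesoft/hg/repo/HgBundle.java"))); // true
        System.out.println(m.accept(Path.create("docs/readme.txt")));                         // false
    }
}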
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/RelativePathRewrite.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.File; + +import org.tmatesoft.hg.util.PathRewrite; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class RelativePathRewrite implements PathRewrite { + + private final String rootPath; + + public RelativePathRewrite(File root) { + this(root.getPath()); + } + + public RelativePathRewrite(String rootPath) { + this.rootPath = rootPath; + } + + public String rewrite(String path) { + if (path != null && path.startsWith(rootPath)) { + if (path.length() == rootPath.length()) { + return ""; + } + return path.substring(rootPath.length() + 1); + } + return path; + } +}
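RelativePathRewrite strips the configured root plus one separator character, returns an empty string when the path equals the root, and passes anything outside the root through untouched. A small illustrative run; the paths are hypothetical:

import org.tmatesoft.hg.internal.RelativePathRewrite;

public class RelativePathRewriteDemo {
    public static void main(String[] args) {
        RelativePathRewrite rw = new RelativePathRewrite("/work/repo");
        System.out.println(rw.rewrite("/work/repo/src/Main.java")); // "src/Main.java" (root and one separator stripped)
        System.out.println(rw.rewrite("/work/repo"));               // "" (path equals the root)
        System.out.println(rw.rewrite("/other/file.txt"));          // returned as-is (does not start with the root)
    }
}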
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/RequiresFile.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class RequiresFile { + public static final int STORE = 1; + public static final int FNCACHE = 2; + public static final int DOTENCODE = 4; + + public RequiresFile() { + } + + public void parse(Internals repoImpl, File requiresFile) { + if (!requiresFile.exists()) { + return; + } + try { + boolean revlogv1 = false; + boolean store = false; + boolean fncache = false; + boolean dotencode = false; + BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(requiresFile))); + String line; + while ((line = br.readLine()) != null) { + revlogv1 |= "revlogv1".equals(line); + store |= "store".equals(line); + fncache |= "fncache".equals(line); + dotencode |= "dotencode".equals(line); + } + int flags = 0; + flags += store ? 1 : 0; + flags += fncache ? 2 : 0; + flags += dotencode ? 4 : 0; + repoImpl.setStorageConfig(revlogv1 ? 1 : 0, flags); + } catch (IOException ex) { + ex.printStackTrace(); // FIXME log + } + } +}
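RequiresFile.parse reduces the .hg/requires lines to a revlogv1 flag plus a bit mask (store=1, fncache=2, dotencode=4) that is handed to Internals.setStorageConfig. A minimal sketch of that flag composition, assuming a typical modern requires file; the helper below is illustrative only:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class RequiresFlagsSketch {
    // Mirrors the composition above: store -> 1 (STORE), fncache -> 2 (FNCACHE), dotencode -> 4 (DOTENCODE).
    static int flags(BufferedReader requires) throws IOException {
        int flags = 0;
        String line;
        while ((line = requires.readLine()) != null) {
            if ("store".equals(line)) {
                flags |= 1;
            } else if ("fncache".equals(line)) {
                flags |= 2;
            } else if ("dotencode".equals(line)) {
                flags |= 4;
            }
        }
        return flags;
    }

    public static void main(String[] args) throws IOException {
        // A typical modern repository lists all three entries along with revlogv1.
        String requires = "revlogv1\nstore\nfncache\ndotencode\n";
        System.out.println(flags(new BufferedReader(new StringReader(requires)))); // 7
    }
}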
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/RevlogDump.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.io.BufferedInputStream; +import java.io.DataInput; +import java.io.DataInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.math.BigInteger; +import java.util.zip.Inflater; + +/** + * Utility to test/debug/troubleshoot + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class RevlogDump { + + /** + * Takes 3 command line arguments - + * repository path, + * path to index file (i.e. store/data/hello.c.i) in the repository (relative) + * and "dumpData" whether to print actual content or just revlog headers + */ + public static void main(String[] args) throws Exception { + String repo = "/temp/hg/hello/.hg/"; + String filename = "store/00changelog.i"; +// String filename = "store/data/hello.c.i"; +// String filename = "store/data/docs/readme.i"; + boolean dumpData = true; + if (args.length > 1) { + repo = args[0]; + filename = args[1]; + dumpData = args.length > 2 ? 
"dumpData".equals(args[2]) : false; + } + // + DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(new File(repo + filename)))); + DataInput di = dis; + dis.mark(10); + int versionField = di.readInt(); + dis.reset(); + final int INLINEDATA = 1 << 16; + + boolean inlineData = (versionField & INLINEDATA) != 0; + System.out.printf("%#8x, inline: %b\n", versionField, inlineData); + System.out.println("Index Offset Flags Packed Actual Base Rev Link Rev Parent1 Parent2 nodeid"); + int entryCount = 0; + while (dis.available() > 0) { + long l = di.readLong(); + long offset = l >>> 16; + int flags = (int) (l & 0X0FFFF); + int compressedLen = di.readInt(); + int actualLen = di.readInt(); + int baseRevision = di.readInt(); + int linkRevision = di.readInt(); + int parent1Revision = di.readInt(); + int parent2Revision = di.readInt(); + byte[] buf = new byte[32]; + di.readFully(buf, 12, 20); + dis.skip(12); + System.out.printf("%4d:%14d %6X %10d %10d %10d %10d %8d %8d %040x\n", entryCount, offset, flags, compressedLen, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, new BigInteger(buf)); + if (inlineData) { + String resultString; + byte[] data = new byte[compressedLen]; + di.readFully(data); + if (data[0] == 0x78 /* 'x' */) { + Inflater zlib = new Inflater(); + zlib.setInput(data, 0, compressedLen); + byte[] result = new byte[actualLen*2]; + int resultLen = zlib.inflate(result); + zlib.end(); + resultString = new String(result, 0, resultLen, "UTF-8"); + } else if (data[0] == 0x75 /* 'u' */) { + resultString = new String(data, 1, data.length - 1, "UTF-8"); + } else { + resultString = new String(data); + } + if (dumpData) { + System.out.println(resultString); + } + entryCount++; + } + } + dis.close(); + // + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/RevlogStream.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,431 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.repo.HgRepository; + + +/** + * ? Single RevlogStream per file per repository with accessor to record access session (e.g. with back/forward operations), + * or numerous RevlogStream with separate representation of the underlaying data (cached, lazy ChunkStream)? + * + * @see http://mercurial.selenic.com/wiki/Revlog + * @see http://mercurial.selenic.com/wiki/RevlogNG + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class RevlogStream { + + private List<IndexEntry> index; // indexed access highly needed + private boolean inline = false; + private final File indexFile; + private final DataAccessProvider dataAccess; + + // if we need anything else from HgRepo, might replace DAP parameter with HgRepo and query it for DAP. + public RevlogStream(DataAccessProvider dap, File indexFile) { + this.dataAccess = dap; + this.indexFile = indexFile; + } + + /*package*/ DataAccess getIndexStream() { + return dataAccess.create(indexFile); + } + + /*package*/ DataAccess getDataStream() { + final String indexName = indexFile.getName(); + File dataFile = new File(indexFile.getParentFile(), indexName.substring(0, indexName.length() - 1) + "d"); + return dataAccess.create(dataFile); + } + + public int revisionCount() { + initOutline(); + return index.size(); + } + + public int dataLength(int revision) { + // XXX in fact, use of iterate() instead of this implementation may be quite reasonable. + // + final int indexSize = revisionCount(); + DataAccess daIndex = getIndexStream(); // XXX may supply a hint that I'll need really few bytes of data (although at some offset) + if (revision == TIP) { + revision = indexSize - 1; + } + try { + int recordOffset = inline ? (int) index.get(revision).offset : revision * REVLOGV1_RECORD_SIZE; + daIndex.seek(recordOffset + 12); // 6+2+4 + int actualLen = daIndex.readInt(); + return actualLen; + } catch (IOException ex) { + ex.printStackTrace(); // log error. 
FIXME better handling + throw new IllegalStateException(ex); + } finally { + daIndex.done(); + } + } + + public byte[] nodeid(int revision) { + final int indexSize = revisionCount(); + if (revision == TIP) { + revision = indexSize - 1; + } + if (revision < 0 || revision >= indexSize) { + throw new IllegalArgumentException(Integer.toString(revision)); + } + DataAccess daIndex = getIndexStream(); + try { + int recordOffset = inline ? (int) index.get(revision).offset : revision * REVLOGV1_RECORD_SIZE; + daIndex.seek(recordOffset + 32); + byte[] rv = new byte[20]; + daIndex.readBytes(rv, 0, 20); + return rv; + } catch (IOException ex) { + ex.printStackTrace(); + throw new IllegalStateException(); + } finally { + daIndex.done(); + } + } + + public int linkRevision(int revision) { + final int last = revisionCount() - 1; + if (revision == TIP) { + revision = last; + } + if (revision < 0 || revision > last) { + throw new IllegalArgumentException(Integer.toString(revision)); + } + DataAccess daIndex = getIndexStream(); + try { + int recordOffset = inline ? (int) index.get(revision).offset : revision * REVLOGV1_RECORD_SIZE; + daIndex.seek(recordOffset + 20); + int linkRev = daIndex.readInt(); + return linkRev; + } catch (IOException ex) { + ex.printStackTrace(); + throw new IllegalStateException(); + } finally { + daIndex.done(); + } + } + + // Perhaps, RevlogStream should be limited to use of plain int revisions for access, + // while Nodeids should be kept on the level up, in Revlog. Guess, Revlog better keep + // map of nodeids, and once this comes true, we may get rid of this method. + // Unlike its counterpart, {@link Revlog#getLocalRevisionNumber()}, doesn't fail with exception if node not found, + /** + * @return integer in [0..revisionCount()) or {@link HgRepository#BAD_REVISION} if not found + */ + public int findLocalRevisionNumber(Nodeid nodeid) { + // XXX this one may be implemented with iterate() once there's mechanism to stop iterations + final int indexSize = revisionCount(); + DataAccess daIndex = getIndexStream(); + try { + byte[] nodeidBuf = new byte[20]; + for (int i = 0; i < indexSize; i++) { + daIndex.skip(8); + int compressedLen = daIndex.readInt(); + daIndex.skip(20); + daIndex.readBytes(nodeidBuf, 0, 20); + if (nodeid.equalsTo(nodeidBuf)) { + return i; + } + daIndex.skip(inline ? 12 + compressedLen : 12); + } + } catch (IOException ex) { + ex.printStackTrace(); // log error. FIXME better handling + throw new IllegalStateException(ex); + } finally { + daIndex.done(); + } + return BAD_REVISION; + } + + + private final int REVLOGV1_RECORD_SIZE = 64; + + // should be possible to use TIP, ALL, or -1, -2, -n notation of Hg + // ? boolean needsNodeid + public void iterate(int start, int end, boolean needData, Inspector inspector) { + initOutline(); + final int indexSize = index.size(); + if (indexSize == 0) { + return; + } + if (end == TIP) { + end = indexSize - 1; + } + if (start == TIP) { + start = indexSize - 1; + } + if (start < 0 || start >= indexSize) { + throw new IllegalArgumentException(String.format("Bad left range boundary %d in [0..%d]", start, indexSize-1)); + } + if (end < start || end >= indexSize) { + throw new IllegalArgumentException(String.format("Bad right range boundary %d in [0..%d]", end, indexSize-1)); + } + // XXX may cache [start .. 
end] from index with a single read (pre-read) + + DataAccess daIndex = null, daData = null; + daIndex = getIndexStream(); + if (needData && !inline) { + daData = getDataStream(); + } + try { + byte[] nodeidBuf = new byte[20]; + DataAccess lastUserData = null; + int i; + boolean extraReadsToBaseRev = false; + if (needData && index.get(start).baseRevision < start) { + i = index.get(start).baseRevision; + extraReadsToBaseRev = true; + } else { + i = start; + } + + daIndex.seek(inline ? index.get(i).offset : i * REVLOGV1_RECORD_SIZE); + for (; i <= end; i++ ) { + if (inline && needData) { + // inspector reading data (though FilterDataAccess) may have affected index position + daIndex.seek(index.get(i).offset); + } + long l = daIndex.readLong(); // 0 + @SuppressWarnings("unused") + long offset = l >>> 16; + @SuppressWarnings("unused") + int flags = (int) (l & 0X0FFFF); + int compressedLen = daIndex.readInt(); // +8 + int actualLen = daIndex.readInt(); // +12 + int baseRevision = daIndex.readInt(); // +16 + int linkRevision = daIndex.readInt(); // +20 + int parent1Revision = daIndex.readInt(); + int parent2Revision = daIndex.readInt(); + // Hg has 32 bytes here, uses 20 for nodeid, and keeps 12 last bytes empty + daIndex.readBytes(nodeidBuf, 0, 20); // +32 + daIndex.skip(12); + DataAccess userDataAccess = null; + if (needData) { + final byte firstByte; + long streamOffset = index.get(i).offset; + DataAccess streamDataAccess; + if (inline) { + streamDataAccess = daIndex; + streamOffset += REVLOGV1_RECORD_SIZE; // don't need to do seek as it's actual position in the index stream + } else { + streamDataAccess = daData; + daData.seek(streamOffset); + } + firstByte = streamDataAccess.readByte(); + if (firstByte == 0x78 /* 'x' */) { + userDataAccess = new InflaterDataAccess(streamDataAccess, streamOffset, compressedLen); + } else if (firstByte == 0x75 /* 'u' */) { + userDataAccess = new FilterDataAccess(streamDataAccess, streamOffset+1, compressedLen-1); + } else { + // XXX Python impl in fact throws exception when there's not 'x', 'u' or '0' + // but I don't see reason not to return data as is + userDataAccess = new FilterDataAccess(streamDataAccess, streamOffset, compressedLen); + } + // XXX + if (baseRevision != i) { // XXX not sure if this is the right way to detect a patch + // this is a patch + LinkedList<PatchRecord> patches = new LinkedList<PatchRecord>(); + while (!userDataAccess.isEmpty()) { + PatchRecord pr = PatchRecord.read(userDataAccess); +// System.out.printf("PatchRecord:%d %d %d\n", pr.start, pr.end, pr.len); + patches.add(pr); + } + userDataAccess.done(); + // + byte[] userData = apply(lastUserData, actualLen, patches); + userDataAccess = new ByteArrayDataAccess(userData); + } + } else { + if (inline) { + daIndex.skip(compressedLen); + } + } + if (!extraReadsToBaseRev || i >= start) { + inspector.next(i, actualLen, baseRevision, linkRevision, parent1Revision, parent2Revision, nodeidBuf, userDataAccess); + } + if (userDataAccess != null) { + userDataAccess.reset(); + if (lastUserData != null) { + lastUserData.done(); + } + lastUserData = userDataAccess; + } + } + } catch (IOException ex) { + throw new IllegalStateException(ex); // FIXME need better handling + } finally { + daIndex.done(); + if (daData != null) { + daData.done(); + } + } + } + + private void initOutline() { + if (index != null && !index.isEmpty()) { + return; + } + ArrayList<IndexEntry> res = new ArrayList<IndexEntry>(); + DataAccess da = getIndexStream(); + try { + int versionField = da.readInt(); + da.readInt(); // 
just to skip next 2 bytes of offset + flags + final int INLINEDATA = 1 << 16; + inline = (versionField & INLINEDATA) != 0; + long offset = 0; // first offset is always 0, thus Hg uses it for other purposes + while(true) { + int compressedLen = da.readInt(); + // 8+4 = 12 bytes total read here + @SuppressWarnings("unused") + int actualLen = da.readInt(); + int baseRevision = da.readInt(); + // 12 + 8 = 20 bytes read here +// int linkRevision = di.readInt(); +// int parent1Revision = di.readInt(); +// int parent2Revision = di.readInt(); +// byte[] nodeid = new byte[32]; + if (inline) { + res.add(new IndexEntry(offset + REVLOGV1_RECORD_SIZE * res.size(), baseRevision)); + da.skip(3*4 + 32 + compressedLen); // Check: 44 (skip) + 20 (read) = 64 (total RevlogNG record size) + } else { + res.add(new IndexEntry(offset, baseRevision)); + da.skip(3*4 + 32); + } + if (da.isEmpty()) { + // fine, done then + res.trimToSize(); + index = res; + break; + } else { + // start reading next record + long l = da.readLong(); + offset = l >>> 16; + } + } + } catch (IOException ex) { + ex.printStackTrace(); // log error + // too bad, no outline then. + index = Collections.emptyList(); + } finally { + da.done(); + } + + } + + + // perhaps, package-local or protected, if anyone else from low-level needs them + // XXX think over if we should keep offset in case of separate data file - we read the field anyway. Perhaps, distinct entry classes for Inline and non-inline indexes? + private static class IndexEntry { + public final long offset; // for separate .i and .d - copy of index record entry, for inline index - actual offset of the record in the .i file (record entry + revision * record size)) + //public final int length; // data past fixed record (need to decide whether including header size or not), and whether length is of compressed data or not + public final int baseRevision; + + public IndexEntry(long o, int baseRev) { + offset = o; + baseRevision = baseRev; + } + } + + // mpatch.c : apply() + // FIXME need to implement patch merge (fold, combine, gather and discard from aforementioned mpatch.[c|py]), also see Revlog and Mercurial PDF + public/*for HgBundle; until moved to better place*/static byte[] apply(DataAccess baseRevisionContent, int outcomeLen, List<PatchRecord> patch) throws IOException { + int last = 0, destIndex = 0; + if (outcomeLen == -1) { + outcomeLen = (int) baseRevisionContent.length(); + for (PatchRecord pr : patch) { + outcomeLen += pr.start - last + pr.len; + last = pr.end; + } + outcomeLen -= last; + last = 0; + } + byte[] rv = new byte[outcomeLen]; + for (PatchRecord pr : patch) { + baseRevisionContent.seek(last); + baseRevisionContent.readBytes(rv, destIndex, pr.start-last); + destIndex += pr.start - last; + System.arraycopy(pr.data, 0, rv, destIndex, pr.data.length); + destIndex += pr.data.length; + last = pr.end; + } + baseRevisionContent.seek(last); + baseRevisionContent.readBytes(rv, destIndex, (int) (baseRevisionContent.length() - last)); + return rv; + } + + // @see http://mercurial.selenic.com/wiki/BundleFormat, in Changelog group description + public static class PatchRecord { + /* + Given there are pr1 and pr2: + pr1.start to pr1.end will be replaced with pr's data (of pr1.len) + pr1.end to pr2.start gets copied from base + */ + public int start, end, len; + public byte[] data; + + // TODO consider PatchRecord that only records data position (absolute in data source), and acquires data as needed + private PatchRecord(int p1, int p2, int length, byte[] src) { + start = p1; + 
end = p2; + len = length; + data = src; + } + + /*package-local*/ static PatchRecord read(byte[] data, int offset) { + final int x = offset; // shorthand + int p1 = ((data[x] & 0xFF)<< 24) | ((data[x+1] & 0xFF) << 16) | ((data[x+2] & 0xFF) << 8) | (data[x+3] & 0xFF); + int p2 = ((data[x+4] & 0xFF) << 24) | ((data[x+5] & 0xFF) << 16) | ((data[x+6] & 0xFF) << 8) | (data[x+7] & 0xFF); + int len = ((data[x+8] & 0xFF) << 24) | ((data[x+9] & 0xFF) << 16) | ((data[x+10] & 0xFF) << 8) | (data[x+11] & 0xFF); + byte[] dataCopy = new byte[len]; + System.arraycopy(data, x+12, dataCopy, 0, len); + return new PatchRecord(p1, p2, len, dataCopy); + } + + public /*for HgBundle*/ static PatchRecord read(DataAccess da) throws IOException { + int p1 = da.readInt(); + int p2 = da.readInt(); + int len = da.readInt(); + byte[] src = new byte[len]; + da.readBytes(src, 0, len); + return new PatchRecord(p1, p2, len, src); + } + } + + // FIXME byte[] data might be too expensive, for few usecases it may be better to have intermediate Access object (when we don't need full data + // instantly - e.g. calculate hash, or comparing two revisions + public interface Inspector { + // XXX boolean retVal to indicate whether to continue? + // TODO specify nodeid and data length, and reuse policy (i.e. if revlog stream doesn't reuse nodeid[] for each call) + // implementers shall not invoke DataAccess.done(), it's accomplished by #iterate at appropraite moment + void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[/*20*/] nodeid, DataAccess data); + } +}
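RevlogStream.apply is the core of delta reconstruction: each PatchRecord replaces base[start, end) with its data, and the stretches between hunks are copied from the base revision verbatim. A hedged standalone sketch of the same copy-then-splice loop, without DataAccess and for in-memory byte arrays only:

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

public class MPatchSketch {
    // ranges[i] = {start, end} of the i-th hunk; data[i] is its replacement bytes.
    static byte[] apply(byte[] base, int[][] ranges, byte[][] data) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int last = 0;
        for (int i = 0; i < ranges.length; i++) {
            int start = ranges[i][0], end = ranges[i][1];
            out.write(base, last, start - last);   // copy the unchanged stretch of the base
            out.write(data[i], 0, data[i].length); // splice in the replacement bytes
            last = end;
        }
        out.write(base, last, base.length - last); // tail of the base after the last hunk
        return out.toByteArray();
    }

    public static void main(String[] args) {
        byte[] base = "abcdef".getBytes(StandardCharsets.US_ASCII);
        byte[] rv = apply(base,
                new int[][] { {2, 4} },
                new byte[][] { "XYZ".getBytes(StandardCharsets.US_ASCII) });
        System.out.println(new String(rv, StandardCharsets.US_ASCII)); // abXYZef
    }
}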
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/internal/StoragePathHelper.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.internal; + +import java.util.Arrays; +import java.util.TreeSet; + +import org.tmatesoft.hg.util.PathRewrite; + +/** + * @see http://mercurial.selenic.com/wiki/CaseFoldingPlan + * @see http://mercurial.selenic.com/wiki/fncacheRepoFormat + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +class StoragePathHelper implements PathRewrite { + + private final boolean store; + private final boolean fncache; + private final boolean dotencode; + + public StoragePathHelper(boolean isStore, boolean isFncache, boolean isDotencode) { + store = isStore; + fncache = isFncache; + dotencode = isDotencode; + } + + // FIXME document what path argument is, whether it includes .i or .d, and whether it's 'normalized' (slashes) or not. + // since .hg/store keeps both .i files and files without extension (e.g. fncache), guees, for data == false + // we shall assume path has extension + public String rewrite(String path) { + final String STR_STORE = "store/"; + final String STR_DATA = "data/"; + final String STR_DH = "dh/"; + final String reservedChars = "\\:*?\"<>|"; + char[] hexByte = new char[2]; + + path = path.replace(".hg/", ".hg.hg/").replace(".i/", ".i.hg/").replace(".d/", ".d.hg/"); + StringBuilder sb = new StringBuilder(path.length() << 1); + if (store || fncache) { + // encodefilename + for (int i = 0; i < path.length(); i++) { + final char ch = path.charAt(i); + if (ch >= 'a' && ch <= 'z') { + sb.append(ch); // POIRAE + } else if (ch >= 'A' && ch <= 'Z') { + sb.append('_'); + sb.append(Character.toLowerCase(ch)); // Perhaps, (char) (((int) ch) + 32)? Even better, |= 0x20? + } else if (reservedChars.indexOf(ch) != -1) { + sb.append('~'); + sb.append(toHexByte(ch, hexByte)); + } else if ((ch >= '~' /*126*/ && ch <= 255) || ch < ' ' /*32*/) { + sb.append('~'); + sb.append(toHexByte(ch, hexByte)); + } else if (ch == '_') { + sb.append('_'); + sb.append('_'); + } else { + sb.append(ch); + } + } + // auxencode + if (fncache) { + encodeWindowsDeviceNames(sb); + } + } + final int MAX_PATH_LEN = 120; + if (fncache && (sb.length() + STR_DATA.length() + ".i".length() > MAX_PATH_LEN)) { + String digest = new DigestHelper().sha1(STR_DATA, path, ".i").asHexString(); + final int DIR_PREFIX_LEN = 8; + // not sure why (-4) is here. 120 - 40 = up to 80 for path with ext. 
dh/ + ext(.i) = 3+2 + final int MAX_DIR_PREFIX = 8 * (DIR_PREFIX_LEN + 1) - 4; + sb = new StringBuilder(MAX_PATH_LEN); + for (int i = 0; i < path.length(); i++) { + final char ch = path.charAt(i); + if (ch >= 'a' && ch <= 'z') { + sb.append(ch); + } else if (ch >= 'A' && ch <= 'Z') { + sb.append((char) (ch | 0x20)); // lowercase + } else if (reservedChars.indexOf(ch) != -1) { + sb.append('~'); + sb.append(toHexByte(ch, hexByte)); + } else if ((ch >= '~' /*126*/ && ch <= 255) || ch < ' ' /*32*/) { + sb.append('~'); + sb.append(toHexByte(ch, hexByte)); + } else { + sb.append(ch); + } + } + encodeWindowsDeviceNames(sb); + int fnameStart = sb.lastIndexOf("/"); // since we rewrite file names, it never ends with slash (for dirs, I'd pass length-2); + StringBuilder completeHashName = new StringBuilder(MAX_PATH_LEN); + completeHashName.append(STR_STORE); + completeHashName.append(STR_DH); + if (fnameStart == -1) { + // no dirs, just long filename + sb.setLength(MAX_PATH_LEN - 40 /*digest.length()*/ - STR_DH.length() - ".i".length()); + completeHashName.append(sb); + } else { + StringBuilder sb2 = new StringBuilder(MAX_PATH_LEN); + int x = 0; + do { + int i = sb.indexOf("/", x); + final int sb2Len = sb2.length(); + if (i-x <= DIR_PREFIX_LEN) { // a b c d e f g h / + sb2.append(sb, x, i + 1); // with slash + } else { + sb2.append(sb, x, x + DIR_PREFIX_LEN); + // may unexpectedly end with bad character + final int last = sb2.length()-1; + char lastChar = sb2.charAt(last); + assert lastChar == sb.charAt(x + DIR_PREFIX_LEN - 1); + if (lastChar == '.' || lastChar == ' ') { + sb2.setCharAt(last, '_'); + } + sb2.append('/'); + } + if (sb2.length()-1 > MAX_DIR_PREFIX) { + sb2.setLength(sb2Len); // strip off last segment, it's too much + break; + } + x = i+1; + } while (x < fnameStart); + assert sb2.charAt(sb2.length() - 1) == '/'; + int left = MAX_PATH_LEN - sb2.length() - 40 /*digest.length()*/ - STR_DH.length() - ".i".length(); + assert left >= 0; + fnameStart++; // move from / to actual name + sb2.append(sb, fnameStart, fnameStart + left > sb.length() ? sb.length() : fnameStart+left); + completeHashName.append(sb2); + } + completeHashName.append(digest); + sb = completeHashName; + } else if (store) { + sb.insert(0, STR_STORE + STR_DATA); + } + sb.append(".i"); + return sb.toString(); + } + + private void encodeWindowsDeviceNames(StringBuilder sb) { + char[] hexByte = new char[2]; + int x = 0; // last segment start + final TreeSet<String> windowsReservedFilenames = new TreeSet<String>(); + windowsReservedFilenames.addAll(Arrays.asList("con prn aux nul com1 com2 com3 com4 com5 com6 com7 com8 com9 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9".split(" "))); + do { + int i = sb.indexOf("/", x); + if (i == -1) { + i = sb.length(); + } + // windows reserved filenames are at least of length 3 + if (i - x >= 3) { + boolean found = false; + if (i-x == 3 || i-x == 4) { + found = windowsReservedFilenames.contains(sb.subSequence(x, i)); + } else if (sb.charAt(x+3) == '.') { // implicit i-x > 3 + found = windowsReservedFilenames.contains(sb.subSequence(x, x+3)); + } else if (i-x > 4 && sb.charAt(x+4) == '.') { + found = windowsReservedFilenames.contains(sb.subSequence(x, x+4)); + } + if (found) { + sb.insert(x+3, toHexByte(sb.charAt(x+2), hexByte)); + sb.setCharAt(x+2, '~'); + i += 2; + } + } + if (dotencode && (sb.charAt(x) == '.' || sb.charAt(x) == ' ')) { + sb.insert(x+1, toHexByte(sb.charAt(x), hexByte)); + sb.setCharAt(x, '~'); // setChar *after* charAt/insert to get ~2e, not ~7e for '.' 
+ i += 2; + } + x = i+1; + } while (x < sb.length()); + } + + private static char[] toHexByte(int ch, char[] buf) { + assert buf.length > 1; + final String hexDigits = "0123456789abcdef"; + buf[0] = hexDigits.charAt((ch & 0x00F0) >>> 4); + buf[1] = hexDigits.charAt(ch & 0x0F); + return buf; + } +}
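StoragePathHelper's encodefilename step maps uppercase letters to '_' plus the lowercase letter, doubles '_', and hex-escapes reserved, control and high-bit characters behind '~'. The sketch below covers just that per-character mapping; the fncache "dh/" hashing branch and the store/data prefix are deliberately left out.

public class FilenameEncodeSketch {
    static String encode(String path) {
        final String reserved = "\\:*?\"<>|";
        StringBuilder sb = new StringBuilder(path.length() * 2);
        for (int i = 0; i < path.length(); i++) {
            char ch = path.charAt(i);
            if (ch >= 'A' && ch <= 'Z') {
                sb.append('_').append((char) (ch | 0x20));          // uppercase -> '_' + lowercase
            } else if (ch == '_') {
                sb.append("__");                                     // '_' is doubled to stay reversible
            } else if (reserved.indexOf(ch) != -1 || ch < ' ' || (ch >= '~' && ch <= 255)) {
                sb.append('~').append(String.format("%02x", (int) ch)); // '~' + two lowercase hex digits
            } else {
                sb.append(ch);
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(encode("Foo_Bar.txt")); // _foo___bar.txt
    }
}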
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgBundle.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,172 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import java.io.File; +import java.io.IOException; +import java.util.LinkedList; +import java.util.List; + +import org.tmatesoft.hg.core.HgException; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.ByteArrayChannel; +import org.tmatesoft.hg.internal.ByteArrayDataAccess; +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.DataAccessProvider; +import org.tmatesoft.hg.internal.DigestHelper; +import org.tmatesoft.hg.internal.RevlogStream; +import org.tmatesoft.hg.repo.HgChangelog.RawChangeset; +import org.tmatesoft.hg.util.CancelledException; + + +/** + * @see http://mercurial.selenic.com/wiki/BundleFormat + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgBundle { + + private final File bundleFile; + private final DataAccessProvider accessProvider; + + public HgBundle(DataAccessProvider dap, File bundle) { + accessProvider = dap; + bundleFile = bundle; + } + + public void changes(HgRepository hgRepo) throws HgException, IOException { + DataAccess da = accessProvider.create(bundleFile); + DigestHelper dh = new DigestHelper(); + try { + List<GroupElement> changelogGroup = readGroup(da); + if (changelogGroup.isEmpty()) { + throw new IllegalStateException("No changelog group in the bundle"); // XXX perhaps, just be silent and/or log? + } + // XXX in fact, bundle not necessarily starts with the first revision missing in hgRepo + // need to 'scroll' till the last one common. + final Nodeid base = changelogGroup.get(0).firstParent(); + if (!hgRepo.getChangelog().isKnown(base)) { + throw new IllegalArgumentException("unknown parent"); + } + // BundleFormat wiki says: + // Each Changelog entry patches the result of all previous patches + // (the previous, or parent patch of a given patch p is the patch that has a node equal to p's p1 field) + ByteArrayChannel bac = new ByteArrayChannel(); + hgRepo.getChangelog().rawContent(base, bac); // FIXME get DataAccess directly, to avoid + // extra byte[] (inside ByteArrayChannel) duplication just for the sake of subsequent ByteArrayDataChannel wrap. + ByteArrayDataAccess baseRevContent = new ByteArrayDataAccess(bac.toArray()); + for (GroupElement ge : changelogGroup) { + byte[] csetContent = RevlogStream.apply(baseRevContent, -1, ge.patches); + dh = dh.sha1(ge.firstParent(), ge.secondParent(), csetContent); // XXX ge may give me access to byte[] content of nodeid directly, perhaps, I don't need DH to be friend of Nodeid? 
+ if (!ge.node().equalsTo(dh.asBinary())) { + throw new IllegalStateException("Integrity check failed on " + bundleFile + ", node:" + ge.node()); + } + ByteArrayDataAccess csetDataAccess = new ByteArrayDataAccess(csetContent); + RawChangeset cs = RawChangeset.parse(csetDataAccess); + System.out.println(cs.toString()); + baseRevContent = csetDataAccess.reset(); + } + } catch (CancelledException ex) { + System.out.println("Operation cancelled"); + } finally { + da.done(); + } + } + + public void dump() throws IOException { + DataAccess da = accessProvider.create(bundleFile); + try { + LinkedList<String> names = new LinkedList<String>(); + if (!da.isEmpty()) { + System.out.println("Changelog group"); + List<GroupElement> changelogGroup = readGroup(da); + for (GroupElement ge : changelogGroup) { + System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); + } + System.out.println("Manifest group"); + List<GroupElement> manifestGroup = readGroup(da); + for (GroupElement ge : manifestGroup) { + System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); + } + while (!da.isEmpty()) { + int fnameLen = da.readInt(); + if (fnameLen <= 4) { + break; // null chunk, the last one. + } + byte[] fname = new byte[fnameLen - 4]; + da.readBytes(fname, 0, fname.length); + names.add(new String(fname)); + List<GroupElement> fileGroup = readGroup(da); + System.out.println(names.getLast()); + for (GroupElement ge : fileGroup) { + System.out.printf(" %s %s %s %s; patches:%d\n", ge.node(), ge.firstParent(), ge.secondParent(), ge.cset(), ge.patches.size()); + } + } + } + System.out.println(names.size()); + for (String s : names) { + System.out.println(s); + } + } finally { + da.done(); + } + } + + private static List<GroupElement> readGroup(DataAccess da) throws IOException { + int len = da.readInt(); + LinkedList<GroupElement> rv = new LinkedList<HgBundle.GroupElement>(); + while (len > 4 && !da.isEmpty()) { + byte[] nb = new byte[80]; + da.readBytes(nb, 0, 80); + int dataLength = len-84; + LinkedList<RevlogStream.PatchRecord> patches = new LinkedList<RevlogStream.PatchRecord>(); + while (dataLength > 0) { + RevlogStream.PatchRecord pr = RevlogStream.PatchRecord.read(da); + patches.add(pr); + dataLength -= pr.len + 12; + } + rv.add(new GroupElement(nb, patches)); + len = da.isEmpty() ? 0 : da.readInt(); + } + return rv; + } + + static class GroupElement { + private byte[] header; // byte[80] takes 120 bytes, 4 Nodeids - 192 + private List<RevlogStream.PatchRecord> patches; + + GroupElement(byte[] fourNodeids, List<RevlogStream.PatchRecord> patchList) { + assert fourNodeids != null && fourNodeids.length == 80; + // patchList.size() > 0 + header = fourNodeids; + patches = patchList; + } + public Nodeid node() { + return Nodeid.fromBinary(header, 0); + } + public Nodeid firstParent() { + return Nodeid.fromBinary(header, 20); + } + public Nodeid secondParent() { + return Nodeid.fromBinary(header, 40); + } + public Nodeid cset() { // cs seems to be changeset + return Nodeid.fromBinary(header, 60); + } + } +}
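readGroup above relies on a simple chunk framing: a 4-byte length, 80 bytes holding four nodeids (node, p1, p2, cset), then patch records until length-84 bytes are consumed, with a length of 4 or less terminating the group. A minimal sketch of that framing over synthetic bytes; the all-zero nodeids are placeholders, not real hashes.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class BundleGroupFramingSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeInt(84 + 12 + 3);         // one chunk: 80-byte header + a single patch of 3 data bytes
        out.write(new byte[80]);           // four all-zero nodeids, just to exercise the framing
        out.writeInt(0);                   // patch start
        out.writeInt(0);                   // patch end
        out.writeInt(3);                   // patch data length
        out.write(new byte[] {'a', 'b', 'c'});
        out.writeInt(0);                   // group terminator (len <= 4)

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
        int len;
        while ((len = in.readInt()) > 4) {
            byte[] header = new byte[80];  // node, p1, p2, cset - 20 bytes each
            in.readFully(header);
            int remaining = len - 84;
            while (remaining > 0) {
                int start = in.readInt(), end = in.readInt(), dlen = in.readInt();
                in.skipBytes(dlen);
                remaining -= 12 + dlen;
                System.out.printf("patch [%d..%d) replaced by %d bytes%n", start, end, dlen);
            }
        }
    }
}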
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgChangelog.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,313 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collections; +import java.util.Date; +import java.util.Formatter; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.TimeZone; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.RevlogStream; + +/** + * Representation of the Mercurial changelog file (list of ChangeSets) + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgChangelog extends Revlog { + + /* package-local */HgChangelog(HgRepository hgRepo, RevlogStream content) { + super(hgRepo, content); + } + + public void all(final HgChangelog.Inspector inspector) { + range(0, getLastRevision(), inspector); + } + + public void range(int start, int end, final HgChangelog.Inspector inspector) { + RevlogStream.Inspector i = new RevlogStream.Inspector() { + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { + RawChangeset cset = RawChangeset.parse(da); + // XXX there's no guarantee for Changeset.Callback that distinct instance comes each time, consider instance reuse + inspector.next(revisionNumber, Nodeid.fromBinary(nodeid, 0), cset); + } + }; + content.iterate(start, end, true, i); + } + + public List<RawChangeset> range(int start, int end) { + final ArrayList<RawChangeset> rv = new ArrayList<RawChangeset>(end - start + 1); + RevlogStream.Inspector i = new RevlogStream.Inspector() { + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { + RawChangeset cset = RawChangeset.parse(da); + rv.add(cset); + } + }; + content.iterate(start, end, true, i); + return rv; + } + + public void range(final HgChangelog.Inspector inspector, final int... 
revisions) { + if (revisions == null || revisions.length == 0) { + return; + } + RevlogStream.Inspector i = new RevlogStream.Inspector() { + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { + if (Arrays.binarySearch(revisions, revisionNumber) >= 0) { + RawChangeset cset = RawChangeset.parse(da); + inspector.next(revisionNumber, Nodeid.fromBinary(nodeid, 0), cset); + } + } + }; + Arrays.sort(revisions); + content.iterate(revisions[0], revisions[revisions.length - 1], true, i); + } + + public interface Inspector { + // TODO describe whether cset is new instance each time + void next(int revisionNumber, Nodeid nodeid, RawChangeset cset); + } + + /** + * Entry in the Changelog + */ + public static class RawChangeset implements Cloneable /* for those that would like to keep a copy */{ + // TODO immutable + private/* final */Nodeid manifest; + private String user; + private String comment; + private List<String> files; // unmodifiable collection (otherwise #files() and implicit #clone() shall be revised) + private Date time; + private int timezone; + private Map<String, String> extras; + + /** + * @see mercurial/changelog.py:read() + * + * <pre> + * format used: + * nodeid\n : manifest node in ascii + * user\n : user, no \n or \r allowed + * time tz extra\n : date (time is int or float, timezone is int) + * : extra is metadatas, encoded and separated by '\0' + * : older versions ignore it + * files\n\n : files modified by the cset, no \n or \r allowed + * (.*) : comment (free text, ideally utf-8) + * + * changelog v0 doesn't use extra + * </pre> + */ + private RawChangeset() { + } + + public Nodeid manifest() { + return manifest; + } + + public String user() { + return user; + } + + public String comment() { + return comment; + } + + public List<String> files() { + return files; + } + + public Date date() { + return time; + } + + public String dateString() { + // XXX keep once formatted? Perhaps, there's faster way to set up calendar/time zone? + StringBuilder sb = new StringBuilder(30); + Formatter f = new Formatter(sb, Locale.US); + TimeZone tz = TimeZone.getTimeZone("GMT"); + // apparently timezone field records number of seconds time differs from UTC, + // i.e. value to substract from time to get UTC time. Calendar seems to add + // timezone offset to UTC, instead, hence sign change. 
+ tz.setRawOffset(timezone * -1000); + Calendar c = Calendar.getInstance(tz, Locale.US); + c.setTime(time); + f.format("%ta %<tb %<td %<tH:%<tM:%<tS %<tY %<tz", c); + return sb.toString(); + } + + public Map<String, String> extras() { + return extras; + } + + public String branch() { + return extras.get("branch"); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("Changeset {"); + sb.append("User: ").append(user).append(", "); + sb.append("Comment: ").append(comment).append(", "); + sb.append("Manifest: ").append(manifest).append(", "); + sb.append("Date: ").append(time).append(", "); + sb.append("Files: ").append(files.size()); + for (String s : files) { + sb.append(", ").append(s); + } + if (extras != null) { + sb.append(", Extra: ").append(extras); + } + sb.append("}"); + return sb.toString(); + } + + @Override + public RawChangeset clone() { + try { + return (RawChangeset) super.clone(); + } catch (CloneNotSupportedException ex) { + throw new InternalError(ex.toString()); + } + } + + public static RawChangeset parse(DataAccess da) { + try { + byte[] data = da.byteArray(); + RawChangeset rv = new RawChangeset(); + rv.init(data, 0, data.length); + return rv; + } catch (IOException ex) { + throw new IllegalArgumentException(ex); // FIXME better handling of IOExc + } + } + + /* package-local */void init(byte[] data, int offset, int length) { + final int bufferEndIndex = offset + length; + final byte lineBreak = (byte) '\n'; + int breakIndex1 = indexOf(data, lineBreak, offset, bufferEndIndex); + if (breakIndex1 == -1) { + throw new IllegalArgumentException("Bad Changeset data"); + } + Nodeid _nodeid = Nodeid.fromAscii(data, 0, breakIndex1); + int breakIndex2 = indexOf(data, lineBreak, breakIndex1 + 1, bufferEndIndex); + if (breakIndex2 == -1) { + throw new IllegalArgumentException("Bad Changeset data"); + } + String _user = new String(data, breakIndex1 + 1, breakIndex2 - breakIndex1 - 1); + int breakIndex3 = indexOf(data, lineBreak, breakIndex2 + 1, bufferEndIndex); + if (breakIndex3 == -1) { + throw new IllegalArgumentException("Bad Changeset data"); + } + String _timeString = new String(data, breakIndex2 + 1, breakIndex3 - breakIndex2 - 1); + int space1 = _timeString.indexOf(' '); + if (space1 == -1) { + throw new IllegalArgumentException("Bad Changeset data"); + } + int space2 = _timeString.indexOf(' ', space1 + 1); + if (space2 == -1) { + space2 = _timeString.length(); + } + long unixTime = Long.parseLong(_timeString.substring(0, space1)); // XXX Float, perhaps + int _timezone = Integer.parseInt(_timeString.substring(space1 + 1, space2)); + // XXX not sure need to add timezone here - I can't figure out whether Hg keeps GMT time, and records timezone just for info, or unixTime is taken local + // on commit and timezone is recorded to adjust it to UTC. + Date _time = new Date(unixTime * 1000); + String _extras = space2 < _timeString.length() ? 
_timeString.substring(space2 + 1) : null; + Map<String, String> _extrasMap; + if (_extras == null) { + _extrasMap = Collections.singletonMap("branch", "default"); + } else { + _extrasMap = new HashMap<String, String>(); + for (String pair : _extras.split("\00")) { + int eq = pair.indexOf(':'); + // FIXME need to decode key/value, @see changelog.py:decodeextra + _extrasMap.put(pair.substring(0, eq), pair.substring(eq + 1)); + } + if (!_extrasMap.containsKey("branch")) { + _extrasMap.put("branch", "default"); + } + _extrasMap = Collections.unmodifiableMap(_extrasMap); + } + + // + int lastStart = breakIndex3 + 1; + int breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex); + ArrayList<String> _files = null; + if (breakIndex4 > lastStart) { + // if breakIndex4 == lastStart, we already found \n\n and hence there are no files (e.g. merge revision) + _files = new ArrayList<String>(5); + while (breakIndex4 != -1 && breakIndex4 + 1 < bufferEndIndex) { + _files.add(new String(data, lastStart, breakIndex4 - lastStart)); + lastStart = breakIndex4 + 1; + if (data[breakIndex4 + 1] == lineBreak) { + // found \n\n + break; + } else { + breakIndex4 = indexOf(data, lineBreak, lastStart, bufferEndIndex); + } + } + if (breakIndex4 == -1 || breakIndex4 >= bufferEndIndex) { + throw new IllegalArgumentException("Bad Changeset data"); + } + } else { + breakIndex4--; + } + String _comment; + try { + _comment = new String(data, breakIndex4 + 2, bufferEndIndex - breakIndex4 - 2, "UTF-8"); + // FIXME respect ui.fallbackencoding and try to decode if set + } catch (UnsupportedEncodingException ex) { + _comment = ""; + throw new IllegalStateException("Could hardly happen"); + } + // change this instance at once, don't leave it partially changes in case of error + this.manifest = _nodeid; + this.user = _user; + this.time = _time; + this.timezone = _timezone; + this.files = _files == null ? Collections.<String> emptyList() : Collections.unmodifiableList(_files); + this.comment = _comment; + this.extras = _extrasMap; + } + + private static int indexOf(byte[] src, byte what, int startOffset, int endIndex) { + for (int i = startOffset; i < endIndex; i++) { + if (src[i] == what) { + return i; + } + } + return -1; + } + } + +}
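The RawChangeset.init parser above follows the changelog entry layout from its javadoc: manifest nodeid, user, a "time timezone extras" line, the touched files up to an empty line, then the free-text comment. Below is a small illustrative split of a synthetic entry along those boundaries; it is not the library's parser and all values are made up.

import java.util.Arrays;
import java.util.List;

public class RawChangesetLayoutDemo {
    public static void main(String[] args) {
        String raw = "0123456789abcdef0123456789abcdef01234567\n"
                + "alice <alice@example.com>\n"
                + "1299649337 -3600\n"
                + "src/a.txt\n"
                + "src/b.txt\n"
                + "\n"
                + "Fix newline handling";
        List<String> lines = Arrays.asList(raw.split("\n", -1));
        int blank = lines.indexOf("");                      // empty line separates files from the comment
        System.out.println("manifest: " + lines.get(0));
        System.out.println("user:     " + lines.get(1));
        System.out.println("time tz:  " + lines.get(2));    // seconds since epoch and the recorded timezone offset
        System.out.println("files:    " + lines.subList(3, blank));
        System.out.println("comment:  " + lines.get(blank + 1)); // single-line comment in this sample
    }
}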
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgDataFile.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,383 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgInternals.wrongLocalRevision; +import static org.tmatesoft.hg.repo.HgRepository.*; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.TreeMap; + +import org.tmatesoft.hg.core.HgDataStreamException; +import org.tmatesoft.hg.core.HgException; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.FilterByteChannel; +import org.tmatesoft.hg.internal.RevlogStream; +import org.tmatesoft.hg.util.ByteChannel; +import org.tmatesoft.hg.util.CancelledException; +import org.tmatesoft.hg.util.Path; + + + +/** + * ? name:HgFileNode? + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgDataFile extends Revlog { + + // absolute from repo root? + // slashes, unix-style? + // repo location agnostic, just to give info to user, not to access real storage + private final Path path; + private Metadata metadata; // get initialized on first access to file content. + + /*package-local*/HgDataFile(HgRepository hgRepo, Path filePath, RevlogStream content) { + super(hgRepo, content); + path = filePath; + } + + /*package-local*/HgDataFile(HgRepository hgRepo, Path filePath) { + super(hgRepo); + path = filePath; + } + + // exists is not the best name possible. now it means no file with such name was ever known to the repo. + // it might be confused with files existed before but lately removed. + public boolean exists() { + return content != null; // XXX need better impl + } + + // human-readable (i.e. "COPYING", not "store/data/_c_o_p_y_i_n_g.i") + public Path getPath() { + return path; // hgRepo.backresolve(this) -> name? In this case, what about hashed long names? + } + + public int length(Nodeid nodeid) { + return content.dataLength(getLocalRevision(nodeid)); + } + + public void workingCopy(ByteChannel sink) throws IOException, CancelledException { + throw HgRepository.notImplemented(); + } + +// public void content(int revision, ByteChannel sink, boolean applyFilters) throws HgDataStreamException, IOException, CancelledException { +// byte[] content = content(revision); +// final CancelSupport cancelSupport = CancelSupport.Factory.get(sink); +// final ProgressSupport progressSupport = ProgressSupport.Factory.get(sink); +// ByteBuffer buf = ByteBuffer.allocate(512); +// int left = content.length; +// progressSupport.start(left); +// int offset = 0; +// cancelSupport.checkCancelled(); +// ByteChannel _sink = applyFilters ? 
new FilterByteChannel(sink, getRepo().getFiltersFromRepoToWorkingDir(getPath())) : sink; +// do { +// buf.put(content, offset, Math.min(left, buf.remaining())); +// buf.flip(); +// cancelSupport.checkCancelled(); +// // XXX I may not rely on returned number of bytes but track change in buf position instead. +// int consumed = _sink.write(buf); +// buf.compact(); +// offset += consumed; +// left -= consumed; +// progressSupport.worked(consumed); +// } while (left > 0); +// progressSupport.done(); // XXX shall specify whether #done() is invoked always or only if completed successfully. +// } + + /*XXX not sure distinct method contentWithFilters() is the best way to do, perhaps, callers shall add filters themselves?*/ + public void contentWithFilters(int revision, ByteChannel sink) throws HgDataStreamException, IOException, CancelledException { + content(revision, new FilterByteChannel(sink, getRepo().getFiltersFromRepoToWorkingDir(getPath()))); + } + + // for data files need to check heading of the file content for possible metadata + // @see http://mercurial.selenic.com/wiki/FileFormats#data.2BAC8- + public void content(int revision, ByteChannel sink) throws HgDataStreamException, IOException, CancelledException { + if (revision == TIP) { + revision = getLastRevision(); + } + if (revision == WORKING_COPY) { + workingCopy(sink); + return; + } + if (wrongLocalRevision(revision) || revision == BAD_REVISION) { + throw new IllegalArgumentException(String.valueOf(revision)); + } + if (sink == null) { + throw new IllegalArgumentException(); + } + if (metadata == null) { + metadata = new Metadata(); + } + ContentPipe insp; + if (metadata.none(revision)) { + insp = new ContentPipe(sink, 0); + } else if (metadata.known(revision)) { + insp = new ContentPipe(sink, metadata.dataOffset(revision)); + } else { + // do not know if there's metadata + insp = new MetadataContentPipe(sink, metadata); + } + insp.checkCancelled(); + super.content.iterate(revision, revision, true, insp); + try { + insp.checkFailed(); + } catch (HgDataStreamException ex) { + throw ex; + } catch (HgException ex) { + // shall not happen, unless we changed ContentPipe or its subclass + throw new HgDataStreamException(ex.getClass().getName(), ex); + } + } + + public void history(HgChangelog.Inspector inspector) { + history(0, getLastRevision(), inspector); + } + + public void history(int start, int end, HgChangelog.Inspector inspector) { + if (!exists()) { + throw new IllegalStateException("Can't get history of invalid repository file node"); + } + final int last = getLastRevision(); + if (start < 0 || start > last) { + throw new IllegalArgumentException(); + } + if (end == TIP) { + end = last; + } else if (end < start || end > last) { + throw new IllegalArgumentException(); + } + final int[] commitRevisions = new int[end - start + 1]; + RevlogStream.Inspector insp = new RevlogStream.Inspector() { + int count = 0; + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) { + commitRevisions[count++] = linkRevision; + } + }; + content.iterate(start, end, false, insp); + getRepo().getChangelog().range(inspector, commitRevisions); + } + + // for a given local revision of the file, find out local revision in the changelog + public int getChangesetLocalRevision(int revision) { + return content.linkRevision(revision); + } + + public Nodeid getChangesetRevision(Nodeid nid) { + int changelogRevision = 
getChangesetLocalRevision(getLocalRevision(nid)); + return getRepo().getChangelog().getRevision(changelogRevision); + } + + public boolean isCopy() throws HgDataStreamException { + if (metadata == null || !metadata.checked(0)) { + // content() always initializes metadata. + // FIXME this is expensive way to find out metadata, distinct RevlogStream.Iterator would be better. + try { + content(0, new ByteChannel() { // No-op channel + public int write(ByteBuffer buffer) throws IOException { + // pretend we consumed whole buffer + int rv = buffer.remaining(); + buffer.position(buffer.limit()); + return rv; + } + }); + } catch (Exception ex) { + throw new HgDataStreamException("Can't initialize metadata", ex); + } + } + if (!metadata.known(0)) { + return false; + } + return metadata.find(0, "copy") != null; + } + + public Path getCopySourceName() throws HgDataStreamException { + if (isCopy()) { + return Path.create(metadata.find(0, "copy")); + } + throw new UnsupportedOperationException(); // XXX REVISIT, think over if Exception is good (clients would check isCopy() anyway, perhaps null is sufficient?) + } + + public Nodeid getCopySourceRevision() throws HgDataStreamException { + if (isCopy()) { + return Nodeid.fromAscii(metadata.find(0, "copyrev")); // XXX reuse/cache Nodeid + } + throw new UnsupportedOperationException(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(getClass().getSimpleName()); + sb.append('('); + sb.append(getPath()); + sb.append(')'); + return sb.toString(); + } + + private static final class MetadataEntry { + private final String entry; + private final int valueStart; + /*package-local*/MetadataEntry(String key, String value) { + entry = key + value; + valueStart = key.length(); + } + /*package-local*/boolean matchKey(String key) { + return key.length() == valueStart && entry.startsWith(key); + } +// uncomment once/if needed +// public String key() { +// return entry.substring(0, valueStart); +// } + public String value() { + return entry.substring(valueStart); + } + } + + private static class Metadata { + // XXX sparse array needed + private final TreeMap<Integer, Integer> offsets = new TreeMap<Integer, Integer>(); + private final TreeMap<Integer, MetadataEntry[]> entries = new TreeMap<Integer, MetadataEntry[]>(); + + private final Integer NONE = new Integer(-1); // do not duplicate -1 integers at least within single file (don't want statics) + + // true when there's metadata for given revision + boolean known(int revision) { + Integer i = offsets.get(revision); + return i != null && NONE != i; + } + + // true when revision has been checked for metadata presence. + public boolean checked(int revision) { + return offsets.containsKey(revision); + } + + // true when revision has been checked and found not having any metadata + boolean none(int revision) { + Integer i = offsets.get(revision); + return i == NONE; + } + + // mark revision as having no metadata. + void recordNone(int revision) { + Integer i = offsets.get(revision); + if (i == NONE) { + return; // already there + } + if (i != null) { + throw new IllegalStateException(String.format("Trying to override Metadata state for revision %d (known offset: %d)", revision, i)); + } + offsets.put(revision, NONE); + } + + // since this is internal class, callers are supposed to ensure arg correctness (i.e. 
ask known() before) + int dataOffset(int revision) { + return offsets.get(revision); + } + void add(int revision, int dataOffset, Collection<MetadataEntry> e) { + assert !offsets.containsKey(revision); + offsets.put(revision, dataOffset); + entries.put(revision, e.toArray(new MetadataEntry[e.size()])); + } + String find(int revision, String key) { + for (MetadataEntry me : entries.get(revision)) { + if (me.matchKey(key)) { + return me.value(); + } + } + return null; + } + } + + private static class MetadataContentPipe extends ContentPipe { + + private final Metadata metadata; + + public MetadataContentPipe(ByteChannel sink, Metadata _metadata) { + super(sink, 0); + metadata = _metadata; + } + + @Override + protected void prepare(int revisionNumber, DataAccess da) throws HgException, IOException { + long daLength = da.length(); + if (daLength < 4 || da.readByte() != 1 || da.readByte() != 10) { + metadata.recordNone(revisionNumber); + da.reset(); + return; + } + int lastEntryStart = 2; + int lastColon = -1; + ArrayList<MetadataEntry> _metadata = new ArrayList<MetadataEntry>(); + // XXX in fact, need smth like ByteArrayBuilder, similar to StringBuilder, + // which can't be used here because we can't convert bytes to chars as we read them + // (there might be multi-byte encoding), and we need to collect all bytes before converting to string + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + String key = null, value = null; + boolean byteOne = false; + for (int i = 2; i < daLength; i++) { + byte b = da.readByte(); + if (b == '\n') { + if (byteOne) { // i.e. \n follows 1 + lastEntryStart = i+1; + // XXX is it possible to have here incomplete key/value (i.e. if last pair didn't end with \n) + break; + } + if (key == null || lastColon == -1 || i <= lastColon) { + throw new IllegalStateException(); // FIXME log instead and record null key in the metadata. Ex just to fail fast during dev + } + value = new String(bos.toByteArray()).trim(); + bos.reset(); + _metadata.add(new MetadataEntry(key, value)); + key = value = null; + lastColon = -1; + lastEntryStart = i+1; + continue; + } + // byteOne has to be consumed up to this line, if not jet, consume it + if (byteOne) { + // insert 1 we've read on previous step into the byte builder + bos.write(1); + // fall-through to consume current byte + byteOne = false; + } + if (b == (int) ':') { + assert value == null; + key = new String(bos.toByteArray()); + bos.reset(); + lastColon = i; + } else if (b == 1) { + byteOne = true; + } else { + bos.write(b); + } + } + _metadata.trimToSize(); + metadata.add(revisionNumber, lastEntryStart, _metadata); + if (da.isEmpty() || !byteOne) { + throw new HgDataStreamException(String.format("Metadata for revision %d is not closed properly", revisionNumber), null); + } + // da is in prepared state (i.e. we consumed all bytes up to metadata end). + } + } +}
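A tentative usage sketch for the channel-based content access above: a caller that wants a whole revision in memory supplies a collecting ByteChannel. The repository path, file name and demo class are placeholders; only HgLookup#detect, HgRepository#getFileNode and HgDataFile#content as introduced in this changeset are relied on.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.util.ByteChannel;

public class CatDemo {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detect("/path/to/repo"); // placeholder location
        HgDataFile df = repo.getFileNode("COPYING"); // placeholder file name
        final ByteArrayOutputStream collected = new ByteArrayOutputStream();
        df.content(HgRepository.TIP, new ByteChannel() {
            public int write(ByteBuffer buffer) throws IOException {
                // drain whatever the pipe hands over and report it all consumed
                int rv = buffer.remaining();
                while (buffer.hasRemaining()) {
                    collected.write(buffer.get());
                }
                return rv;
            }
        });
        System.out.println(collected.toString());
    }
}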
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgDirstate.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,184 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import java.io.File; +import java.io.IOException; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.TreeSet; + +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.DataAccessProvider; +import org.tmatesoft.hg.util.Path; + + +/** + * @see http://mercurial.selenic.com/wiki/DirState + * @see http://mercurial.selenic.com/wiki/FileFormats#dirstate + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +class HgDirstate { + + private final DataAccessProvider accessProvider; + private final File dirstateFile; + // deliberate String, not Path as it seems useless to keep Path here + private Map<String, Record> normal; + private Map<String, Record> added; + private Map<String, Record> removed; + private Map<String, Record> merged; + + /*package-local*/ HgDirstate() { + // empty instance + accessProvider = null; + dirstateFile = null; + } + + public HgDirstate(DataAccessProvider dap, File dirstate) { + accessProvider = dap; + dirstateFile = dirstate; + } + + private void read() { + normal = added = removed = merged = Collections.<String, Record>emptyMap(); + if (dirstateFile == null || !dirstateFile.exists()) { + return; + } + DataAccess da = accessProvider.create(dirstateFile); + if (da.isEmpty()) { + return; + } + // not sure linked is really needed here, just for ease of debug + normal = new LinkedHashMap<String, Record>(); + added = new LinkedHashMap<String, Record>(); + removed = new LinkedHashMap<String, Record>(); + merged = new LinkedHashMap<String, Record>(); + try { + // XXX skip(40) if we don't need these? + byte[] parents = new byte[40]; + da.readBytes(parents, 0, 40); + parents = null; + do { + final byte state = da.readByte(); + final int fmode = da.readInt(); + final int size = da.readInt(); + final int time = da.readInt(); + final int nameLen = da.readInt(); + String fn1 = null, fn2 = null; + byte[] name = new byte[nameLen]; + da.readBytes(name, 0, nameLen); + for (int i = 0; i < nameLen; i++) { + if (name[i] == 0) { + fn1 = new String(name, 0, i, "UTF-8"); // XXX unclear from documentation what encoding is used there + fn2 = new String(name, i+1, nameLen - i - 1, "UTF-8"); // need to check with different system codepages + break; + } + } + if (fn1 == null) { + fn1 = new String(name); + } + Record r = new Record(fmode, size, time, fn1, fn2); + if (state == 'n') { + normal.put(r.name1, r); + } else if (state == 'a') { + added.put(r.name1, r); + } else if (state == 'r') { + removed.put(r.name1, r); + } else if (state == 'm') { + merged.put(r.name1, r); + } else { + // FIXME log error? 
+ } + } while (!da.isEmpty()); + } catch (IOException ex) { + ex.printStackTrace(); // FIXME log error, clean dirstate? + } finally { + da.done(); + } + } + + // new, modifiable collection + /*package-local*/ TreeSet<String> all() { + read(); + TreeSet<String> rv = new TreeSet<String>(); + @SuppressWarnings("unchecked") + Map<String, Record>[] all = new Map[] { normal, added, removed, merged }; + for (int i = 0; i < all.length; i++) { + for (Record r : all[i].values()) { + rv.add(r.name1); + } + } + return rv; + } + + /*package-local*/ Record checkNormal(Path fname) { + return normal.get(fname.toString()); + } + + /*package-local*/ Record checkAdded(Path fname) { + return added.get(fname.toString()); + } + /*package-local*/ Record checkRemoved(Path fname) { + return removed.get(fname.toString()); + } + /*package-local*/ Record checkRemoved(String fname) { + return removed.get(fname); + } + /*package-local*/ Record checkMerged(Path fname) { + return merged.get(fname.toString()); + } + + + + + /*package-local*/ void dump() { + read(); + @SuppressWarnings("unchecked") + Map<String, Record>[] all = new Map[] { normal, added, removed, merged }; + char[] x = new char[] {'n', 'a', 'r', 'm' }; + for (int i = 0; i < all.length; i++) { + for (Record r : all[i].values()) { + System.out.printf("%c %3o%6d %30tc\t\t%s", x[i], r.mode, r.size, (long) r.time * 1000, r.name1); + if (r.name2 != null) { + System.out.printf(" --> %s", r.name2); + } + System.out.println(); + } + System.out.println(); + } + } + + /*package-local*/ static class Record { + final int mode; + final int size; + final int time; + final String name1; + final String name2; + + public Record(int fmode, int fsize, int ftime, String name1, String name2) { + mode = fmode; + size = fsize; + time = ftime; + this.name1 = name1; + this.name2 = name2; + + } + } +}
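Each dirstate record read above consists of a state byte ('n', 'a', 'r' or 'm'), three 32-bit integers (mode, size, mtime), a name length and the name bytes, with a copy/rename stored as "name1\0name2". HgDirstate itself is package-local; from outside the package the debug route is HgInternals#dumpDirstate, introduced later in this changeset. A small sketch (the demo class is an assumption, and it presumes the current working directory is inside a repository):

import org.tmatesoft.hg.repo.HgInternals;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

public class DirstateDumpDemo {
    public static void main(String[] args) throws Exception {
        // assumes the current working directory is somewhere inside a Mercurial repository
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        // prints one line per record: state, mode, size, mtime, name (and copy source, if any)
        new HgInternals(repo).dumpDirstate();
    }
}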
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgIgnore.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.regex.Pattern; + +import org.tmatesoft.hg.util.Path; + +/** + * Handling of ignored paths according to .hgignore configuration + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgIgnore { + + private List<Pattern> entries; + + HgIgnore() { + entries = Collections.emptyList(); + } + + /* package-local */void read(File hgignoreFile) throws IOException { + if (!hgignoreFile.exists()) { + return; + } + ArrayList<Pattern> result = new ArrayList<Pattern>(entries); // start with existing + String syntax = "regex"; // or "glob" + BufferedReader fr = new BufferedReader(new FileReader(hgignoreFile)); + String line; + while ((line = fr.readLine()) != null) { + line = line.trim(); + if (line.startsWith("syntax:")) { + syntax = line.substring("syntax:".length()).trim(); + if (!"regex".equals(syntax) && !"glob".equals(syntax)) { + throw new IllegalStateException(line); + } + } else if (line.length() > 0) { + // shall I account for local paths in the file (i.e. + // back-slashed on windows)? + int x; + if ((x = line.indexOf('#')) >= 0) { + line = line.substring(0, x).trim(); + if (line.length() == 0) { + continue; + } + } + if ("glob".equals(syntax)) { + // hgignore(5) + // (http://www.selenic.com/mercurial/hgignore.5.html) says slashes '\' are escape characters, + // hence no special treatment of Windows path + // however, own attempts make me think '\' on Windows are not treated as escapes + line = glob2regex(line); + } + result.add(Pattern.compile(line)); // case-sensitive + } + } + result.trimToSize(); + entries = result; + } + + // note, #isIgnored(), even if queried for directories and returned positive reply, may still get + // a file from that ignored folder to get examined. Thus, patterns like "bin" shall match not only a folder, + // but any file under that folder as well + // Alternatively, file walker may memorize folder is ignored and uses this information for all nested files. However, + // this approach would require walker (a) return directories (b) provide nesting information. This may become + // troublesome when one walks not over io.File, but Eclipse's IResource or any other custom VFS. 
+ // + // + // might be interesting, although looks like of no direct use in my case + // @see http://stackoverflow.com/questions/1247772/is-there-an-equivalent-of-java-util-regex-for-glob-type-patterns + private String glob2regex(String line) { + assert line.length() > 0; + StringBuilder sb = new StringBuilder(line.length() + 10); + sb.append('^'); // help avoid matcher.find() to match 'bin' pattern in the middle of the filename + int start = 0, end = line.length() - 1; + // '*' at the beginning and end of a line are useless for Pattern + // XXX although how about **.txt - such globs can be seen in a config, are they valid for HgIgnore? + while (start <= end && line.charAt(start) == '*') start++; + while (end > start && line.charAt(end) == '*') end--; + + for (int i = start; i <= end; i++) { + char ch = line.charAt(i); + if (ch == '.' || ch == '\\') { + sb.append('\\'); + } else if (ch == '?') { + // simple '.' substitution might work out, however, more formally + // a char class seems more appropriate to avoid accidentally + // matching a subdirectory with ? char (i.e. /a/b?d against /a/bad, /a/bed and /a/b/d) + // @see http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_13_03 + // quote: "The slash character in a pathname shall be explicitly matched by using one or more slashes in the pattern; + // it shall neither be matched by the asterisk or question-mark special characters nor by a bracket expression" + sb.append("[^/]"); + continue; + } else if (ch == '*') { + sb.append("[^/]*?"); + continue; + } + sb.append(ch); + } + return sb.toString(); + } + + // TODO use PathGlobMatcher + public boolean isIgnored(Path path) { + for (Pattern p : entries) { + if (p.matcher(path).find()) { + return true; + } + } + return false; + } +}
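To make the glob translation above concrete: every pattern is anchored with '^', leading and trailing '*' are dropped, '.' and '\' are escaped, '?' becomes "[^/]" and an embedded '*' becomes "[^/]*?", so "bin" turns into "^bin" and "*.orig" into "^\.orig", matched with Matcher#find() anywhere in the path. Since HgIgnore instances are handed out package-locally, the public way to probe ignore state is HgInternals#checkIgnored from this changeset; the paths and the demo class below are made up for illustration.

import org.tmatesoft.hg.repo.HgInternals;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

public class IgnoreProbeDemo {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detect("/path/to/repo"); // placeholder location
        // each element of the result says whether the corresponding
        // (made-up) path matches a pattern from .hgignore
        boolean[] ignored = new HgInternals(repo).checkIgnored("bin/Main.class", "src/Main.java");
        for (int i = 0; i < ignored.length; i++) {
            System.out.println(ignored[i]);
        }
    }
}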
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgInternals.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.*; + +import java.io.File; +import java.net.InetAddress; +import java.net.UnknownHostException; + +import org.tmatesoft.hg.internal.ConfigFile; +import org.tmatesoft.hg.util.Path; + + +/** + * DO NOT USE THIS CLASS, INTENDED FOR TESTING PURPOSES. + * + * Debug helper, to access otherwise restricted (package-local) methods + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + + */ +public class HgInternals { + + private final HgRepository repo; + + public HgInternals(HgRepository hgRepo) { + repo = hgRepo; + } + + public void dumpDirstate() { + repo.loadDirstate().dump(); + } + + public boolean[] checkIgnored(String... toCheck) { + HgIgnore ignore = repo.getIgnore(); + boolean[] rv = new boolean[toCheck.length]; + for (int i = 0; i < toCheck.length; i++) { + rv[i] = ignore.isIgnored(Path.create(toCheck[i])); + } + return rv; + } + + public File getRepositoryDir() { + return repo.getRepositoryRoot(); + } + + public ConfigFile getRepoConfig() { + return repo.getConfigFile(); + } + + // in fact, need a setter for this anyway, shall move to internal.Internals perhaps? + public String getNextCommitUsername() { + String hgUser = System.getenv("HGUSER"); + if (hgUser != null && hgUser.trim().length() > 0) { + return hgUser.trim(); + } + String configValue = getRepoConfig().getString("ui", "username", null); + if (configValue != null) { + return configValue; + } + String email = System.getenv("EMAIL"); + if (email != null && email.trim().length() > 0) { + return email; + } + String username = System.getProperty("user.name"); + try { + String hostname = InetAddress.getLocalHost().getHostName(); + return username + '@' + hostname; + } catch (UnknownHostException ex) { + return username; + } + } + + // Convenient check of local revision number for validity (not all negative values are wrong as long as we use negative constants) + public static boolean wrongLocalRevision(int rev) { + return rev < 0 && rev != TIP && rev != WORKING_COPY && rev != BAD_REVISION; + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgLookup.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import java.io.File; +import java.io.IOException; + +import org.tmatesoft.hg.core.HgException; + +/** + * Utility methods to find Mercurial repository at a given location + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgLookup { + + public HgRepository detectFromWorkingDir() throws HgException { + return detect(System.getProperty("user.dir")); + } + + public HgRepository detect(String location) throws HgException { + return detect(new File(location)); + } + + // look up in specified location and above + public HgRepository detect(File location) throws HgException { + File dir = location.getAbsoluteFile(); + File repository; + do { + repository = new File(dir, ".hg"); + if (repository.exists() && repository.isDirectory()) { + break; + } + repository = null; + dir = dir.getParentFile(); + + } while(dir != null); + if (repository == null) { + // return invalid repository + return new HgRepository(location.getPath()); + } + try { + String repoPath = repository.getParentFile().getCanonicalPath(); + return new HgRepository(repoPath, repository); + } catch (IOException ex) { + throw new HgException(location.toString(), ex); + } + } +}
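HgLookup above never returns null: when no .hg directory is found at or above the given location it hands back an HgRepository that reports isInvalid(). A short sketch, with the path and the demo class as placeholders:

import org.tmatesoft.hg.core.HgException;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

public class DetectDemo {
    public static void main(String[] args) throws HgException {
        HgRepository repo = new HgLookup().detect("/path/to/somewhere/in/a/repo"); // placeholder
        if (repo.isInvalid()) {
            System.out.println("no repository found for: " + repo.getLocation());
        } else {
            System.out.println("repository at: " + repo.getLocation());
        }
    }
}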
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgManifest.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import java.io.IOException; + +import org.tmatesoft.hg.core.HgBadStateException; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.RevlogStream; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgManifest extends Revlog { + + /*package-local*/ HgManifest(HgRepository hgRepo, RevlogStream content) { + super(hgRepo, content); + } + + public void walk(int start, int end, final Inspector inspector) { + RevlogStream.Inspector insp = new RevlogStream.Inspector() { + + private boolean gtg = true; // good to go + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { + if (!gtg) { + return; + } + try { + gtg = gtg && inspector.begin(revisionNumber, new Nodeid(nodeid, true)); + int i; + String fname = null; + String flags = null; + Nodeid nid = null; + byte[] data = da.byteArray(); + for (i = 0; gtg && i < actualLen; i++) { + int x = i; + for( ; data[i] != '\n' && i < actualLen; i++) { + if (fname == null && data[i] == 0) { + fname = new String(data, x, i - x); + x = i+1; + } + } + if (i < actualLen) { + assert data[i] == '\n'; + int nodeidLen = i - x < 40 ? i-x : 40; + nid = Nodeid.fromAscii(data, x, nodeidLen); + if (nodeidLen + x < i) { + // 'x' and 'l' for executable bits and symlinks? + // hg --debug manifest shows 644 for each regular file in my repo + flags = new String(data, x + nodeidLen, i-x-nodeidLen); + } + gtg = gtg && inspector.next(nid, fname, flags); + } + nid = null; + fname = flags = null; + } + gtg = gtg && inspector.end(revisionNumber); + } catch (IOException ex) { + throw new HgBadStateException(ex); + } + } + }; + content.iterate(start, end, true, insp); + } + + public interface Inspector { + boolean begin(int revision, Nodeid nid); + boolean next(Nodeid nid, String fname, String flags); + boolean end(int revision); + } +}
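A possible Inspector for the manifest walk above, listing the files of a single manifest revision; the revision number 0 and the demo class are arbitrary choices for illustration, and returning false from any callback would stop the iteration early.

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgManifest;
import org.tmatesoft.hg.repo.HgRepository;

public class ManifestListDemo {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir(); // assumes cwd is inside a repo
        repo.getManifest().walk(0, 0, new HgManifest.Inspector() {
            public boolean begin(int revision, Nodeid nid) {
                System.out.println("manifest revision " + revision + ": " + nid);
                return true;
            }
            public boolean next(Nodeid nid, String fname, String flags) {
                System.out.println("  " + fname + (flags == null ? "" : " (" + flags + ")"));
                return true;
            }
            public boolean end(int revision) {
                return true;
            }
        });
    }
}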
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgRepository.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,277 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import java.io.File; +import java.io.IOException; +import java.lang.ref.SoftReference; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; + +import org.tmatesoft.hg.internal.ConfigFile; +import org.tmatesoft.hg.internal.DataAccessProvider; +import org.tmatesoft.hg.internal.Filter; +import org.tmatesoft.hg.internal.RelativePathRewrite; +import org.tmatesoft.hg.internal.RequiresFile; +import org.tmatesoft.hg.internal.RevlogStream; +import org.tmatesoft.hg.util.FileIterator; +import org.tmatesoft.hg.util.FileWalker; +import org.tmatesoft.hg.util.Path; +import org.tmatesoft.hg.util.PathRewrite; + + + +/** + * Shall be as state-less as possible, all the caching happens outside the repo, in commands/walkers + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public final class HgRepository { + + // if new constants added, consider fixing HgInternals#badLocalRevision + public static final int TIP = -1; + public static final int BAD_REVISION = Integer.MIN_VALUE; + public static final int WORKING_COPY = -2; + + // temp aux marker method + public static IllegalStateException notImplemented() { + return new IllegalStateException("Not implemented"); + } + + private final File repoDir; // .hg folder + private final String repoLocation; + private final DataAccessProvider dataAccess; + private final PathRewrite normalizePath; + private final PathRewrite dataPathHelper; + private final PathRewrite repoPathHelper; + + private HgChangelog changelog; + private HgManifest manifest; + private HgTags tags; + // XXX perhaps, shall enable caching explicitly + private final HashMap<Path, SoftReference<RevlogStream>> streamsCache = new HashMap<Path, SoftReference<RevlogStream>>(); + + private final org.tmatesoft.hg.internal.Internals impl = new org.tmatesoft.hg.internal.Internals(); + private HgIgnore ignore; + private ConfigFile configFile; + + HgRepository(String repositoryPath) { + repoDir = null; + repoLocation = repositoryPath; + dataAccess = null; + dataPathHelper = repoPathHelper = null; + normalizePath = null; + } + + HgRepository(String repositoryPath, File repositoryRoot) { + assert ".hg".equals(repositoryRoot.getName()) && repositoryRoot.isDirectory(); + assert repositoryPath != null; + assert repositoryRoot != null; + repoDir = repositoryRoot; + repoLocation = repositoryPath; + dataAccess = new DataAccessProvider(); + final boolean runningOnWindows = System.getProperty("os.name").indexOf("Windows") != -1; + if (runningOnWindows) { + normalizePath = new PathRewrite() { + + public String rewrite(String path) { + // TODO handle . and .. 
(although unlikely to face them from GUI client) + path = path.replace('\\', '/').replace("//", "/"); + if (path.startsWith("/")) { + path = path.substring(1); + } + return path; + } + }; + } else { + normalizePath = new PathRewrite.Empty(); // or strip leading slash, perhaps? + } + parseRequires(); + dataPathHelper = impl.buildDataFilesHelper(); + repoPathHelper = impl.buildRepositoryFilesHelper(); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "[" + getLocation() + (isInvalid() ? "(BAD)" : "") + "]"; + } + + public String getLocation() { + return repoLocation; + } + + public boolean isInvalid() { + return repoDir == null || !repoDir.exists() || !repoDir.isDirectory(); + } + + public HgChangelog getChangelog() { + if (this.changelog == null) { + String storagePath = repoPathHelper.rewrite("00changelog.i"); + RevlogStream content = resolve(Path.create(storagePath)); + this.changelog = new HgChangelog(this, content); + } + return this.changelog; + } + + public HgManifest getManifest() { + if (this.manifest == null) { + RevlogStream content = resolve(Path.create(repoPathHelper.rewrite("00manifest.i"))); + this.manifest = new HgManifest(this, content); + } + return this.manifest; + } + + public final HgTags getTags() { + if (tags == null) { + tags = new HgTags(); + try { + tags.readGlobal(new File(repoDir.getParentFile(), ".hgtags")); + tags.readLocal(new File(repoDir, "localtags")); + } catch (IOException ex) { + ex.printStackTrace(); // FIXME log or othewise report + } + } + return tags; + } + + public HgDataFile getFileNode(String path) { + String nPath = normalizePath.rewrite(path); + String storagePath = dataPathHelper.rewrite(nPath); + RevlogStream content = resolve(Path.create(storagePath)); + Path p = Path.create(nPath); + if (content == null) { + return new HgDataFile(this, p); + } + return new HgDataFile(this, p, content); + } + + public HgDataFile getFileNode(Path path) { + String storagePath = dataPathHelper.rewrite(path.toString()); + RevlogStream content = resolve(Path.create(storagePath)); + // XXX no content when no file? or HgDataFile.exists() to detect that? + if (content == null) { + return new HgDataFile(this, path); + } + return new HgDataFile(this, path, content); + } + + /* clients need to rewrite path from their FS to a repository-friendly paths, and, perhaps, vice versa*/ + public PathRewrite getToRepoPathHelper() { + return normalizePath; + } + + // local to hide use of io.File. 
+ /*package-local*/ File getRepositoryRoot() { + return repoDir; + } + + // XXX package-local, unless there are cases when required from outside (guess, working dir/revision walkers may hide dirstate access and no public visibility needed) + /*package-local*/ final HgDirstate loadDirstate() { + return new HgDirstate(getDataAccess(), new File(repoDir, "dirstate")); + } + + // package-local, see comment for loadDirstate + /*package-local*/ final HgIgnore getIgnore() { + // TODO read config for additional locations + if (ignore == null) { + ignore = new HgIgnore(); + try { + File ignoreFile = new File(repoDir.getParentFile(), ".hgignore"); + ignore.read(ignoreFile); + } catch (IOException ex) { + ex.printStackTrace(); // log warn + } + } + return ignore; + } + + /*package-local*/ DataAccessProvider getDataAccess() { + return dataAccess; + } + + // FIXME not sure repository shall create walkers + /*package-local*/ FileIterator createWorkingDirWalker() { + File repoRoot = repoDir.getParentFile(); + Path.Source pathSrc = new Path.SimpleSource(new PathRewrite.Composite(new RelativePathRewrite(repoRoot), getToRepoPathHelper())); + // Impl note: simple source is enough as files in the working dir are all unique + // even if they might get reused (i.e. after FileIterator#reset() and walking once again), + // path caching is better to be done in the code which knows that path are being reused + return new FileWalker(repoRoot, pathSrc); + } + + /** + * Perhaps, should be separate interface, like ContentLookup + * path - repository storage path (i.e. one usually with .i or .d) + */ + /*package-local*/ RevlogStream resolve(Path path) { + final SoftReference<RevlogStream> ref = streamsCache.get(path); + RevlogStream cached = ref == null ? null : ref.get(); + if (cached != null) { + return cached; + } + File f = new File(repoDir, path.toString()); + if (f.exists()) { + RevlogStream s = new RevlogStream(dataAccess, f); + streamsCache.put(path, new SoftReference<RevlogStream>(s)); + return s; + } + return null; // XXX empty stream instead? + } + + // can't expose internal class, otherwise seems reasonable to have it in API + /*package-local*/ ConfigFile getConfigFile() { + if (configFile == null) { + configFile = impl.newConfigFile(); + configFile.addLocation(new File(System.getProperty("user.home"), ".hgrc")); + // last one, overrides anything else + // <repo>/.hg/hgrc + configFile.addLocation(new File(getRepositoryRoot(), "hgrc")); + } + return configFile; + } + + /*package-local*/ List<Filter> getFiltersFromRepoToWorkingDir(Path p) { + return instantiateFilters(p, new Filter.Options(Filter.Direction.FromRepo)); + } + + /*package-local*/ List<Filter> getFiltersFromWorkingDirToRepo(Path p) { + return instantiateFilters(p, new Filter.Options(Filter.Direction.ToRepo)); + } + + private List<Filter> instantiateFilters(Path p, Filter.Options opts) { + List<Filter.Factory> factories = impl.getFilters(this, getConfigFile()); + if (factories.isEmpty()) { + return Collections.emptyList(); + } + ArrayList<Filter> rv = new ArrayList<Filter>(factories.size()); + for (Filter.Factory ff : factories) { + Filter f = ff.create(p, opts); + if (f != null) { + rv.add(f); + } + } + return rv; + } + + private void parseRequires() { + new RequiresFile().parse(impl, new File(repoDir, "requires")); + } + +}
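A brief sketch of navigating the repository facade above together with the copy metadata accessors of HgDataFile from this changeset. getFileNode() returns a node even for paths the repository never tracked, which is what exists() is for; the file name and the demo class are placeholders.

import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

public class FileNodeDemo {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        HgDataFile df = repo.getFileNode("COPYING"); // placeholder file name
        if (!df.exists()) {
            System.out.println("never tracked: " + df.getPath());
        } else if (df.isCopy()) {
            // first revision carries "copy"/"copyrev" metadata
            System.out.println(df.getPath() + " was copied from " + df.getCopySourceName());
        } else {
            System.out.println(df.getPath());
        }
    }
}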
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgStatusCollector.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,385 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.TreeSet; + +import org.tmatesoft.hg.core.HgDataStreamException; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.util.Path; +import org.tmatesoft.hg.util.PathPool; +import org.tmatesoft.hg.util.PathRewrite; + + +/** + * RevisionWalker? + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgStatusCollector { + + private final HgRepository repo; + private final Map<Integer, ManifestRevisionInspector> cache; // sparse array, in fact + private PathPool pathPool; + + public HgStatusCollector(HgRepository hgRepo) { + this.repo = hgRepo; + cache = new TreeMap<Integer, ManifestRevisionInspector>(); + ManifestRevisionInspector emptyFakeState = new ManifestRevisionInspector(); + emptyFakeState.begin(-1, null); + emptyFakeState.end(-1); // FIXME HgRepo.TIP == -1 as well, need to distinguish fake "prior to first" revision from "the very last" + cache.put(-1, emptyFakeState); + } + + public HgRepository getRepo() { + return repo; + } + + private ManifestRevisionInspector get(int rev) { + ManifestRevisionInspector i = cache.get(rev); + if (i == null) { + i = new ManifestRevisionInspector(); + cache.put(rev, i); + repo.getManifest().walk(rev, rev, i); + } + return i; + } + + /*package-local*/ ManifestRevisionInspector raw(int rev) { + return get(rev); + } + /*package-local*/ PathPool getPathPool() { + if (pathPool == null) { + pathPool = new PathPool(new PathRewrite.Empty()); + } + return pathPool; + } + + /** + * Allows sharing of a common path cache + */ + public void setPathPool(PathPool pathPool) { + this.pathPool = pathPool; + } + + + // hg status --change <rev> + public void change(int rev, HgStatusInspector inspector) { + int[] parents = new int[2]; + repo.getChangelog().parents(rev, parents, null, null); + walk(parents[0], rev, inspector); + } + + // I assume revision numbers are the same for changelog and manifest - here + // user would like to pass changelog revision numbers, and I use them directly to walk manifest. + // if this assumption is wrong, fix this (lookup manifest revisions from changeset). 
+ public void walk(int rev1, int rev2, HgStatusInspector inspector) { + if (rev1 == rev2) { + throw new IllegalArgumentException(); + } + if (inspector == null) { + throw new IllegalArgumentException(); + } + if (inspector instanceof Record) { + ((Record) inspector).init(rev1, rev2, this); + } + if (rev1 == TIP) { + rev1 = repo.getManifest().getLastRevision(); + } + if (rev2 == TIP) { + rev2 = repo.getManifest().getLastRevision(); + } + // in fact, rev1 and rev2 are often next (or close) to each other, + // thus, we can optimize Manifest reads here (manifest.walk(rev1, rev2)) + ManifestRevisionInspector r1, r2 ; + if (!cache.containsKey(rev1) && !cache.containsKey(rev2) && Math.abs(rev1 - rev2) < 5 /*subjective equivalent of 'close enough'*/) { + int minRev = rev1 < rev2 ? rev1 : rev2; + int maxRev = minRev == rev1 ? rev2 : rev1; + if (minRev > 0) { + minRev--; // expand range a bit + // XXX perhaps, if revlog.baseRevision is cheap, shall expand minRev up to baseRevision + // which gonna be read anyway + } + + repo.getManifest().walk(minRev, maxRev, new HgManifest.Inspector() { + private ManifestRevisionInspector delegate; + + public boolean begin(int revision, Nodeid nid) { + cache.put(revision, delegate = new ManifestRevisionInspector()); + delegate.begin(revision, nid); + return true; + } + + public boolean next(Nodeid nid, String fname, String flags) { + delegate.next(nid, fname, flags); + return true; + } + + public boolean end(int revision) { + delegate.end(revision); + delegate = null; + return true; + } + }); + } + r1 = get(rev1); + r2 = get(rev2); + + PathPool pp = getPathPool(); + + TreeSet<String> r1Files = new TreeSet<String>(r1.files()); + for (String fname : r2.files()) { + if (r1Files.remove(fname)) { + Nodeid nidR1 = r1.nodeid(fname); + Nodeid nidR2 = r2.nodeid(fname); + String flagsR1 = r1.flags(fname); + String flagsR2 = r2.flags(fname); + if (nidR1.equals(nidR2) && ((flagsR2 == null && flagsR1 == null) || flagsR2.equals(flagsR1))) { + inspector.clean(pp.path(fname)); + } else { + inspector.modified(pp.path(fname)); + } + } else { + try { + Path copyTarget = pp.path(fname); + Path copyOrigin = getOriginIfCopy(repo, copyTarget, r1Files, rev1); + if (copyOrigin != null) { + inspector.copied(pp.path(copyOrigin) /*pipe through pool, just in case*/, copyTarget); + } else { + inspector.added(copyTarget); + } + } catch (HgDataStreamException ex) { + ex.printStackTrace(); + // FIXME perhaps, shall record this exception to dedicated mediator and continue + // for a single file not to be irresolvable obstacle for a status operation + } + } + } + for (String left : r1Files) { + inspector.removed(pp.path(left)); + } + } + + public Record status(int rev1, int rev2) { + Record rv = new Record(); + walk(rev1, rev2, rv); + return rv; + } + + /*package-local*/static Path getOriginIfCopy(HgRepository hgRepo, Path fname, Collection<String> originals, int originalChangelogRevision) throws HgDataStreamException { + HgDataFile df = hgRepo.getFileNode(fname); + while (df.isCopy()) { + Path original = df.getCopySourceName(); + if (originals.contains(original.toString())) { + df = hgRepo.getFileNode(original); + int changelogRevision = df.getChangesetLocalRevision(0); + if (changelogRevision <= originalChangelogRevision) { + // copy/rename source was known prior to rev1 + // (both r1Files.contains is true and original was created earlier than rev1) + // without r1Files.contains changelogRevision <= rev1 won't suffice as the file + // might get removed somewhere in between (changelogRevision < R < 
rev1) + return original; + } + break; // copy/rename done later + } + df = hgRepo.getFileNode(original); // try more steps away + } + return null; + } + + // XXX for r1..r2 status, only modified, added, removed (and perhaps, clean) make sense + // XXX Need to specify whether copy targets are in added or not (@see Inspector#copied above) + public static class Record implements HgStatusInspector { + private List<Path> modified, added, removed, clean, missing, unknown, ignored; + private Map<Path, Path> copied; + + private int startRev, endRev; + private HgStatusCollector statusHelper; + + // XXX StatusCollector may additionally initialize Record instance to speed lookup of changed file revisions + // here I need access to ManifestRevisionInspector via #raw(). Perhaps, non-static class (to get + // implicit reference to StatusCollector) may be better? + // Since users may want to reuse Record instance we've once created (and initialized), we need to + // ensure functionality is correct for each/any call (#walk checks instanceof Record and fixes it up) + // Perhaps, distinct helper (sc.getRevisionHelper().nodeid(fname)) would be better, just not clear + // how to supply [start..end] values there easily + /*package-local*/void init(int startRevision, int endRevision, HgStatusCollector self) { + startRev = startRevision; + endRev = endRevision; + statusHelper = self; + } + + public Nodeid nodeidBeforeChange(Path fname) { + if (statusHelper == null || startRev == BAD_REVISION) { + return null; + } + if ((modified == null || !modified.contains(fname)) && (removed == null || !removed.contains(fname))) { + return null; + } + return statusHelper.raw(startRev).nodeid(fname.toString()); + } + public Nodeid nodeidAfterChange(Path fname) { + if (statusHelper == null || endRev == BAD_REVISION) { + return null; + } + if ((modified == null || !modified.contains(fname)) && (added == null || !added.contains(fname))) { + return null; + } + return statusHelper.raw(endRev).nodeid(fname.toString()); + } + + public List<Path> getModified() { + return proper(modified); + } + + public List<Path> getAdded() { + return proper(added); + } + + public List<Path> getRemoved() { + return proper(removed); + } + + public Map<Path,Path> getCopied() { + if (copied == null) { + return Collections.emptyMap(); + } + return Collections.unmodifiableMap(copied); + } + + public List<Path> getClean() { + return proper(clean); + } + + public List<Path> getMissing() { + return proper(missing); + } + + public List<Path> getUnknown() { + return proper(unknown); + } + + public List<Path> getIgnored() { + return proper(ignored); + } + + private List<Path> proper(List<Path> l) { + if (l == null) { + return Collections.emptyList(); + } + return Collections.unmodifiableList(l); + } + + // + // + + public void modified(Path fname) { + modified = doAdd(modified, fname); + } + + public void added(Path fname) { + added = doAdd(added, fname); + } + + public void copied(Path fnameOrigin, Path fnameAdded) { + if (copied == null) { + copied = new LinkedHashMap<Path, Path>(); + } + added(fnameAdded); + copied.put(fnameAdded, fnameOrigin); + } + + public void removed(Path fname) { + removed = doAdd(removed, fname); + } + + public void clean(Path fname) { + clean = doAdd(clean, fname); + } + + public void missing(Path fname) { + missing = doAdd(missing, fname); + } + + public void unknown(Path fname) { + unknown = doAdd(unknown, fname); + } + + public void ignored(Path fname) { + ignored = doAdd(ignored, fname); + } + + private static List<Path> 
doAdd(List<Path> l, Path p) { + if (l == null) { + l = new LinkedList<Path>(); + } + l.add(p); + return l; + } + } + + /*package-local*/ static final class ManifestRevisionInspector implements HgManifest.Inspector { + private final TreeMap<String, Nodeid> idsMap; + private final TreeMap<String, String> flagsMap; + + public ManifestRevisionInspector() { + idsMap = new TreeMap<String, Nodeid>(); + flagsMap = new TreeMap<String, String>(); + } + + public Collection<String> files() { + return idsMap.keySet(); + } + + public Nodeid nodeid(String fname) { + return idsMap.get(fname); + } + + public String flags(String fname) { + return flagsMap.get(fname); + } + + // + + public boolean next(Nodeid nid, String fname, String flags) { + idsMap.put(fname, nid); + flagsMap.put(fname, flags); + return true; + } + + public boolean end(int revision) { + // in fact, this class cares about single revision + return false; + } + + public boolean begin(int revision, Nodeid nid) { + return true; + } + } + +}
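A usage sketch for the collector above with the bundled Record inspector, which buckets paths per status; the revision pair 0..3 is arbitrary and the demo class is not part of the changeset.

import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.repo.HgStatusCollector;
import org.tmatesoft.hg.util.Path;

public class StatusDemo {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        HgStatusCollector sc = new HgStatusCollector(repo);
        HgStatusCollector.Record record = sc.status(0, 3); // arbitrary revision pair
        for (Path p : record.getModified()) {
            System.out.println("M " + p);
        }
        for (Path p : record.getAdded()) {
            System.out.println("A " + p);
        }
        for (Path p : record.getRemoved()) {
            System.out.println("R " + p);
        }
    }
}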
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgStatusInspector.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import org.tmatesoft.hg.util.Path; + +/** + * Callback to get file status information + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface HgStatusInspector { + void modified(Path fname); + void added(Path fname); + // XXX need to specify whether StatusCollector invokes added() along with copied or not! + void copied(Path fnameOrigin, Path fnameAdded); // if copied files of no interest, should delegate to self.added(fnameAdded); + void removed(Path fname); + void clean(Path fname); + void missing(Path fname); // aka deleted (tracked by Hg, but not available in FS any more + void unknown(Path fname); // not tracked + void ignored(Path fname); +} \ No newline at end of file
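Alternatively to collecting into a Record, the callback can be implemented directly, for instance to stream the equivalent of "hg status --change <rev>" as entries are discovered. The revision number and demo class below are arbitrary, and the callbacks that are not reported for a revision pair are left empty.

import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.repo.HgStatusCollector;
import org.tmatesoft.hg.repo.HgStatusInspector;
import org.tmatesoft.hg.util.Path;

public class ChangeStatusDemo {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        new HgStatusCollector(repo).change(1, new HgStatusInspector() { // arbitrary revision
            public void modified(Path fname) { System.out.println("M " + fname); }
            public void added(Path fname)    { System.out.println("A " + fname); }
            public void copied(Path fnameOrigin, Path fnameAdded) {
                System.out.println("A " + fnameAdded + " (copied from " + fnameOrigin + ")");
            }
            public void removed(Path fname)  { System.out.println("R " + fname); }
            public void clean(Path fname)    { /* not of interest here */ }
            public void missing(Path fname)  { /* not reported for a revision pair */ }
            public void unknown(Path fname)  { /* not reported for a revision pair */ }
            public void ignored(Path fname)  { /* not reported for a revision pair */ }
        });
    }
}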
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgTags.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import org.tmatesoft.hg.core.Nodeid; + +/** + * @see http://mercurial.selenic.com/wiki/TagDesign + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class HgTags { + // global tags come from ".hgtags" + // local come from ".hg/localtags" + + private final Map<Nodeid, List<String>> globalToName; + private final Map<Nodeid, List<String>> localToName; + private final Map<String, List<Nodeid>> globalFromName; + private final Map<String, List<Nodeid>> localFromName; + + + /*package-local*/ HgTags() { + globalToName = new HashMap<Nodeid, List<String>>(); + localToName = new HashMap<Nodeid, List<String>>(); + globalFromName = new TreeMap<String, List<Nodeid>>(); + localFromName = new TreeMap<String, List<Nodeid>>(); + } + + /*package-local*/ void readLocal(File localTags) throws IOException { + if (localTags == null || localTags.isDirectory()) { + throw new IllegalArgumentException(String.valueOf(localTags)); + } + read(localTags, localToName, localFromName); + } + + /*package-local*/ void readGlobal(File globalTags) throws IOException { + if (globalTags == null || globalTags.isDirectory()) { + throw new IllegalArgumentException(String.valueOf(globalTags)); + } + read(globalTags, globalToName, globalFromName); + } + + private void read(File f, Map<Nodeid,List<String>> nid2name, Map<String, List<Nodeid>> name2nid) throws IOException { + if (!f.canRead()) { + return; + } + BufferedReader r = null; + try { + r = new BufferedReader(new FileReader(f)); + read(r, nid2name, name2nid); + } finally { + if (r != null) { + r.close(); + } + } + } + + private void read(BufferedReader reader, Map<Nodeid,List<String>> nid2name, Map<String, List<Nodeid>> name2nid) throws IOException { + String line; + while ((line = reader.readLine()) != null) { + line = line.trim(); + if (line.length() == 0) { + continue; + } + if (line.length() < 40+2 /*nodeid, space and at least single-char tagname*/) { + System.out.println("Bad tags line:" + line); // FIXME log or otherwise report (IStatus analog?) 
+ continue; + } + int spacePos = line.indexOf(' '); + if (spacePos != -1) { + assert spacePos == 40; + final byte[] nodeidBytes = line.substring(0, spacePos).getBytes(); + Nodeid nid = Nodeid.fromAscii(nodeidBytes, 0, nodeidBytes.length); + String tagName = line.substring(spacePos+1); + List<Nodeid> nids = name2nid.get(tagName); + if (nids == null) { + nids = new LinkedList<Nodeid>(); + // tagName is substring of full line, thus need a copy to let the line be GC'ed + // new String(tagName.toCharArray()) is more expressive, but results in 1 extra arraycopy + tagName = new String(tagName); + name2nid.put(tagName, nids); + } + // XXX repo.getNodeidCache().nodeid(nid); + ((LinkedList<Nodeid>) nids).addFirst(nid); + List<String> revTags = nid2name.get(nid); + if (revTags == null) { + revTags = new LinkedList<String>(); + nid2name.put(nid, revTags); + } + revTags.add(tagName); + } else { + System.out.println("Bad tags line:" + line); // FIXME see above + } + } + } + + public List<String> tags(Nodeid nid) { + ArrayList<String> rv = new ArrayList<String>(5); + List<String> l; + if ((l = localToName.get(nid)) != null) { + rv.addAll(l); + } + if ((l = globalToName.get(nid)) != null) { + rv.addAll(l); + } + return rv; + } + + public boolean isTagged(Nodeid nid) { + return localToName.containsKey(nid) || globalToName.containsKey(nid); + } + + public List<Nodeid> tagged(String tagName) { + ArrayList<Nodeid> rv = new ArrayList<Nodeid>(5); + List<Nodeid> l; + if ((l = localFromName.get(tagName)) != null) { + rv.addAll(l); + } + if ((l = globalFromName.get(tagName)) != null) { + rv.addAll(l); + } + return rv; + } +}
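Since HgTags above keeps both the nodeid-to-name and name-to-nodeid mappings, a tag can be resolved to its changesets and each changeset asked for all of its tags. A sketch, with the tag name "v1.0" and the demo class as placeholders:

import java.util.List;

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

public class TagsDemo {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        List<Nodeid> tagged = repo.getTags().tagged("v1.0"); // placeholder tag name
        for (Nodeid nid : tagged) {
            System.out.println("v1.0 -> " + nid + ", all tags of it: " + repo.getTags().tags(nid));
        }
    }
}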
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/HgWorkingCopyStatusCollector.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,335 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import static java.lang.Math.max; +import static java.lang.Math.min; +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.util.Collections; +import java.util.Set; +import java.util.TreeSet; + +import org.tmatesoft.hg.core.HgDataStreamException; +import org.tmatesoft.hg.core.HgException; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.ByteArrayChannel; +import org.tmatesoft.hg.internal.FilterByteChannel; +import org.tmatesoft.hg.repo.HgStatusCollector.ManifestRevisionInspector; +import org.tmatesoft.hg.util.ByteChannel; +import org.tmatesoft.hg.util.CancelledException; +import org.tmatesoft.hg.util.FileIterator; +import org.tmatesoft.hg.util.Path; +import org.tmatesoft.hg.util.PathPool; +import org.tmatesoft.hg.util.PathRewrite; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. 
+ */ +public class HgWorkingCopyStatusCollector { + + private final HgRepository repo; + private final FileIterator repoWalker; + private HgDirstate dirstate; + private HgStatusCollector baseRevisionCollector; + private PathPool pathPool; + + public HgWorkingCopyStatusCollector(HgRepository hgRepo) { + this(hgRepo, hgRepo.createWorkingDirWalker()); + } + + HgWorkingCopyStatusCollector(HgRepository hgRepo, FileIterator hgRepoWalker) { + this.repo = hgRepo; + this.repoWalker = hgRepoWalker; + } + + /** + * Optionally, supply a collector instance that may cache (or have already cached) base revision + * @param sc may be null + */ + public void setBaseRevisionCollector(HgStatusCollector sc) { + baseRevisionCollector = sc; + } + + /*package-local*/ PathPool getPathPool() { + if (pathPool == null) { + if (baseRevisionCollector == null) { + pathPool = new PathPool(new PathRewrite.Empty()); + } else { + return baseRevisionCollector.getPathPool(); + } + } + return pathPool; + } + + public void setPathPool(PathPool pathPool) { + this.pathPool = pathPool; + } + + + private HgDirstate getDirstate() { + if (dirstate == null) { + dirstate = repo.loadDirstate(); + } + return dirstate; + } + + // may be invoked few times + public void walk(int baseRevision, HgStatusInspector inspector) { + final HgIgnore hgIgnore = repo.getIgnore(); + TreeSet<String> knownEntries = getDirstate().all(); + final boolean isTipBase; + if (baseRevision == TIP) { + baseRevision = repo.getManifest().getRevisionCount() - 1; + isTipBase = true; + } else { + isTipBase = baseRevision == repo.getManifest().getRevisionCount() - 1; + } + HgStatusCollector.ManifestRevisionInspector collect = null; + Set<String> baseRevFiles = Collections.emptySet(); + if (!isTipBase) { + if (baseRevisionCollector != null) { + collect = baseRevisionCollector.raw(baseRevision); + } else { + collect = new HgStatusCollector.ManifestRevisionInspector(); + repo.getManifest().walk(baseRevision, baseRevision, collect); + } + baseRevFiles = new TreeSet<String>(collect.files()); + } + if (inspector instanceof HgStatusCollector.Record) { + HgStatusCollector sc = baseRevisionCollector == null ? new HgStatusCollector(repo) : baseRevisionCollector; + ((HgStatusCollector.Record) inspector).init(baseRevision, BAD_REVISION, sc); + } + repoWalker.reset(); + final PathPool pp = getPathPool(); + while (repoWalker.hasNext()) { + repoWalker.next(); + Path fname = repoWalker.name(); + File f = repoWalker.file(); + if (hgIgnore.isIgnored(fname)) { + inspector.ignored(pp.path(fname)); + } else if (knownEntries.remove(fname.toString())) { + // modified, added, removed, clean + if (collect != null) { // need to check against base revision, not FS file + checkLocalStatusAgainstBaseRevision(baseRevFiles, collect, baseRevision, fname, f, inspector); + baseRevFiles.remove(fname.toString()); + } else { + checkLocalStatusAgainstFile(fname, f, inspector); + } + } else { + inspector.unknown(pp.path(fname)); + } + } + if (collect != null) { + for (String r : baseRevFiles) { + inspector.removed(pp.path(r)); + } + } + for (String m : knownEntries) { + // missing known file from a working dir + if (getDirstate().checkRemoved(m) == null) { + // not removed from the repository = 'deleted' + inspector.missing(pp.path(m)); + } else { + // removed from the repo + // if we check against non-tip revision, do not report files that were added past that revision and now removed. 
+ if (collect == null || baseRevFiles.contains(m)) { + inspector.removed(pp.path(m)); + } + } + } + } + + public HgStatusCollector.Record status(int baseRevision) { + HgStatusCollector.Record rv = new HgStatusCollector.Record(); + walk(baseRevision, rv); + return rv; + } + + //******************************************** + + + private void checkLocalStatusAgainstFile(Path fname, File f, HgStatusInspector inspector) { + HgDirstate.Record r; + if ((r = getDirstate().checkNormal(fname)) != null) { + // either clean or modified + if (f.lastModified() / 1000 == r.time && r.size == f.length()) { + inspector.clean(getPathPool().path(fname)); + } else { + // check actual content to avoid false modified files + HgDataFile df = repo.getFileNode(fname); + if (!areTheSame(f, df, HgRepository.TIP)) { + inspector.modified(df.getPath()); + } + } + } else if ((r = getDirstate().checkAdded(fname)) != null) { + if (r.name2 == null) { + inspector.added(getPathPool().path(fname)); + } else { + inspector.copied(getPathPool().path(r.name2), getPathPool().path(fname)); + } + } else if ((r = getDirstate().checkRemoved(fname)) != null) { + inspector.removed(getPathPool().path(fname)); + } else if ((r = getDirstate().checkMerged(fname)) != null) { + inspector.modified(getPathPool().path(fname)); + } + } + + // XXX refactor checkLocalStatus methods in more OO way + private void checkLocalStatusAgainstBaseRevision(Set<String> baseRevNames, ManifestRevisionInspector collect, int baseRevision, Path fname, File f, HgStatusInspector inspector) { + // fname is in the dirstate, either Normal, Added, Removed or Merged + Nodeid nid1 = collect.nodeid(fname.toString()); + String flags = collect.flags(fname.toString()); + HgDirstate.Record r; + if (nid1 == null) { + // normal: added? + // added: not known at the time of baseRevision, shall report + // merged: was not known, report as added? + if ((r = getDirstate().checkNormal(fname)) != null) { + try { + Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, baseRevNames, baseRevision); + if (origin != null) { + inspector.copied(getPathPool().path(origin), getPathPool().path(fname)); + return; + } + } catch (HgDataStreamException ex) { + ex.printStackTrace(); + // FIXME report to a mediator, continue status collection + } + } else if ((r = getDirstate().checkAdded(fname)) != null) { + if (r.name2 != null && baseRevNames.contains(r.name2)) { + baseRevNames.remove(r.name2); // XXX surely I shall not report rename source as Removed? + inspector.copied(getPathPool().path(r.name2), getPathPool().path(fname)); + return; + } + // fall-through, report as added + } else if (getDirstate().checkRemoved(fname) != null) { + // removed: removed file was not known at the time of baseRevision, and we should not report it as removed + return; + } + inspector.added(getPathPool().path(fname)); + } else { + // was known; check whether clean or modified + // when added - seems to be the case of a file added once again, hence need to check if content is different + if ((r = getDirstate().checkNormal(fname)) != null || (r = getDirstate().checkMerged(fname)) != null || (r = getDirstate().checkAdded(fname)) != null) { + // either clean or modified + HgDataFile fileNode = repo.getFileNode(fname); + final int lengthAtRevision = fileNode.length(nid1); + if (r.size /* XXX File.length() ?! 
*/ != lengthAtRevision || flags != todoGenerateFlags(fname /*java.io.File*/)) { + inspector.modified(getPathPool().path(fname)); + } else { + // check actual content to see actual changes + if (areTheSame(f, fileNode, fileNode.getLocalRevision(nid1))) { + inspector.clean(getPathPool().path(fname)); + } else { + inspector.modified(getPathPool().path(fname)); + } + } + } + // only those left in idsMap after processing are reported as removed + } + + // TODO think over if content comparison may be done more effectively by e.g. calculating nodeid for a local file and comparing it with nodeid from manifest + // we don't need to tell exact difference, hash should be enough to detect difference, and it doesn't involve reading historical file content, and it's relatively + // cheap to calc hash on a file (no need to keep it completely in memory). OTOH, if I'm right that the next approach is used for nodeids: + // changeset nodeid + hash(actual content) => entry (Nodeid) in the next Manifest + // then it's sufficient to check parents from dirstate, and if they do not match parents from file's baseRevision (non matching parents means different nodeids). + // The question is whether original Hg treats this case (same content, different parents and hence nodeids) as 'modified' or 'clean' + } + + private boolean areTheSame(File f, HgDataFile dataFile, int localRevision) { + // XXX consider adding HgDataDile.compare(File/byte[]/whatever) operation to optimize comparison + ByteArrayChannel bac = new ByteArrayChannel(); + boolean ioFailed = false; + try { + // need content with metadata striped off - although theoretically chances are metadata may be different, + // WC doesn't have it anyway + dataFile.content(localRevision, bac); + } catch (CancelledException ex) { + // silently ignore - can't happen, ByteArrayChannel is not cancellable + } catch (IOException ex) { + ioFailed = true; + } catch (HgException ex) { + ioFailed = true; + } + return !ioFailed && areTheSame(f, bac.toArray(), dataFile.getPath()); + } + + private boolean areTheSame(File f, final byte[] data, Path p) { + FileInputStream fis = null; + try { + try { + fis = new FileInputStream(f); + FileChannel fc = fis.getChannel(); + ByteBuffer fb = ByteBuffer.allocate(min(data.length, 8192)); + final boolean[] checkValue = new boolean[] { true }; + ByteChannel check = new ByteChannel() { + int x = 0; + final boolean debug = false; // XXX may want to add global variable to allow clients to turn + public int write(ByteBuffer buffer) { + for (int i = buffer.remaining(); i > 0; i--, x++) { + if (data[x] != buffer.get()) { + if (debug) { + byte[] xx = new byte[15]; + if (buffer.position() > 5) { + buffer.position(buffer.position() - 5); + } + buffer.get(xx); + System.out.print("expected >>" + new String(data, max(0, x - 4), 20) + "<< but got >>"); + System.out.println(new String(xx) + "<<"); + } + checkValue[0] = false; + break; + } + } + buffer.position(buffer.limit()); // mark as read + return buffer.limit(); + } + }; + FilterByteChannel filters = new FilterByteChannel(check, repo.getFiltersFromWorkingDirToRepo(p)); + while (fc.read(fb) != -1 && checkValue[0]) { + fb.flip(); + filters.write(fb); + fb.compact(); + } + return checkValue[0]; + } catch (IOException ex) { + if (fis != null) { + fis.close(); + } + ex.printStackTrace(); // log warn + } + } catch (/*TODO typed*/Exception ex) { + ex.printStackTrace(); + } + return false; + } + + private static String todoGenerateFlags(Path fname) { + // FIXME implement + return null; + } + +}
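A minimal usage sketch for the collector above, assuming a repository detected from the current working directory; the record type and TIP constant are the ones referenced in the code of this change.

import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;
import org.tmatesoft.hg.repo.HgStatusCollector;
import org.tmatesoft.hg.repo.HgWorkingCopyStatusCollector;
import org.tmatesoft.hg.util.Path;

public class WcStatusSketch {
    public static void main(String[] args) throws Exception {
        // locate the repository that owns the current working directory
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        HgWorkingCopyStatusCollector wcc = new HgWorkingCopyStatusCollector(repo);
        // compare working directory against the tip manifest and collect everything
        HgStatusCollector.Record record = wcc.status(HgRepository.TIP);
        for (Path p : record.getModified()) {
            System.out.println("M " + p);
        }
        for (Path p : record.getUnknown()) {
            System.out.println("? " + p);
        }
    }
}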
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/Revlog.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,358 @@ +/* + * Copyright (c) 2010-2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.repo; + +import static org.tmatesoft.hg.repo.HgRepository.BAD_REVISION; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import org.tmatesoft.hg.core.HgBadStateException; +import org.tmatesoft.hg.core.HgException; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.internal.DataAccess; +import org.tmatesoft.hg.internal.RevlogStream; +import org.tmatesoft.hg.util.ByteChannel; +import org.tmatesoft.hg.util.CancelSupport; +import org.tmatesoft.hg.util.CancelledException; +import org.tmatesoft.hg.util.ProgressSupport; + + +/** + * Base class for all Mercurial entities that are serialized in a so called revlog format (changelog, manifest, data files). + * + * Implementation note: + * Hides actual actual revlog stream implementation and its access methods (i.e. RevlogStream.Inspector), iow shall not expose anything internal + * in public methods. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +abstract class Revlog { + + private final HgRepository repo; + protected final RevlogStream content; + + protected Revlog(HgRepository hgRepo, RevlogStream contentStream) { + if (hgRepo == null) { + throw new IllegalArgumentException(); + } + if (contentStream == null) { + throw new IllegalArgumentException(); + } + repo = hgRepo; + content = contentStream; + } + + // invalid Revlog + protected Revlog(HgRepository hgRepo) { + repo = hgRepo; + content = null; + } + + public final HgRepository getRepo() { + return repo; + } + + public final int getRevisionCount() { + return content.revisionCount(); + } + + public final int getLastRevision() { + return content.revisionCount() - 1; + } + + public final Nodeid getRevision(int revision) { + // XXX cache nodeids? 
+ return Nodeid.fromBinary(content.nodeid(revision), 0); + } + + public final int getLocalRevision(Nodeid nid) { + int revision = content.findLocalRevisionNumber(nid); + if (revision == BAD_REVISION) { + throw new IllegalArgumentException(String.format("%s doesn't represent a revision of %s", nid.toString(), this /*XXX HgDataFile.getPath might be more suitable here*/)); + } + return revision; + } + + // Till now, i follow approach that NULL nodeid is never part of revlog + public final boolean isKnown(Nodeid nodeid) { + final int rn = content.findLocalRevisionNumber(nodeid); + if (Integer.MIN_VALUE == rn) { + return false; + } + if (rn < 0 || rn >= content.revisionCount()) { + // Sanity check + throw new IllegalStateException(); + } + return true; + } + + /** + * Access to revision data as is (decompressed, but otherwise unprocessed, i.e. not parsed for e.g. changeset or manifest entries) + * @param nodeid + */ + protected void rawContent(Nodeid nodeid, ByteChannel sink) throws HgException, IOException, CancelledException { + rawContent(getLocalRevision(nodeid), sink); + } + + /** + * @param revision - repo-local index of this file change (not a changelog revision number!) + */ + protected void rawContent(int revision, ByteChannel sink) throws HgException, IOException, CancelledException { + if (sink == null) { + throw new IllegalArgumentException(); + } + ContentPipe insp = new ContentPipe(sink, 0); + insp.checkCancelled(); + content.iterate(revision, revision, true, insp); + insp.checkFailed(); + } + + /** + * XXX perhaps, return value Nodeid[2] and boolean needNodeids is better (and higher level) API for this query? + * + * @param revision - revision to query parents, or {@link HgRepository#TIP} + * @param parentRevisions - int[2] to get local revision numbers of parents (e.g. {6, -1}) + * @param parent1 - byte[20] or null, if parent's nodeid is not needed + * @param parent2 - byte[20] or null, if second parent's nodeid is not needed + * @return + */ + public void parents(int revision, int[] parentRevisions, byte[] parent1, byte[] parent2) { + if (revision != TIP && !(revision >= 0 && revision < content.revisionCount())) { + throw new IllegalArgumentException(String.valueOf(revision)); + } + if (parentRevisions == null || parentRevisions.length < 2) { + throw new IllegalArgumentException(String.valueOf(parentRevisions)); + } + if (parent1 != null && parent1.length < 20) { + throw new IllegalArgumentException(parent1.toString()); + } + if (parent2 != null && parent2.length < 20) { + throw new IllegalArgumentException(parent2.toString()); + } + class ParentCollector implements RevlogStream.Inspector { + public int p1 = -1; + public int p2 = -1; + public byte[] nodeid; + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { + p1 = parent1Revision; + p2 = parent2Revision; + this.nodeid = new byte[20]; + // nodeid arg now comes in 32 byte from (as in file format description), however upper 12 bytes are zeros. + System.arraycopy(nodeid, nodeid.length > 20 ? 
nodeid.length - 20 : 0, this.nodeid, 0, 20); + } + }; + ParentCollector pc = new ParentCollector(); + content.iterate(revision, revision, false, pc); + parentRevisions[0] = pc.p1; + parentRevisions[1] = pc.p2; + if (parent1 != null) { + if (parentRevisions[0] == -1) { + Arrays.fill(parent1, 0, 20, (byte) 0); + } else { + content.iterate(parentRevisions[0], parentRevisions[0], false, pc); + System.arraycopy(pc.nodeid, 0, parent1, 0, 20); + } + } + if (parent2 != null) { + if (parentRevisions[1] == -1) { + Arrays.fill(parent2, 0, 20, (byte) 0); + } else { + content.iterate(parentRevisions[1], parentRevisions[1], false, pc); + System.arraycopy(pc.nodeid, 0, parent2, 0, 20); + } + } + } + + /* + * XXX think over if it's better to do either: + * pw = getChangelog().new ParentWalker(); pw.init() and pass pw instance around as needed + * or + * add Revlog#getParentWalker(), static class, make cons() and #init package-local, and keep SoftReference to allow walker reuse. + * + * and yes, walker is not a proper name + */ + public final class ParentWalker { + private Map<Nodeid, Nodeid> firstParent; + private Map<Nodeid, Nodeid> secondParent; + private Set<Nodeid> allNodes; + + public ParentWalker() { + firstParent = secondParent = Collections.emptyMap(); + allNodes = Collections.emptySet(); + } + + public void init() { + final RevlogStream stream = Revlog.this.content; + final int revisionCount = stream.revisionCount(); + firstParent = new HashMap<Nodeid, Nodeid>(revisionCount); + secondParent = new HashMap<Nodeid, Nodeid>(firstParent.size() >> 1); // assume branches/merges are less frequent + allNodes = new LinkedHashSet<Nodeid>(); + + RevlogStream.Inspector insp = new RevlogStream.Inspector() { + final Nodeid[] sequentialRevisionNodeids = new Nodeid[revisionCount]; + int ix = 0; + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { + if (ix != revisionNumber) { + // XXX temp code, just to make sure I understand what's going on here + throw new IllegalStateException(); + } + if (parent1Revision >= revisionNumber || parent2Revision >= revisionNumber) { + throw new IllegalStateException(); // sanity, revisions are sequential + } + final Nodeid nid = new Nodeid(nodeid, true); + sequentialRevisionNodeids[ix++] = nid; + allNodes.add(nid); + if (parent1Revision != -1) { + firstParent.put(nid, sequentialRevisionNodeids[parent1Revision]); + if (parent2Revision != -1) { + secondParent.put(nid, sequentialRevisionNodeids[parent2Revision]); + } + } + } + }; + stream.iterate(0, -1, false, insp); + } + + public Set<Nodeid> allNodes() { + return Collections.unmodifiableSet(allNodes); + } + + // FIXME need to decide whether Nodeid(00 * 20) is always known or not + public boolean knownNode(Nodeid nid) { + return allNodes.contains(nid); + } + + // null if none + public Nodeid firstParent(Nodeid nid) { + return firstParent.get(nid); + } + + // never null, Nodeid.NULL if none known + public Nodeid safeFirstParent(Nodeid nid) { + Nodeid rv = firstParent(nid); + return rv == null ? Nodeid.NULL : rv; + } + + public Nodeid secondParent(Nodeid nid) { + return secondParent.get(nid); + } + + public Nodeid safeSecondParent(Nodeid nid) { + Nodeid rv = secondParent(nid); + return rv == null ? 
Nodeid.NULL : rv; + } + + public boolean appendParentsOf(Nodeid nid, Collection<Nodeid> c) { + Nodeid p1 = firstParent(nid); + boolean modified = false; + if (p1 != null) { + modified = c.add(p1); + Nodeid p2 = secondParent(nid); + if (p2 != null) { + modified = c.add(p2) || modified; + } + } + return modified; + } + } + + protected static class ContentPipe implements RevlogStream.Inspector, CancelSupport { + private final ByteChannel sink; + private final CancelSupport cancelSupport; + private Exception failure; + private final int offset; + + /** + * @param _sink - cannot be <code>null</code> + * @param seekOffset - when positive, orders to pipe bytes to the sink starting from specified offset, not from the first byte available in DataAccess + */ + public ContentPipe(ByteChannel _sink, int seekOffset) { + assert _sink != null; + sink = _sink; + cancelSupport = CancelSupport.Factory.get(_sink); + offset = seekOffset; + } + + protected void prepare(int revisionNumber, DataAccess da) throws HgException, IOException { + if (offset > 0) { // save few useless reset/rewind operations + da.seek(offset); + } + } + + public void next(int revisionNumber, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess da) { + try { + prepare(revisionNumber, da); // XXX perhaps, prepare shall return DA (sliced, if needed) + final ProgressSupport progressSupport = ProgressSupport.Factory.get(sink); + ByteBuffer buf = ByteBuffer.allocate(512); + progressSupport.start(da.length()); + while (!da.isEmpty()) { + cancelSupport.checkCancelled(); + da.readBytes(buf); + buf.flip(); + // XXX I may not rely on returned number of bytes but track change in buf position instead. + int consumed = sink.write(buf); + // FIXME in fact, bad sink implementation (that consumes no bytes) would result in endless loop. Need to account for this + buf.compact(); + progressSupport.worked(consumed); + } + progressSupport.done(); // XXX shall specify whether #done() is invoked always or only if completed successfully. + } catch (IOException ex) { + recordFailure(ex); + } catch (CancelledException ex) { + recordFailure(ex); + } catch (HgException ex) { + recordFailure(ex); + } + } + + public void checkCancelled() throws CancelledException { + cancelSupport.checkCancelled(); + } + + protected void recordFailure(Exception ex) { + assert failure == null; + failure = ex; + } + + public void checkFailed() throws HgException, IOException, CancelledException { + if (failure == null) { + return; + } + if (failure instanceof IOException) { + throw (IOException) failure; + } + if (failure instanceof CancelledException) { + throw (CancelledException) failure; + } + if (failure instanceof HgException) { + throw (HgException) failure; + } + throw new HgBadStateException(failure); + } + } +}
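A client-side sketch of the revlog API above, assuming HgDataFile is a public Revlog subclass (as the class javadoc implies for data files); the repository location and tracked file name are hypothetical.

import org.tmatesoft.hg.core.Nodeid;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

public class RevlogSketch {
    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        HgDataFile df = repo.getFileNode("src/Main.java"); // hypothetical tracked file
        int last = df.getLastRevision();
        Nodeid tipOfFile = df.getRevision(last);
        System.out.println("file revisions: " + df.getRevisionCount() + ", tip: " + tipOfFile);
        // local revision numbers of both parents of the last file revision
        int[] parentRevs = new int[2];
        byte[] p1 = new byte[20], p2 = new byte[20];
        df.parents(last, parentRevs, p1, p2);
        System.out.printf("parents of %d: %d and %d%n", last, parentRevs[0], parentRevs[1]);
    }
}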
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/repo/package.html Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,5 @@ +<html> +<body> +Low-level API operations +</body> +</html> \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/Adaptable.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +/** + * Extension mechanism. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface Adaptable { + + <T> T getAdapter(Class<T> adapterClass); +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/ByteChannel.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * Much like {@link java.nio.channels.WritableByteChannel} except for thrown exception + * + * XXX Perhaps, we'll add CharChannel in the future to deal with character conversions/encodings + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface ByteChannel { + // XXX does int return value makes any sense given buffer keeps its read state + // not clear what retvalue should be in case some filtering happened inside write - i.e. return + // number of bytes consumed in + int write(ByteBuffer buffer) throws IOException, CancelledException; +}
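A minimal ByteChannel implementation illustrating the contract above (consume whatever the buffer holds and report how many bytes were taken); this counting sink is illustrative, not part of the library.

import java.nio.ByteBuffer;

import org.tmatesoft.hg.util.ByteChannel;

// counts bytes pushed through it; the int return value is the number of bytes consumed
public class CountingByteChannel implements ByteChannel {
    private long total;

    public int write(ByteBuffer buffer) {
        int n = buffer.remaining();
        buffer.position(buffer.limit()); // mark everything as read
        total += n;
        return n;
    }

    public long total() {
        return total;
    }
}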
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/CancelSupport.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +/** + * Mix-in for objects that support cancellation. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface CancelSupport { + + /** + * This method is invoked to check if target had been brought to canceled state. Shall silently return if target is + * in regular state. + * @throws CancelledException when target internal state has been changed to canceled. + */ + void checkCancelled() throws CancelledException; + + + // Yeah, this factory class looks silly now, but perhaps in the future I'll need wrappers for other cancellation sources? + // just don't want to have general Utils class with methods like get() below + static class Factory { + + /** + * Obtain non-null cancel support object. + * + * @param target any object (or <code>null</code>) that might have cancel support + * @return target if it's capable checking cancellation status or no-op implementation that never cancels. + */ + public static CancelSupport get(Object target) { + if (target instanceof CancelSupport) { + return (CancelSupport) target; + } + if (target instanceof Adaptable) { + CancelSupport cs = ((Adaptable) target).getAdapter(CancelSupport.class); + if (cs != null) { + return cs; + } + } + return new CancelSupport() { + public void checkCancelled() { + } + }; + } + } +}
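A sketch of how the Factory above resolves a cancel source through the Adaptable mix-in introduced in this change. The sink class is hypothetical and only shows the wiring.

import java.nio.ByteBuffer;

import org.tmatesoft.hg.util.Adaptable;
import org.tmatesoft.hg.util.ByteChannel;
import org.tmatesoft.hg.util.CancelSupport;
import org.tmatesoft.hg.util.CancelledException;

// exposes cancellation via getAdapter() instead of implementing CancelSupport directly
public class CancellableSink implements ByteChannel, Adaptable {
    private volatile boolean cancelled;

    public void cancel() {
        cancelled = true;
    }

    public int write(ByteBuffer buffer) {
        int n = buffer.remaining();
        buffer.position(buffer.limit()); // consume everything
        return n;
    }

    public <T> T getAdapter(Class<T> adapterClass) {
        if (adapterClass == CancelSupport.class) {
            return adapterClass.cast(new CancelSupport() {
                public void checkCancelled() throws CancelledException {
                    if (cancelled) {
                        throw new CancelledException();
                    }
                }
            });
        }
        return null;
    }
}
// CancelSupport.Factory.get(new CancellableSink()) returns the adapter above;
// for objects without any cancel support it falls back to a never-cancelling no-op.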
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/CancelledException.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +@SuppressWarnings("serial") +public class CancelledException extends Exception { + + public CancelledException() { + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/FileIterator.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +import java.io.File; + +/** + * Abstracts iteration over file system. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface FileIterator { + + /** + * Brings iterator into initial state to facilitate new use. + */ + void reset(); + + /** + * @return whether can shift to next element + */ + boolean hasNext(); + + /** + * Shift to next element + */ + void next(); + + /** + * @return repository-local path to the current element. + */ + Path name(); + + /** + * @return filesystem element. + */ + File file(); +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/FileWalker.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +import java.io.File; +import java.util.LinkedList; +import java.util.NoSuchElementException; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class FileWalker implements FileIterator { + + private final File startDir; + private final Path.Source pathHelper; + private final LinkedList<File> dirQueue; + private final LinkedList<File> fileQueue; + private File nextFile; + private Path nextPath; + + public FileWalker(File dir, Path.Source pathFactory) { + startDir = dir; + pathHelper = pathFactory; + dirQueue = new LinkedList<File>(); + fileQueue = new LinkedList<File>(); + reset(); + } + + public void reset() { + fileQueue.clear(); + dirQueue.clear(); + dirQueue.add(startDir); + nextFile = null; + nextPath = null; + } + + public boolean hasNext() { + return fill(); + } + + public void next() { + if (!fill()) { + throw new NoSuchElementException(); + } + nextFile = fileQueue.removeFirst(); + nextPath = pathHelper.path(nextFile.getPath()); + } + + public Path name() { + return nextPath; + } + + public File file() { + return nextFile; + } + + private File[] listFiles(File f) { + // in case we need to solve os-related file issues (mac with some encodings?) + return f.listFiles(); + } + + // return true when fill added any elements to fileQueue. + private boolean fill() { + while (fileQueue.isEmpty()) { + if (dirQueue.isEmpty()) { + return false; + } + while (!dirQueue.isEmpty()) { + File dir = dirQueue.removeFirst(); + for (File f : listFiles(dir)) { + if (f.isDirectory()) { + if (!".hg".equals(f.getName())) { + dirQueue.addLast(f); + } + } else { + fileQueue.addLast(f); + } + } + break; + } + } + return !fileQueue.isEmpty(); + } +}
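A usage sketch for the walker above; the directory is hypothetical, and the Path.Source is the plain SimpleSource with no rewriting (real clients likely need a rewrite that strips the start-directory prefix and normalizes separators, since Path.create rejects backslashes).

import java.io.File;

import org.tmatesoft.hg.util.FileWalker;
import org.tmatesoft.hg.util.Path;
import org.tmatesoft.hg.util.PathRewrite;

public class WalkSketch {
    public static void main(String[] args) {
        File workingDir = new File("work/repo"); // hypothetical checkout
        FileWalker fw = new FileWalker(workingDir, new Path.SimpleSource(new PathRewrite.Empty()));
        while (fw.hasNext()) {
            fw.next();
            System.out.println(fw.name() + " (" + fw.file().length() + " bytes)");
        }
    }
}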
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/Path.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +/** + * Identify repository files (not String nor io.File). Convenient for pattern matching. Memory-friendly. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public final class Path implements CharSequence, Comparable<Path>/*Cloneable? - although clone for paths make no sense*/{ +// private String[] segments; +// private int flags; // dir, unparsed + private String path; + + /*package-local*/Path(String p) { + path = p; + } + + /** + * Check if this is directory's path. + * Note, this method doesn't perform any file system operation. + * + * @return true when this path points to a directory + */ + public boolean isDirectory() { + // XXX simple logic for now. Later we may decide to have an explicit factory method to create directory paths + return path.charAt(path.length() - 1) == '/'; + } + + public int length() { + return path.length(); + } + + public char charAt(int index) { + return path.charAt(index); + } + + public CharSequence subSequence(int start, int end) { + // new Path if start-end matches boundaries of any subpath + return path.substring(start, end); + } + + @Override + public String toString() { + return path; // CharSequence demands toString() impl + } + + public int compareTo(Path o) { + return path.compareTo(o.path); + } + + @Override + public boolean equals(Object obj) { + if (obj != null && getClass() == obj.getClass()) { + return this == obj || path.equals(((Path) obj).path); + } + return false; + } + @Override + public int hashCode() { + return path.hashCode(); + } + + public static Path create(String path) { + if (path == null) { + throw new IllegalArgumentException(); + } + if (path.indexOf('\\') != -1) { + throw new IllegalArgumentException(); + } + Path rv = new Path(path); + return rv; + } + + /** + * Path filter. + */ + public interface Matcher { + boolean accept(Path path); + } + + /** + * Factory for paths + */ + public interface Source { + Path path(String p); + } + + /** + * Straightforward {@link Source} implementation that creates new Path instance for each supplied string + */ + public static class SimpleSource implements Source { + private final PathRewrite normalizer; + + public SimpleSource(PathRewrite pathRewrite) { + if (pathRewrite == null) { + throw new IllegalArgumentException(); + } + normalizer = pathRewrite; + } + + public Path path(String p) { + return Path.create(normalizer.rewrite(p)); + } + } +}
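A small sketch of Path creation and the Matcher filter defined above; the file names are made up.

import org.tmatesoft.hg.util.Path;

public class PathSketch {
    public static void main(String[] args) {
        Path file = Path.create("src/org/tmatesoft/hg/util/Path.java");
        Path dir = Path.create("src/org/tmatesoft/hg/util/");
        System.out.println(dir.isDirectory());  // true: trailing slash marks a directory path
        System.out.println(file.isDirectory()); // false
        // a filter that accepts Java sources only
        Path.Matcher javaSources = new Path.Matcher() {
            public boolean accept(Path path) {
                return path.toString().endsWith(".java");
            }
        };
        System.out.println(javaSources.accept(file)); // true
    }
}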
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/PathPool.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +import java.lang.ref.SoftReference; +import java.util.WeakHashMap; + + +/** + * Produces path from strings and caches result for reuse + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class PathPool implements Path.Source { + private final WeakHashMap<String, SoftReference<Path>> cache; + private final PathRewrite pathRewrite; + + public PathPool(PathRewrite rewrite) { + pathRewrite = rewrite; + cache = new WeakHashMap<String, SoftReference<Path>>(); + } + + public Path path(String p) { + p = pathRewrite.rewrite(p); + return get(p, true); + } + + // pipes path object through cache to reuse instance, if possible + public Path path(Path p) { + String s = pathRewrite.rewrite(p.toString()); + Path cached = get(s, false); + if (cached == null) { + cache.put(s, new SoftReference<Path>(cached = p)); + } + return cached; + } + + // XXX what would be parent of an empty path? + // Path shall have similar functionality + public Path parent(Path path) { + if (path.length() == 0) { + throw new IllegalArgumentException(); + } + for (int i = path.length() - 2 /*if path represents a dir, trailing char is slash, skip*/; i >= 0; i--) { + if (path.charAt(i) == '/') { + return get(path.subSequence(0, i+1).toString(), true); + } + } + return get("", true); + } + + private Path get(String p, boolean create) { + SoftReference<Path> sr = cache.get(p); + Path path = sr == null ? null : sr.get(); + if (path == null) { + if (create) { + path = Path.create(p); + cache.put(p, new SoftReference<Path>(path)); + } else if (sr != null) { + // cached path no longer used, clear cache entry - do not wait for RefQueue to step in + cache.remove(p); + } + } + return path; + } +}
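A sketch of the pool above in action; note that instance reuse relies on soft references, so identity holds only while the cached entry is alive.

import org.tmatesoft.hg.util.Path;
import org.tmatesoft.hg.util.PathPool;
import org.tmatesoft.hg.util.PathRewrite;

public class PathPoolSketch {
    public static void main(String[] args) {
        PathPool pool = new PathPool(new PathRewrite.Empty());
        Path a = pool.path("dir/file.txt");
        Path b = pool.path("dir/file.txt");
        System.out.println(a == b);         // true while the softly-referenced entry survives
        System.out.println(pool.parent(a)); // "dir/"
    }
}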
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/PathRewrite.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +import java.util.LinkedList; +import java.util.List; + +/** + * File names often need transformations, like Windows-style path to Unix or human-readable data file name to storage location. + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface PathRewrite { + + // XXX think over CharSequence use instead of String + public String rewrite(String path); + + public static class Empty implements PathRewrite { + public String rewrite(String path) { + return path; + } + } + + public class Composite implements PathRewrite { + private List<PathRewrite> chain; + + public Composite(PathRewrite... e) { + LinkedList<PathRewrite> r = new LinkedList<PathRewrite>(); + for (int i = 0; e != null && i < e.length; i++) { + r.addLast(e[i]); + } + chain = r; + } + public Composite chain(PathRewrite e) { + chain.add(e); + return this; + } + + public String rewrite(String path) { + for (PathRewrite pr : chain) { + path = pr.rewrite(path); + } + return path; + } + } +}
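A sketch of chaining rewrites with the Composite above; both rewrites are ad-hoc examples and are applied in the order they were chained.

import org.tmatesoft.hg.util.PathRewrite;

public class RewriteSketch {
    public static void main(String[] args) {
        PathRewrite backslashToSlash = new PathRewrite() {
            public String rewrite(String path) {
                return path.replace('\\', '/');
            }
        };
        PathRewrite stripDrive = new PathRewrite() {
            public String rewrite(String path) {
                return path.length() > 1 && path.charAt(1) == ':' ? path.substring(2) : path;
            }
        };
        PathRewrite chain = new PathRewrite.Composite(backslashToSlash, stripDrive);
        System.out.println(chain.rewrite("C:\\work\\repo\\file.txt")); // /work/repo/file.txt
    }
}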
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/org/tmatesoft/hg/util/ProgressSupport.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.util; + +/** + * Mix-in to report progress of a long-running operation + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface ProgressSupport { + + public void start(long totalUnits); + public void worked(int units); + public void done(); + + static class Factory { + + /** + * @param target object that might be capable to report progress. Can be <code>null</code> + * @return support object extracted from target or an empty, no-op implementation + */ + public static ProgressSupport get(Object target) { + if (target instanceof ProgressSupport) { + return (ProgressSupport) target; + } + if (target instanceof Adaptable) { + ProgressSupport ps = ((Adaptable) target).getAdapter(ProgressSupport.class); + if (ps != null) { + return ps; + } + } + return new ProgressSupport() { + public void start(long totalUnits) { + } + public void worked(int units) { + } + public void done() { + } + }; + } + } +}
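A sketch of a sink that also reports progress. When such an object is passed where the library probes for ProgressSupport (the ContentPipe above calls ProgressSupport.Factory.get(sink)), these callbacks receive the overall data length and the consumed byte counts. The class itself is illustrative, not part of the library.

import java.nio.ByteBuffer;

import org.tmatesoft.hg.util.ByteChannel;
import org.tmatesoft.hg.util.ProgressSupport;

// consumes bytes and tracks how far along the pipe is
public class ProgressAwareSink implements ByteChannel, ProgressSupport {
    private long expected;
    private long received;

    public void start(long totalUnits) {
        expected = totalUnits;
    }

    public void worked(int units) {
        // hook a progress bar or logging here
    }

    public void done() {
        System.out.println(received + " of " + expected + " bytes received");
    }

    public int write(ByteBuffer buffer) {
        int n = buffer.remaining();
        buffer.position(buffer.limit()); // consume everything handed in
        received += n;
        return n;
    }
}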
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/Configuration.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import static org.junit.Assert.*; + +import java.io.File; + +import org.tmatesoft.hg.repo.HgLookup; +import org.tmatesoft.hg.repo.HgRepository; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class Configuration { + + private static Configuration inst; + private final File root; + private final HgLookup lookup; + + private Configuration(File reposRoot) { + root = reposRoot; + lookup = new HgLookup(); + } + + public static Configuration get() { + if (inst == null) { + String repo2 = System.getProperty("hg4j.tests.repos"); + assertNotNull(repo2); + File rr = new File(repo2); + assertTrue(rr.exists()); + inst = new Configuration(rr); + } + return inst; + } + + public HgRepository own() throws Exception { + return lookup.detectFromWorkingDir(); + } + + // fails if repo not found + public HgRepository find(String key) throws Exception { + HgRepository rv = lookup.detect(new File(root, key)); + assertNotNull(rv); + assertFalse(rv.isInvalid()); + return rv; + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/ErrorCollectorExt.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import static org.junit.Assert.assertThat; + +import java.util.concurrent.Callable; + +import org.hamcrest.Matcher; +import org.junit.rules.ErrorCollector; + +/** + * Expose verify method for allow not-junit runs to check test outcome + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +final class ErrorCollectorExt extends ErrorCollector { + public void verify() throws Throwable { + super.verify(); + } + + public <T> void checkThat(final String reason, final T value, final Matcher<T> matcher) { + checkSucceeds(new Callable<Object>() { + public Object call() throws Exception { + assertThat(reason, value, matcher); + return value; + } + }); + } +} \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/ExecHelper.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import java.io.File; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.CharBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.StringTokenizer; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class ExecHelper { + + private final OutputParser parser; + private final File dir; + + public ExecHelper(OutputParser outParser, File workingDir) { + parser = outParser; + dir = workingDir; + } + + public void run(String... cmd) throws IOException, InterruptedException { + ProcessBuilder pb = null; + if (System.getProperty("os.name").startsWith("Windows")) { + StringTokenizer st = new StringTokenizer(System.getenv("PATH"), ";"); + while (st.hasMoreTokens()) { + File pe = new File(st.nextToken()); + if (new File(pe, cmd[0] + ".exe").exists()) { + break; + } + // PATHEXT controls precedence of .exe, .bat and .cmd files, ususlly .exe wins + if (new File(pe, cmd[0] + ".bat").exists() || new File(pe, cmd[0] + ".cmd").exists()) { + ArrayList<String> command = new ArrayList<String>(); + command.add("cmd.exe"); + command.add("/C"); + command.addAll(Arrays.asList(cmd)); + pb = new ProcessBuilder(command); + break; + } + } + } + if (pb == null) { + pb = new ProcessBuilder(cmd); + } + Process p = pb.directory(dir).redirectErrorStream(true).start(); + InputStreamReader stdOut = new InputStreamReader(p.getInputStream()); + LinkedList<CharBuffer> l = new LinkedList<CharBuffer>(); + int r = -1; + CharBuffer b = null; + do { + if (b == null || b.remaining() < b.capacity() / 3) { + b = CharBuffer.allocate(512); + l.add(b); + } + r = stdOut.read(b); + } while (r != -1); + int total = 0; + for (CharBuffer cb : l) { + total += cb.position(); + cb.flip(); + } + CharBuffer res = CharBuffer.allocate(total); + for (CharBuffer cb : l) { + res.put(cb); + } + res.flip(); + p.waitFor(); + parser.parse(res); + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/LogOutputParser.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import java.util.LinkedList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.tmatesoft.hg.repo.HgRepository; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class LogOutputParser implements OutputParser { + private final List<Record> result = new LinkedList<Record>(); + private Pattern pattern1; + private Pattern pattern2; + private Pattern pattern3; + private Pattern pattern4; + private Pattern pattern5; + + public LogOutputParser(boolean outputWithDebug) { + if (outputWithDebug) { + pattern1 = Pattern.compile("^changeset:\\s+(\\d+):([a-f0-9]{40})\n(^tag:(.+)$)?", Pattern.MULTILINE); + pattern2 = Pattern.compile("^parent:\\s+(-?\\d+):([a-f0-9]{40})\n", Pattern.MULTILINE); + pattern3 = Pattern.compile("^manifest:\\s+(\\d+):([a-f0-9]{40})\nuser:\\s+(\\S.+)\ndate:\\s+(\\S.+)\n", Pattern.MULTILINE); + pattern4 = Pattern.compile("^description:\\n", Pattern.MULTILINE); + pattern5 = Pattern.compile("\\n\\n"); + //p = "^manifest:\\s+(\\d+):([a-f0-9]{40})\nuser:(.+)$"; + } else { + throw HgRepository.notImplemented(); + } + } + + public void reset() { + result.clear(); + } + + public List<Record> getResult() { + return result; + } + + public void parse(CharSequence seq) { + Matcher m = pattern1.matcher(seq); + while (m.find()) { + Record r = new Record(); + r.changesetIndex = Integer.parseInt(m.group(1)); + r.changesetNodeid = m.group(2); + //tags = m.group(4); + m.usePattern(pattern2); + if (m.find()) { + r.parent1Index = Integer.parseInt(m.group(1)); + r.parent1Nodeid = m.group(2); + } + if (m.find()) { + r.parent2Index = Integer.parseInt(m.group(1)); + r.parent2Nodeid = m.group(2); + } + m.usePattern(pattern3); + if (m.find()) { + r.user = m.group(3); + r.date = m.group(4); + } + m.usePattern(pattern4); + if (m.find()) { + int commentStart = m.end(); + m.usePattern(pattern5); + if (m.find()) { + r.description = seq.subSequence(commentStart, m.start()).toString(); + } + } + result.add(r); + m.usePattern(pattern1); + } + } + + public static class Record { + public int changesetIndex; + public String changesetNodeid; + public int parent1Index; + public int parent2Index; + public String parent1Nodeid; + public String parent2Nodeid; + public String user; + public String date; + public String description; + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/ManifestOutputParser.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.util.Path; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class ManifestOutputParser implements OutputParser { + + private final Pattern pattern; + private final LinkedHashMap<Path, Nodeid> result = new LinkedHashMap<Path, Nodeid>(); + + public ManifestOutputParser() { + pattern = Pattern.compile("^([a-f0-9]{40}) (\\d{3}) (.+)$", Pattern.MULTILINE); + } + + public void reset() { + result.clear(); + } + + public Map<Path, Nodeid> getResult() { + return result; + } + + public void parse(CharSequence seq) { + Matcher m = pattern.matcher(seq); + while (m.find()) { + result.put(Path.create(m.group(3)), Nodeid.fromAscii(m.group(1).getBytes(), 0, 40)); + } + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/OutputParser.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public interface OutputParser { + + public void parse(CharSequence seq); +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/StatusOutputParser.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import java.io.File; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.tmatesoft.hg.repo.HgStatusCollector; +import org.tmatesoft.hg.util.Path; +import org.tmatesoft.hg.util.PathPool; +import org.tmatesoft.hg.util.PathRewrite; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class StatusOutputParser implements OutputParser { + + private final Pattern pattern; + // although using StatusCollector.Record is not really quite honest for testing, + // it's deemed acceptable as long as that class is primitive 'collect all results' + private HgStatusCollector.Record result = new HgStatusCollector.Record(); + private final PathPool pathHelper; + + public StatusOutputParser() { +// pattern = Pattern.compile("^([MAR?IC! ]) ([\\w \\.-/\\\\]+)$", Pattern.MULTILINE); + pattern = Pattern.compile("^([MAR?IC! ]) (.+)$", Pattern.MULTILINE); + pathHelper = new PathPool(new PathRewrite() { + + private final boolean winPathSeparator = File.separatorChar == '\\'; + + public String rewrite(String s) { + if (winPathSeparator) { + // Java impl always give slashed path, while Hg uses local, os-specific convention + s = s.replace('\\', '/'); + } + return s; + } + }); + } + + public void reset() { + result = new HgStatusCollector.Record(); + } + + public void parse(CharSequence seq) { + Matcher m = pattern.matcher(seq); + Path lastEntry = null; + while (m.find()) { + Path fname = pathHelper.path(m.group(2)); + switch ((int) m.group(1).charAt(0)) { + case (int) 'M' : { + result.modified(fname); + lastEntry = fname; // for files modified through merge there's also 'copy' source + break; + } + case (int) 'A' : { + result.added(fname); + lastEntry = fname; + break; + } + case (int) 'R' : { + result.removed(fname); + break; + } + case (int) '?' : { + result.unknown(fname); + break; + } + case (int) 'I' : { + result.ignored(fname); + break; + } + case (int) 'C' : { + result.clean(fname); + break; + } + case (int) '!' 
: { + result.missing(fname); + break; + } + case (int) ' ' : { + // last added is copy destination + // to get or to remove it - depends on what StatusCollector does in this case + result.copied(fname, lastEntry); + lastEntry = null; + break; + } + } + } + } + + // + public List<Path> getModified() { + return result.getModified(); + } + + public List<Path> getAdded() { + List<Path> rv = new LinkedList<Path>(result.getAdded()); + for (Path p : result.getCopied().keySet()) { + rv.remove(p); // remove only one duplicate + } + return rv; + } + + public List<Path> getRemoved() { + return result.getRemoved(); + } + + public Map<Path,Path> getCopied() { + return result.getCopied(); + } + + public List<Path> getClean() { + return result.getClean(); + } + + public List<Path> getMissing() { + return result.getMissing(); + } + + public List<Path> getUnknown() { + return result.getUnknown(); + } + + public List<Path> getIgnored() { + return result.getIgnored(); + } +}
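A short sketch of how such a parser is intended to be driven, assuming the ExecHelper utility from this test suite and an hg executable on the PATH; the second ExecHelper constructor argument is left null exactly as the tests do, and the example class itself is hypothetical.

package org.tmatesoft.hg.test;

import org.tmatesoft.hg.util.Path;

// hypothetical example, not part of the changeset
public class StatusParseExample {

    public static void main(String[] args) throws Exception {
        StatusOutputParser statusParser = new StatusOutputParser();
        ExecHelper eh = new ExecHelper(statusParser, null); // second argument left null, as in the tests
        eh.run("hg", "status", "-A");
        for (Path p : statusParser.getModified()) {
            System.out.println("M " + p);
        }
        for (Path p : statusParser.getCopied().keySet()) {
            System.out.println(p + " copied from " + statusParser.getCopied().get(p));
        }
    }
}
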
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/TestByteChannel.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import static org.junit.Assert.assertArrayEquals; + +import org.junit.Assert; +import org.junit.Test; +import org.tmatesoft.hg.internal.ByteArrayChannel; +import org.tmatesoft.hg.repo.HgDataFile; +import org.tmatesoft.hg.repo.HgRepository; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class TestByteChannel { + + private HgRepository repo; + + public static void main(String[] args) throws Exception { +// HgRepoFacade rf = new HgRepoFacade(); +// rf.init(); +// HgDataFile file = rf.getRepository().getFileNode("src/org/tmatesoft/hg/internal/KeywordFilter.java"); +// for (int i = file.getLastRevision(); i >= 0; i--) { +// System.out.print("Content for revision:" + i); +// compareContent(file, i); +// System.out.println(" OK"); +// } + //CatCommand cmd = rf.createCatCommand(); + } + +// private static void compareContent(HgDataFile file, int rev) throws Exception { +// byte[] oldAccess = file.content(rev); +// ByteArrayChannel ch = new ByteArrayChannel(); +// file.content(rev, ch); +// byte[] newAccess = ch.toArray(); +// Assert.assertArrayEquals(oldAccess, newAccess); +// // don't trust anyone (even JUnit) +// if (!Arrays.equals(oldAccess, newAccess)) { +// throw new RuntimeException("Failed:" + rev); +// } +// } + + @Test + public void testContent() throws Exception { + repo = Configuration.get().find("log-1"); + final byte[] expectedContent = new byte[] { 'a', ' ', 13, 10 }; + ByteArrayChannel ch = new ByteArrayChannel(); + repo.getFileNode("dir/b").content(0, ch); + assertArrayEquals(expectedContent, ch.toArray()); + repo.getFileNode("d").content(HgRepository.TIP, ch = new ByteArrayChannel() ); + assertArrayEquals(expectedContent, ch.toArray()); + } + + @Test + public void testStripMetadata() throws Exception { + repo = Configuration.get().find("log-1"); + ByteArrayChannel ch = new ByteArrayChannel(); + HgDataFile dir_b = repo.getFileNode("dir/b"); + Assert.assertTrue(dir_b.isCopy()); + Assert.assertEquals("b", dir_b.getCopySourceName().toString()); + Assert.assertEquals("e44751cdc2d14f1eb0146aa64f0895608ad15917", dir_b.getCopySourceRevision().toString()); + dir_b.content(0, ch); + // assert rawContent has 1 10 ... 1 10 + assertArrayEquals("a \r\n".getBytes(), ch.toArray()); + // + // try once again to make sure metadata records/extracts correct offsets + dir_b.content(0, ch = new ByteArrayChannel()); + assertArrayEquals("a \r\n".getBytes(), ch.toArray()); + } +}
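The test above boils down to the channel-oriented read API this changeset introduces. A minimal standalone sketch, assuming a repository detectable from the working directory (as the test constructors do) and a placeholder file path:

package org.tmatesoft.hg.test;

import org.tmatesoft.hg.internal.ByteArrayChannel;
import org.tmatesoft.hg.repo.HgDataFile;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

// hypothetical example, not part of the changeset
public class CatExample {

    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        HgDataFile df = repo.getFileNode("dir/b"); // placeholder path
        ByteArrayChannel sink = new ByteArrayChannel();
        // content is pushed into a channel rather than returned as byte[]
        df.content(HgRepository.TIP, sink);
        byte[] data = sink.toArray();
        System.out.println(new String(data));
    }
}
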
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/TestHistory.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,227 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertTrue; + +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; + +import org.junit.Rule; +import org.junit.Test; +import org.tmatesoft.hg.core.HgChangeset; +import org.tmatesoft.hg.core.HgLogCommand; +import org.tmatesoft.hg.core.HgLogCommand.CollectHandler; +import org.tmatesoft.hg.core.HgLogCommand.FileHistoryHandler; +import org.tmatesoft.hg.core.HgLogCommand.FileRevision; +import org.tmatesoft.hg.repo.HgLookup; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.test.LogOutputParser.Record; +import org.tmatesoft.hg.util.Path; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class TestHistory { + + @Rule + public ErrorCollectorExt errorCollector = new ErrorCollectorExt(); + + private HgRepository repo; + private final ExecHelper eh; + private LogOutputParser changelogParser; + + public static void main(String[] args) throws Throwable { + TestHistory th = new TestHistory(); + th.testCompleteLog(); + th.testFollowHistory(); + th.errorCollector.verify(); +// th.testPerformance(); + th.testOriginalTestLogRepo(); + th.testUsernames(); + th.testBranches(); + // + th.errorCollector.verify(); + } + + public TestHistory() throws Exception { + this(new HgLookup().detectFromWorkingDir()); + } + + private TestHistory(HgRepository hgRepo) { + repo = hgRepo; + eh = new ExecHelper(changelogParser = new LogOutputParser(true), null); + + } + + @Test + public void testCompleteLog() throws Exception { + changelogParser.reset(); + eh.run("hg", "log", "--debug"); + List<HgChangeset> r = new HgLogCommand(repo).execute(); + report("hg log - COMPLETE REPO HISTORY", r, true); + } + + @Test + public void testFollowHistory() throws Exception { + final Path f = Path.create("cmdline/org/tmatesoft/hg/console/Remote.java"); + try { + if (repo.getFileNode(f).exists()) { // FIXME getFileNode shall not fail with IAE + changelogParser.reset(); + eh.run("hg", "log", "--debug", "--follow", f.toString()); + + class H extends CollectHandler implements FileHistoryHandler { + boolean copyReported = false; + boolean fromMatched = false; + public void copy(FileRevision from, FileRevision to) { + copyReported = true; + fromMatched = "src/com/tmate/hgkit/console/Remote.java".equals(from.getPath().toString()); + } + }; + H h = new H(); + new HgLogCommand(repo).file(f, true).execute(h); + String what = "hg log - FOLLOW FILE HISTORY"; + errorCollector.checkThat(what + "#copyReported ", h.copyReported, 
is(true)); + errorCollector.checkThat(what + "#copyFromMatched", h.fromMatched, is(true)); + // + // cmdline always gives changesets in order from newest (bigger rev number) to oldest. + // LogCommand does it the other way round, from oldest to newest, followed by revisions of the copy source, if any + // (apparently older than the oldest of the copy target). Hence the need to sort Java results according to rev numbers + final LinkedList<HgChangeset> sorted = new LinkedList<HgChangeset>(h.getChanges()); + Collections.sort(sorted, new Comparator<HgChangeset>() { + public int compare(HgChangeset cs1, HgChangeset cs2) { + return cs1.getRevision() < cs2.getRevision() ? 1 : -1; + } + }); + report(what, sorted, false); + } + } catch (IllegalArgumentException ex) { + System.out.println("Can't test file history with follow: need to query a specific file with history"); + } + } + + private void report(String what, List<HgChangeset> r, boolean reverseConsoleResults) { + final List<Record> consoleResult = changelogParser.getResult(); + if (reverseConsoleResults) { + Collections.reverse(consoleResult); + } + Iterator<Record> consoleResultItr = consoleResult.iterator(); + for (HgChangeset cs : r) { + Record cr = consoleResultItr.next(); + int x = cs.getRevision() == cr.changesetIndex ? 0x1 : 0; + x |= cs.getDate().equals(cr.date) ? 0x2 : 0; + x |= cs.getNodeid().toString().equals(cr.changesetNodeid) ? 0x4 : 0; + x |= cs.getUser().equals(cr.user) ? 0x8 : 0; + x |= cs.getComment().equals(cr.description) ? 0x10 : 0; + errorCollector.checkThat(String.format(what + ". Error in hg4j rev %d compared to cmdline rev %d.", cs.getRevision(), cr.changesetIndex), x, equalTo(0x1f)); + consoleResultItr.remove(); + } + errorCollector.checkThat(what + ". Insufficient results from Java ", consoleResultItr.hasNext(), equalTo(false)); + } + + public void testPerformance() throws Exception { + final int runs = 10; + final long start1 = System.currentTimeMillis(); + for (int i = 0; i < runs; i++) { + changelogParser.reset(); + eh.run("hg", "log", "--debug"); + } + final long start2 = System.currentTimeMillis(); + for (int i = 0; i < runs; i++) { + new HgLogCommand(repo).execute(); + } + final long end = System.currentTimeMillis(); + System.out.printf("'hg log --debug', %d runs: Native client total %d (%d per run), Java client %d (%d)\n", runs, start2-start1, (start2-start1)/runs, end-start2, (end-start2)/runs); + } + + @Test + public void testOriginalTestLogRepo() throws Exception { + repo = Configuration.get().find("log-1"); + HgLogCommand cmd = new HgLogCommand(repo); + // funnily enough, hg log -vf a -R c:\temp\hg\test-log\a doesn't work, while --cwd <same> works fine + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "a", "--cwd", repo.getLocation()); + report("log a", cmd.file("a", false).execute(), true); + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-f", "a", "--cwd", repo.getLocation()); + List<HgChangeset> r = cmd.file("a", true).execute(); + report("log -f a", r, true); + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-f", "e", "--cwd", repo.getLocation()); + report("log -f e", cmd.file("e", true).execute(), false /*#1, below*/); + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "dir/b", "--cwd", repo.getLocation()); + report("log dir/b", cmd.file("dir/b", false).execute(), true); + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-f", "dir/b", "--cwd", repo.getLocation()); + report("log -f dir/b", cmd.file("dir/b", true).execute(), false /*#1, 
below*/); + /* + * #1: false works because presently the command dispatches history of the queried file, and then history + * of its origin. With a history comprising renames only, this effectively gives reversed (newest to oldest) + * order of revisions. + */ + } + + @Test + public void testUsernames() throws Exception { + repo = Configuration.get().find("log-users"); + final String user1 = "User One <user1@example.org>"; + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-u", user1, "--cwd", repo.getLocation()); + report("log -u " + user1, new HgLogCommand(repo).user(user1).execute(), true); + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-u", "user1", "-u", "user2", "--cwd", repo.getLocation()); + report("log -u user1 -u user2", new HgLogCommand(repo).user("user1").user("user2").execute(), true); + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-u", "user3", "--cwd", repo.getLocation()); + report("log -u user3", new HgLogCommand(repo).user("user3").execute(), true); + } + + @Test + public void testBranches() throws Exception { + repo = Configuration.get().find("log-branches"); + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-b", "default", "--cwd", repo.getLocation()); + report("log -b default" , new HgLogCommand(repo).branch("default").execute(), true); + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-b", "test", "--cwd", repo.getLocation()); + report("log -b test" , new HgLogCommand(repo).branch("test").execute(), true); + // + assertTrue("log -b dummy shall yield empty result", new HgLogCommand(repo).branch("dummy").execute().isEmpty()); + // + changelogParser.reset(); + eh.run("hg", "log", "--debug", "-b", "default", "-b", "test", "--cwd", repo.getLocation()); + report("log -b default -b test" , new HgLogCommand(repo).branch("default").branch("test").execute(), true); + } +}
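For orientation, a condensed sketch of the HgLogCommand usage these tests verify against the command line; the user name, branch name and file name are placeholders, and the repository is assumed to be detectable from the working directory:

package org.tmatesoft.hg.test;

import java.util.List;

import org.tmatesoft.hg.core.HgChangeset;
import org.tmatesoft.hg.core.HgLogCommand;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

// hypothetical example, not part of the changeset
public class LogExample {

    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        // complete history; results come oldest to newest, unlike 'hg log'
        for (HgChangeset cs : new HgLogCommand(repo).execute()) {
            System.out.printf("%d:%s %s %s%n", cs.getRevision(), cs.getNodeid(), cs.getUser(), cs.getComment());
        }
        // filtered queries, mirroring the cmdline options the tests compare against;
        // "user1", "default" and "a" are placeholders
        List<HgChangeset> byUser = new HgLogCommand(repo).user("user1").execute();
        List<HgChangeset> byBranch = new HgLogCommand(repo).branch("default").execute();
        List<HgChangeset> fileHistory = new HgLogCommand(repo).file("a", true).execute(); // true: follow copies/renames
        System.out.printf("%d by user, %d on branch, %d for file%n", byUser.size(), byBranch.size(), fileHistory.size());
    }
}
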
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/TestManifest.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.junit.Assert.assertTrue; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.Map; + +import org.junit.Rule; +import org.junit.Test; +import org.tmatesoft.hg.core.HgLogCommand.FileRevision; +import org.tmatesoft.hg.core.HgManifestCommand; +import org.tmatesoft.hg.core.Nodeid; +import org.tmatesoft.hg.repo.HgLookup; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.util.Path; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class TestManifest { + + @Rule + public ErrorCollectorExt errorCollector = new ErrorCollectorExt(); + + private final HgRepository repo; + private ManifestOutputParser manifestParser; + private ExecHelper eh; + final LinkedList<FileRevision> revisions = new LinkedList<FileRevision>(); + private HgManifestCommand.Handler handler = new HgManifestCommand.Handler() { + + public void file(FileRevision fileRevision) { + revisions.add(fileRevision); + } + + public void end(Nodeid manifestRevision) {} + public void dir(Path p) {} + public void begin(Nodeid manifestRevision) {} + }; + + public static void main(String[] args) throws Throwable { + TestManifest tm = new TestManifest(); + tm.testTip(); + tm.testFirstRevision(); + tm.testRevisionInTheMiddle(); + tm.errorCollector.verify(); + } + + public TestManifest() throws Exception { + this(new HgLookup().detectFromWorkingDir()); + } + + private TestManifest(HgRepository hgRepo) { + repo = hgRepo; + assertTrue(!repo.isInvalid()); + eh = new ExecHelper(manifestParser = new ManifestOutputParser(), null); + } + + @Test + public void testTip() throws Exception { + testRevision(TIP); + } + + @Test + public void testFirstRevision() throws Exception { + testRevision(0); + } + + @Test + public void testRevisionInTheMiddle() throws Exception { + int rev = repo.getManifest().getRevisionCount() / 2; + if (rev == 0) { + throw new IllegalStateException("Need manifest with few revisions"); + } + testRevision(rev); + } + + private void testRevision(int rev) throws Exception { + manifestParser.reset(); + eh.run("hg", "manifest", "--debug", "--rev", String.valueOf(rev)); + revisions.clear(); + new HgManifestCommand(repo).revision(rev).execute(handler); + report("manifest " + (rev == TIP ? 
"TIP:" : "--rev " + rev)); + } + + private void report(String what) throws Exception { + final Map<Path, Nodeid> cmdLineResult = new LinkedHashMap<Path, Nodeid>(manifestParser.getResult()); + for (FileRevision fr : revisions) { + Nodeid nid = cmdLineResult.remove(fr.getPath()); + errorCollector.checkThat("Extra " + fr.getPath() + " in Java result", nid, notNullValue()); + if (nid != null) { + errorCollector.checkThat("Non-matching nodeid:" + nid, nid, equalTo(fr.getRevision())); + } + } + errorCollector.checkThat("Non-matched entries from command line:", cmdLineResult, equalTo(Collections.<Path,Nodeid>emptyMap())); + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/TestStatus.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,237 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.tmatesoft.hg.core.HgStatus.*; +import static org.tmatesoft.hg.core.HgStatus.Kind.*; +import static org.tmatesoft.hg.repo.HgRepository.TIP; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import org.junit.Assume; +import org.junit.Rule; +import org.junit.Test; +import org.tmatesoft.hg.core.HgStatus; +import org.tmatesoft.hg.core.HgStatusCommand; +import org.tmatesoft.hg.repo.HgLookup; +import org.tmatesoft.hg.repo.HgRepository; +import org.tmatesoft.hg.repo.HgStatusCollector; +import org.tmatesoft.hg.repo.HgWorkingCopyStatusCollector; +import org.tmatesoft.hg.util.Path; + + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class TestStatus { + + @Rule + public ErrorCollectorExt errorCollector = new ErrorCollectorExt(); + + private final HgRepository repo; + private StatusOutputParser statusParser; + private ExecHelper eh; + + public static void main(String[] args) throws Throwable { + TestStatus test = new TestStatus(); + test.testLowLevel(); + test.testStatusCommand(); + test.testPerformance(); + test.errorCollector.verify(); + } + + public TestStatus() throws Exception { + this(new HgLookup().detectFromWorkingDir()); + } + + private TestStatus(HgRepository hgRepo) { + repo = hgRepo; + Assume.assumeTrue(!repo.isInvalid()); + statusParser = new StatusOutputParser(); + eh = new ExecHelper(statusParser, null); + } + + @Test + public void testLowLevel() throws Exception { + final HgWorkingCopyStatusCollector wcc = new HgWorkingCopyStatusCollector(repo); + statusParser.reset(); + eh.run("hg", "status", "-A"); + HgStatusCollector.Record r = wcc.status(HgRepository.TIP); + report("hg status -A", r, statusParser); + // + statusParser.reset(); + int revision = 3; + eh.run("hg", "status", "-A", "--rev", String.valueOf(revision)); + r = wcc.status(revision); + report("status -A --rev " + revision, r, statusParser); + // + statusParser.reset(); + eh.run("hg", "status", "-A", "--change", String.valueOf(revision)); + r = new HgStatusCollector.Record(); + new HgStatusCollector(repo).change(revision, r); + report("status -A --change " + revision, r, statusParser); + // + statusParser.reset(); + int rev2 = 80; + final String range = String.valueOf(revision) + ":" + String.valueOf(rev2); + eh.run("hg", "status", "-A", "--rev", range); + r = new HgStatusCollector(repo).status(revision, rev2); + report("Status -A -rev " + range, r, statusParser); + } + + @Test + public void testStatusCommand() throws Exception 
{ + final HgStatusCommand sc = new HgStatusCommand(repo).all(); + StatusCollector r; + statusParser.reset(); + eh.run("hg", "status", "-A"); + sc.execute(r = new StatusCollector()); + report("hg status -A", r); + // + statusParser.reset(); + int revision = 3; + eh.run("hg", "status", "-A", "--rev", String.valueOf(revision)); + sc.base(revision).execute(r = new StatusCollector()); + report("status -A --rev " + revision, r); + // + statusParser.reset(); + eh.run("hg", "status", "-A", "--change", String.valueOf(revision)); + sc.base(TIP).revision(revision).execute(r = new StatusCollector()); + report("status -A --change " + revision, r); + + // TODO check not -A, but defaults()/custom set of modifications + } + + private static class StatusCollector implements HgStatusCommand.Handler { + private final Map<HgStatus.Kind, List<Path>> map = new TreeMap<HgStatus.Kind, List<Path>>(); + + public void handleStatus(HgStatus s) { + List<Path> l = map.get(s.getKind()); + if (l == null) { + l = new LinkedList<Path>(); + map.put(s.getKind(), l); + } + l.add(s.getPath()); + } + + public List<Path> get(Kind k) { + List<Path> rv = map.get(k); + if (rv == null) { + return Collections.emptyList(); + } + return rv; + } + } + + public void testRemovedAgainstNonTip() { + /* + status --rev N when a file added past revision N was removed (both physically and in dirstate), but not yet committed + + Reports an extra REMOVED file (the one added and removed in between). It shall not. + */ + } + + /* + * With warm-up of previous tests, 10 runs, time in milliseconds + * 'hg status -A': Native client total 953 (95 per run), Java client 94 (9) + * 'hg status -A --rev 3:80': Native client total 1828 (182 per run), Java client 235 (23) + * 'hg log --debug', 10 runs: Native client total 1766 (176 per run), Java client 78 (7) + * + * 18.02.2011 + * 'hg status -A --rev 3:80', 10 runs: Native client total 2000 (200 per run), Java client 250 (25) + * 'hg log --debug', 10 runs: Native client total 2297 (229 per run), Java client 125 (12) + */ + public void testPerformance() throws Exception { + final int runs = 10; + final long start1 = System.currentTimeMillis(); + for (int i = 0; i < runs; i++) { + statusParser.reset(); + eh.run("hg", "status", "-A", "--rev", "3:80"); + } + final long start2 = System.currentTimeMillis(); + for (int i = 0; i < runs; i++) { + StatusCollector r = new StatusCollector(); + new HgStatusCommand(repo).all().base(3).revision(80).execute(r); + } + final long end = System.currentTimeMillis(); + System.out.printf("'hg status -A --rev 3:80', %d runs: Native client total %d (%d per run), Java client %d (%d)\n", runs, start2-start1, (start2-start1)/runs, end-start2, (end-start2)/runs); + } + + private void report(String what, StatusCollector r) { + reportNotEqual(what + "#MODIFIED", r.get(Modified), statusParser.getModified()); + reportNotEqual(what + "#ADDED", r.get(Added), statusParser.getAdded()); + reportNotEqual(what + "#REMOVED", r.get(Removed), statusParser.getRemoved()); + reportNotEqual(what + "#CLEAN", r.get(Clean), statusParser.getClean()); + reportNotEqual(what + "#IGNORED", r.get(Ignored), statusParser.getIgnored()); + reportNotEqual(what + "#MISSING", r.get(Missing), statusParser.getMissing()); + reportNotEqual(what + "#UNKNOWN", r.get(Unknown), statusParser.getUnknown()); + // FIXME test copies + } + + private void report(String what, HgStatusCollector.Record r, StatusOutputParser statusParser) { + reportNotEqual(what + "#MODIFIED", r.getModified(), statusParser.getModified()); + reportNotEqual(what 
+ "#ADDED", r.getAdded(), statusParser.getAdded()); + reportNotEqual(what + "#REMOVED", r.getRemoved(), statusParser.getRemoved()); + reportNotEqual(what + "#CLEAN", r.getClean(), statusParser.getClean()); + reportNotEqual(what + "#IGNORED", r.getIgnored(), statusParser.getIgnored()); + reportNotEqual(what + "#MISSING", r.getMissing(), statusParser.getMissing()); + reportNotEqual(what + "#UNKNOWN", r.getUnknown(), statusParser.getUnknown()); + List<Path> copiedKeyDiff = difference(r.getCopied().keySet(), statusParser.getCopied().keySet()); + HashMap<Path, String> copyDiff = new HashMap<Path,String>(); + if (copiedKeyDiff.isEmpty()) { + for (Path jk : r.getCopied().keySet()) { + Path jv = r.getCopied().get(jk); + if (statusParser.getCopied().containsKey(jk)) { + Path cmdv = statusParser.getCopied().get(jk); + if (!jv.equals(cmdv)) { + copyDiff.put(jk, jv + " instead of " + cmdv); + } + } else { + copyDiff.put(jk, "ERRONEOUSLY REPORTED IN JAVA"); + } + } + } + errorCollector.checkThat(what + "#Non-matching 'copied' keys: ", copiedKeyDiff, equalTo(Collections.<Path>emptyList())); + errorCollector.checkThat(what + "#COPIED", copyDiff, equalTo(Collections.<Path,String>emptyMap())); + } + + private <T> void reportNotEqual(String what, Collection<T> l1, Collection<T> l2) { + List<T> diff = difference(l1, l2); + errorCollector.checkThat(what, diff, equalTo(Collections.<T>emptyList())); + } + + private static <T> List<T> difference(Collection<T> l1, Collection<T> l2) { + LinkedList<T> result = new LinkedList<T>(l2); + for (T t : l1) { + if (l2.contains(t)) { + result.remove(t); + } else { + result.add(t); + } + } + return result; + } +}
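The command-level API exercised above can be summarized in a short hypothetical sketch; the revision numbers 3 and 80 merely mirror the fixture values used in the test and carry no special meaning:

package org.tmatesoft.hg.test;

import org.tmatesoft.hg.core.HgStatus;
import org.tmatesoft.hg.core.HgStatusCommand;
import org.tmatesoft.hg.repo.HgLookup;
import org.tmatesoft.hg.repo.HgRepository;

// hypothetical example, not part of the changeset
public class StatusExample {

    public static void main(String[] args) throws Exception {
        HgRepository repo = new HgLookup().detectFromWorkingDir();
        HgStatusCommand.Handler handler = new HgStatusCommand.Handler() {
            public void handleStatus(HgStatus s) {
                System.out.println(s.getKind() + " " + s.getPath());
            }
        };
        // roughly 'hg status -A' against the working directory
        new HgStatusCommand(repo).all().execute(handler);
        // roughly 'hg status -A --rev 3:80'; the revision numbers are placeholders
        new HgStatusCommand(repo).all().base(3).revision(80).execute(handler);
    }
}
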
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/org/tmatesoft/hg/test/TestStorePath.java Wed Mar 09 05:22:17 2011 +0100 @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2011 TMate Software Ltd + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; version 2 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * For information on how to redistribute this software under + * the terms of a license other than GNU General Public License + * contact TMate Software at support@hg4j.com + */ +package org.tmatesoft.hg.test; + +import static org.hamcrest.CoreMatchers.equalTo; +import junit.framework.Assert; + +import org.junit.Rule; +import org.junit.Test; +import org.tmatesoft.hg.internal.Internals; +import org.tmatesoft.hg.util.PathRewrite; + +/** + * + * @author Artem Tikhomirov + * @author TMate Software Ltd. + */ +public class TestStorePath { + + @Rule + public ErrorCollectorExt errorCollector = new ErrorCollectorExt(); + + private PathRewrite storePathHelper; + + public static void main(String[] args) throws Throwable { + final TestStorePath test = new TestStorePath(); + test.testWindowsFilenames(); + test.testHashLongPath(); + test.errorCollector.verify(); + } + + public TestStorePath() { + final Internals i = new Internals(); + i.setStorageConfig(1, 0x7); + storePathHelper = i.buildDataFilesHelper(); + } + + @Test + public void testWindowsFilenames() { + // see http://mercurial.selenic.com/wiki/fncacheRepoFormat#Encoding_of_Windows_reserved_names + String n1 = "aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c"; + String r1 = "store/data/au~78.bla/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i"; + Assert.assertEquals("Windows filenames are ", r1, storePathHelper.rewrite(n1)); + } + + @Test + public void testHashLongPath() { + String n1 = "AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT"; + String r1 = "store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i"; + String n2 = "enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider"; + String r2 = "store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i"; + String n3 = "AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-QUICK-BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT"; + String r3 = "store/dh/au~78.the-quick-brown-fox-ju~3amps-over-the-lazy-dog-the-quick-brown-fox-jud4dcadd033000ab2b26eb66bae1906bcb15d4a70.i"; + // TODO segment[8] == [. ], segment[8] in the middle of windows reserved name or character (to see if ~xx is broken) + errorCollector.checkThat(storePathHelper.rewrite(n1), equalTo(r1)); + errorCollector.checkThat(storePathHelper.rewrite(n2), equalTo(r2)); + errorCollector.checkThat(storePathHelper.rewrite(n3), equalTo(r3)); + } +}
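Finally, a small sketch of the store-path encoding helper under test; Internals is, as the name suggests, an internal class, so this is illustration rather than a recommended entry point, and the input path is arbitrary:

package org.tmatesoft.hg.test;

import org.tmatesoft.hg.internal.Internals;
import org.tmatesoft.hg.util.PathRewrite;

// hypothetical example, not part of the changeset
public class StorePathExample {

    public static void main(String[] args) {
        Internals i = new Internals();
        i.setStorageConfig(1, 0x7); // same storage configuration the test installs
        PathRewrite storePathHelper = i.buildDataFilesHelper();
        // maps a tracked file name to its revlog location under the store
        System.out.println(storePathHelper.rewrite("src/org/sample/Example.java"));
    }
}
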